mirror of https://github.com/langgenius/dify.git
Merge remote-tracking branch 'myori/main' into p337
Commit: 33df7a3848
@@ -1,13 +1,13 @@
 ---
-name: Dify Frontend Testing
-description: Generate Jest + React Testing Library tests for Dify frontend components, hooks, and utilities. Triggers on testing, spec files, coverage, Jest, RTL, unit tests, integration tests, or write/review test requests.
+name: frontend-testing
+description: Generate Vitest + React Testing Library tests for Dify frontend components, hooks, and utilities. Triggers on testing, spec files, coverage, Vitest, RTL, unit tests, integration tests, or write/review test requests.
 ---

 # Dify Frontend Testing Skill

 This skill enables Claude to generate high-quality, comprehensive frontend tests for the Dify project following established conventions and best practices.

-> **⚠️ Authoritative Source**: This skill is derived from `web/testing/testing.md`. When in doubt, always refer to that document as the canonical specification.
+> **⚠️ Authoritative Source**: This skill is derived from `web/testing/testing.md`. Use Vitest mock/timer APIs (`vi.*`).

 ## When to Apply This Skill

@@ -15,7 +15,7 @@ Apply this skill when the user:

 - Asks to **write tests** for a component, hook, or utility
 - Asks to **review existing tests** for completeness
-- Mentions **Jest**, **React Testing Library**, **RTL**, or **spec files**
+- Mentions **Vitest**, **React Testing Library**, **RTL**, or **spec files**
 - Requests **test coverage** improvement
 - Uses `pnpm analyze-component` output as context
 - Mentions **testing**, **unit tests**, or **integration tests** for frontend code

@@ -33,9 +33,9 @@ Apply this skill when the user:

 | Tool | Version | Purpose |
 |------|---------|---------|
-| Jest | 29.7 | Test runner |
+| Vitest | 4.0.16 | Test runner |
 | React Testing Library | 16.0 | Component testing |
-| happy-dom | - | Test environment |
+| jsdom | - | Test environment |
 | nock | 14.0 | HTTP mocking |
 | TypeScript | 5.x | Type safety |

@@ -46,7 +46,7 @@
 pnpm test

 # Watch mode
-pnpm test -- --watch
+pnpm test:watch

 # Run specific file
 pnpm test -- path/to/file.spec.tsx

@@ -77,9 +77,9 @@ import Component from './index'
 // import { ChildComponent } from './child-component'

 // ✅ Mock external dependencies only
-jest.mock('@/service/api')
-jest.mock('next/navigation', () => ({
-  useRouter: () => ({ push: jest.fn() }),
+vi.mock('@/service/api')
+vi.mock('next/navigation', () => ({
+  useRouter: () => ({ push: vi.fn() }),
   usePathname: () => '/test',
 }))

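For context, a complete minimal spec under the new Vitest setup reads like the sketch below — `Component` and its `onClick` prop are the template's placeholders, not code from this commit:

```typescript
import { fireEvent, render, screen } from '@testing-library/react'
import { vi } from 'vitest'
import Component from './index'

// Mock external dependencies only; base components stay real.
vi.mock('@/service/api')

describe('Component', () => {
  beforeEach(() => {
    vi.clearAllMocks() // reset call history before every test
  })

  it('should handle click events', () => {
    const handleClick = vi.fn()
    render(<Component onClick={handleClick} />)

    fireEvent.click(screen.getByRole('button'))
    expect(handleClick).toHaveBeenCalledTimes(1)
  })
})
```
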
@@ -88,7 +88,7 @@ let mockSharedState = false

 describe('ComponentName', () => {
   beforeEach(() => {
-    jest.clearAllMocks() // ✅ Reset mocks BEFORE each test
+    vi.clearAllMocks() // ✅ Reset mocks BEFORE each test
     mockSharedState = false // ✅ Reset shared state
   })

@@ -117,7 +117,7 @@ describe('ComponentName', () => {
   // User Interactions
   describe('User Interactions', () => {
     it('should handle click events', () => {
-      const handleClick = jest.fn()
+      const handleClick = vi.fn()
       render(<Component onClick={handleClick} />)

       fireEvent.click(screen.getByRole('button'))

@@ -178,7 +178,7 @@ Process in this order for multi-file testing:
 - **500+ lines**: Consider splitting before testing
 - **Many dependencies**: Extract logic into hooks first

-> 📖 See `guides/workflow.md` for complete workflow details and todo list format.
+> 📖 See `references/workflow.md` for complete workflow details and todo list format.

 ## Testing Strategy

@@ -289,17 +289,18 @@ For each test file generated, aim for:
 - ✅ **>95%** branch coverage
 - ✅ **>95%** line coverage

-> **Note**: For multi-file directories, process one file at a time with full coverage each. See `guides/workflow.md`.
+> **Note**: For multi-file directories, process one file at a time with full coverage each. See `references/workflow.md`.

 ## Detailed Guides

 For more detailed information, refer to:

-- `guides/workflow.md` - **Incremental testing workflow** (MUST READ for multi-file testing)
-- `guides/mocking.md` - Mock patterns and best practices
-- `guides/async-testing.md` - Async operations and API calls
-- `guides/domain-components.md` - Workflow, Dataset, Configuration testing
-- `guides/common-patterns.md` - Frequently used testing patterns
+- `references/workflow.md` - **Incremental testing workflow** (MUST READ for multi-file testing)
+- `references/mocking.md` - Mock patterns and best practices
+- `references/async-testing.md` - Async operations and API calls
+- `references/domain-components.md` - Workflow, Dataset, Configuration testing
+- `references/common-patterns.md` - Frequently used testing patterns
+- `references/checklist.md` - Test generation checklist and validation steps

 ## Authoritative References

@@ -315,7 +316,7 @@ For more detailed information, refer to:

 ### Project Configuration

-- `web/jest.config.ts` - Jest configuration
-- `web/jest.setup.ts` - Test environment setup
+- `web/vitest.config.ts` - Vitest configuration
+- `web/vitest.setup.ts` - Test environment setup
 - `web/testing/analyze-component.js` - Component analysis tool
-- `web/__mocks__/react-i18next.ts` - Shared i18n mock (auto-loaded by Jest, no explicit mock needed; override locally only for custom translations)
+- Modules are not mocked automatically. Global mocks live in `web/vitest.setup.ts` (for example `react-i18next`, `next/image`); mock other modules like `ky` or `mime` locally in test files.

@@ -23,14 +23,14 @@ import userEvent from '@testing-library/user-event'
 // ============================================================================
 // Mocks
 // ============================================================================
-// WHY: Mocks must be hoisted to top of file (Jest requirement).
+// WHY: Mocks must be hoisted to top of file (Vitest requirement).
 // They run BEFORE imports, so keep them before component imports.

 // i18n (automatically mocked)
-// WHY: Shared mock at web/__mocks__/react-i18next.ts is auto-loaded by Jest
+// WHY: Global mock in web/vitest.setup.ts is auto-loaded by Vitest setup
 // No explicit mock needed - it returns translation keys as-is
 // Override only if custom translations are required:
-// jest.mock('react-i18next', () => ({
+// vi.mock('react-i18next', () => ({
 //   useTranslation: () => ({
 //     t: (key: string) => {
 //       const customTranslations: Record<string, string> = {

@@ -43,17 +43,17 @@ import userEvent from '@testing-library/user-event'

 // Router (if component uses useRouter, usePathname, useSearchParams)
 // WHY: Isolates tests from Next.js routing, enables testing navigation behavior
-// const mockPush = jest.fn()
-// jest.mock('next/navigation', () => ({
+// const mockPush = vi.fn()
+// vi.mock('next/navigation', () => ({
 //   useRouter: () => ({ push: mockPush }),
 //   usePathname: () => '/test-path',
 // }))

 // API services (if component fetches data)
 // WHY: Prevents real network calls, enables testing all states (loading/success/error)
-// jest.mock('@/service/api')
+// vi.mock('@/service/api')
 // import * as api from '@/service/api'
-// const mockedApi = api as jest.Mocked<typeof api>
+// const mockedApi = vi.mocked(api)

 // Shared mock state (for portal/dropdown components)
 // WHY: Portal components like PortalToFollowElem need shared state between

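The `vi.mocked(api)` pattern in the comments above is what gives typed access to the mocked module. A sketch of how it is used in a test body (the `fetchData` export is assumed for illustration):

```typescript
import { vi } from 'vitest'
import * as api from '@/service/api'

vi.mock('@/service/api')

// vi.mocked() narrows each export to its Vitest mock type,
// so mockResolvedValue and friends type-check without casts.
const mockedApi = vi.mocked(api)

it('should render fetched rows', async () => {
  mockedApi.fetchData.mockResolvedValue({ data: ['row'] }) // assumed export
  // ...render the component and assert on the resolved state
})
```
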
@@ -98,7 +98,7 @@ describe('ComponentName', () => {
   // - Prevents mock call history from leaking between tests
   // - MUST be beforeEach (not afterEach) to reset BEFORE assertions like toHaveBeenCalledTimes
   beforeEach(() => {
-    jest.clearAllMocks()
+    vi.clearAllMocks()
     // Reset shared mock state if used (CRITICAL for portal/dropdown tests)
     // mockOpenState = false
   })

@@ -155,7 +155,7 @@ describe('ComponentName', () => {
     // - userEvent simulates real user behavior (focus, hover, then click)
     // - fireEvent is lower-level, doesn't trigger all browser events
     // const user = userEvent.setup()
-    // const handleClick = jest.fn()
+    // const handleClick = vi.fn()
     // render(<ComponentName onClick={handleClick} />)
     //
     // await user.click(screen.getByRole('button'))

@@ -165,7 +165,7 @@ describe('ComponentName', () => {

   it('should call onChange when value changes', async () => {
     // const user = userEvent.setup()
-    // const handleChange = jest.fn()
+    // const handleChange = vi.fn()
     // render(<ComponentName onChange={handleChange} />)
     //
     // await user.type(screen.getByRole('textbox'), 'new value')

@@ -15,9 +15,9 @@ import { renderHook, act, waitFor } from '@testing-library/react'
 // ============================================================================

 // API services (if hook fetches data)
-// jest.mock('@/service/api')
+// vi.mock('@/service/api')
 // import * as api from '@/service/api'
-// const mockedApi = api as jest.Mocked<typeof api>
+// const mockedApi = vi.mocked(api)

 // ============================================================================
 // Test Helpers

@@ -38,7 +38,7 @@ import { renderHook, act, waitFor } from '@testing-library/react'

 describe('useHookName', () => {
   beforeEach(() => {
-    jest.clearAllMocks()
+    vi.clearAllMocks()
   })

   // --------------------------------------------------------------------------

@@ -145,7 +145,7 @@ describe('useHookName', () => {
   // --------------------------------------------------------------------------
   describe('Side Effects', () => {
     it('should call callback when value changes', () => {
-      // const callback = jest.fn()
+      // const callback = vi.fn()
      // const { result } = renderHook(() => useHookName({ onChange: callback }))
      //
      // act(() => {

@@ -156,9 +156,9 @@ describe('useHookName', () => {
   })

   it('should cleanup on unmount', () => {
-    // const cleanup = jest.fn()
-    // jest.spyOn(window, 'addEventListener')
-    // jest.spyOn(window, 'removeEventListener')
+    // const cleanup = vi.fn()
+    // vi.spyOn(window, 'addEventListener')
+    // vi.spyOn(window, 'removeEventListener')
     //
     // const { unmount } = renderHook(() => useHookName())
     //

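Filled in, the cleanup test above might look like the following sketch (`useHookName` is the template's placeholder):

```typescript
import { renderHook } from '@testing-library/react'
import { vi } from 'vitest'

it('should cleanup on unmount', () => {
  // vi.spyOn wraps the real methods and records their calls.
  const addSpy = vi.spyOn(window, 'addEventListener')
  const removeSpy = vi.spyOn(window, 'removeEventListener')

  const { unmount } = renderHook(() => useHookName())
  unmount()

  // Whatever was registered on mount should be removed on unmount.
  expect(removeSpy).toHaveBeenCalledTimes(addSpy.mock.calls.length)

  addSpy.mockRestore()
  removeSpy.mockRestore()
})
```
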
@@ -49,7 +49,7 @@ import userEvent from '@testing-library/user-event'

 it('should submit form', async () => {
   const user = userEvent.setup()
-  const onSubmit = jest.fn()
+  const onSubmit = vi.fn()

   render(<Form onSubmit={onSubmit} />)

@@ -77,15 +77,15 @@ it('should submit form', async () => {
 ```typescript
 describe('Debounced Search', () => {
   beforeEach(() => {
-    jest.useFakeTimers()
+    vi.useFakeTimers()
   })

   afterEach(() => {
-    jest.useRealTimers()
+    vi.useRealTimers()
   })

   it('should debounce search input', async () => {
-    const onSearch = jest.fn()
+    const onSearch = vi.fn()
     render(<SearchInput onSearch={onSearch} debounceMs={300} />)

     // Type in the input

@@ -95,7 +95,7 @@ describe('Debounced Search', () => {
     expect(onSearch).not.toHaveBeenCalled()

     // Advance timers
-    jest.advanceTimersByTime(300)
+    vi.advanceTimersByTime(300)

     // Now search is called
     expect(onSearch).toHaveBeenCalledWith('query')

@@ -107,8 +107,8 @@ describe('Debounced Search', () => {

 ```typescript
 it('should retry on failure', async () => {
-  jest.useFakeTimers()
-  const fetchData = jest.fn()
+  vi.useFakeTimers()
+  const fetchData = vi.fn()
     .mockRejectedValueOnce(new Error('Network error'))
     .mockResolvedValueOnce({ data: 'success' })

@@ -120,7 +120,7 @@ it('should retry on failure', async () => {
   })

   // Advance timer for retry
-  jest.advanceTimersByTime(1000)
+  vi.advanceTimersByTime(1000)

   // Second call succeeds
   await waitFor(() => {

@@ -128,7 +128,7 @@ it('should retry on failure', async () => {
     expect(screen.getByText('success')).toBeInTheDocument()
   })

-  jest.useRealTimers()
+  vi.useRealTimers()
 })
 ```

@@ -136,19 +136,19 @@ it('should retry on failure', async () => {

 ```typescript
 // Run all pending timers
-jest.runAllTimers()
+vi.runAllTimers()

 // Run only pending timers (not new ones created during execution)
-jest.runOnlyPendingTimers()
+vi.runOnlyPendingTimers()

 // Advance by specific time
-jest.advanceTimersByTime(1000)
+vi.advanceTimersByTime(1000)

 // Get current fake time
-jest.now()
+Date.now()

 // Clear all timers
-jest.clearAllTimers()
+vi.clearAllTimers()
 ```

 ## API Testing Patterns

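The `jest.now()` → `Date.now()` change works because Vitest's fake timers also mock the `Date` object, so the current fake time is read straight from `Date.now()`. A small sketch:

```typescript
import { vi } from 'vitest'

vi.useFakeTimers()
vi.setSystemTime(new Date('2024-01-01T00:00:00Z')) // pin the fake clock

Date.now() // 1704067200000 - read from the mocked clock

vi.advanceTimersByTime(5_000)
Date.now() // 5000 ms later; the clock moves with the timers

vi.useRealTimers() // always restore the real clock afterwards
```
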
@@ -158,7 +158,7 @@ jest.clearAllTimers()
 ```typescript
 describe('DataFetcher', () => {
   beforeEach(() => {
-    jest.clearAllMocks()
+    vi.clearAllMocks()
   })

   it('should show loading state', () => {

@@ -241,7 +241,7 @@ it('should submit form and show success', async () => {

 ```typescript
 it('should fetch data on mount', async () => {
-  const fetchData = jest.fn().mockResolvedValue({ data: 'test' })
+  const fetchData = vi.fn().mockResolvedValue({ data: 'test' })

   render(<ComponentWithEffect fetchData={fetchData} />)

@@ -255,7 +255,7 @@ it('should fetch data on mount', async () => {

 ```typescript
 it('should refetch when id changes', async () => {
-  const fetchData = jest.fn().mockResolvedValue({ data: 'test' })
+  const fetchData = vi.fn().mockResolvedValue({ data: 'test' })

   const { rerender } = render(<ComponentWithEffect id="1" fetchData={fetchData} />)

@@ -276,8 +276,8 @@ it('should refetch when id changes', async () => {

 ```typescript
 it('should cleanup subscription on unmount', () => {
-  const subscribe = jest.fn()
-  const unsubscribe = jest.fn()
+  const subscribe = vi.fn()
+  const unsubscribe = vi.fn()
   subscribe.mockReturnValue(unsubscribe)

   const { unmount } = render(<SubscriptionComponent subscribe={subscribe} />)

@@ -332,14 +332,14 @@ expect(description).toBeInTheDocument()

 ```typescript
 // Bad - fake timers don't work well with real Promises
-jest.useFakeTimers()
+vi.useFakeTimers()
 await waitFor(() => {
   expect(screen.getByText('Data')).toBeInTheDocument()
 }) // May timeout!

 // Good - use runAllTimers or advanceTimersByTime
-jest.useFakeTimers()
+vi.useFakeTimers()
 render(<Component />)
-jest.runAllTimers()
+vi.runAllTimers()
 expect(screen.getByText('Data')).toBeInTheDocument()
 ```

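When the timer callback itself resolves a Promise, Vitest's async variants advance the clock and flush the scheduled microtasks in one await — a sketch, assuming a component that renders "Data" after a timer-driven fetch:

```typescript
import { vi } from 'vitest'

vi.useFakeTimers()
render(<Component />)

// Advances fake time AND awaits promise callbacks fired by the timers.
await vi.advanceTimersByTimeAsync(300)

expect(screen.getByText('Data')).toBeInTheDocument()
vi.useRealTimers()
```
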
@@ -74,9 +74,9 @@ Use this checklist when generating or reviewing tests for Dify frontend componen
 ### Mocks

 - [ ] **DO NOT mock base components** (`@/app/components/base/*`)
-- [ ] `jest.clearAllMocks()` in `beforeEach` (not `afterEach`)
+- [ ] `vi.clearAllMocks()` in `beforeEach` (not `afterEach`)
 - [ ] Shared mock state reset in `beforeEach`
-- [ ] i18n uses shared mock (auto-loaded); only override locally for custom translations
+- [ ] i18n uses global mock (auto-loaded in `web/vitest.setup.ts`); only override locally for custom translations
 - [ ] Router mocks match actual Next.js API
 - [ ] Mocks reflect actual component conditional behavior
 - [ ] Only mock: API services, complex context providers, third-party libs

@@ -132,10 +132,10 @@ For the current file being tested:

 ```typescript
 // ❌ Mock doesn't match actual behavior
-jest.mock('./Component', () => () => <div>Mocked</div>)
+vi.mock('./Component', () => () => <div>Mocked</div>)

 // ✅ Mock matches actual conditional logic
-jest.mock('./Component', () => ({ isOpen }: any) =>
+vi.mock('./Component', () => ({ isOpen }: any) =>
   isOpen ? <div>Content</div> : null
 )
 ```

@@ -145,7 +145,7 @@ jest.mock('./Component', () => ({ isOpen }: any) =>
 ```typescript
 // ❌ Shared state not reset
 let mockState = false
-jest.mock('./useHook', () => () => mockState)
+vi.mock('./useHook', () => () => mockState)

 // ✅ Reset in beforeEach
 beforeEach(() => {

@@ -192,7 +192,7 @@ pnpm test -- path/to/file.spec.tsx
 pnpm test -- --coverage path/to/file.spec.tsx

 # Watch mode
-pnpm test -- --watch path/to/file.spec.tsx
+pnpm test:watch -- path/to/file.spec.tsx

 # Update snapshots (use sparingly)
 pnpm test -- -u path/to/file.spec.tsx

@@ -126,7 +126,7 @@ describe('Counter', () => {
 describe('ControlledInput', () => {
   it('should call onChange with new value', async () => {
     const user = userEvent.setup()
-    const handleChange = jest.fn()
+    const handleChange = vi.fn()

     render(<ControlledInput value="" onChange={handleChange} />)

@@ -136,7 +136,7 @@ describe('ControlledInput', () => {
   })

   it('should display controlled value', () => {
-    render(<ControlledInput value="controlled" onChange={jest.fn()} />)
+    render(<ControlledInput value="controlled" onChange={vi.fn()} />)

     expect(screen.getByRole('textbox')).toHaveValue('controlled')
   })

@@ -195,7 +195,7 @@ describe('ItemList', () => {

   it('should handle item selection', async () => {
     const user = userEvent.setup()
-    const onSelect = jest.fn()
+    const onSelect = vi.fn()

     render(<ItemList items={items} onSelect={onSelect} />)

@@ -217,20 +217,20 @@ describe('ItemList', () => {
 ```typescript
 describe('Modal', () => {
   it('should not render when closed', () => {
-    render(<Modal isOpen={false} onClose={jest.fn()} />)
+    render(<Modal isOpen={false} onClose={vi.fn()} />)

     expect(screen.queryByRole('dialog')).not.toBeInTheDocument()
   })

   it('should render when open', () => {
-    render(<Modal isOpen={true} onClose={jest.fn()} />)
+    render(<Modal isOpen={true} onClose={vi.fn()} />)

     expect(screen.getByRole('dialog')).toBeInTheDocument()
   })

   it('should call onClose when clicking overlay', async () => {
     const user = userEvent.setup()
-    const handleClose = jest.fn()
+    const handleClose = vi.fn()

     render(<Modal isOpen={true} onClose={handleClose} />)

@@ -241,7 +241,7 @@ describe('Modal', () => {

   it('should call onClose when pressing Escape', async () => {
     const user = userEvent.setup()
-    const handleClose = jest.fn()
+    const handleClose = vi.fn()

     render(<Modal isOpen={true} onClose={handleClose} />)

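The rest of this Escape-key test is not shown in the hunk; with user-event it would typically continue along these lines (sketch):

```typescript
// user-event dispatches a realistic keydown/keyup pair for Escape.
await user.keyboard('{Escape}')

expect(handleClose).toHaveBeenCalledTimes(1)
```
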
@@ -254,7 +254,7 @@ describe('Modal', () => {
     const user = userEvent.setup()

     render(
-      <Modal isOpen={true} onClose={jest.fn()}>
+      <Modal isOpen={true} onClose={vi.fn()}>
         <button>First</button>
         <button>Second</button>
       </Modal>

@@ -279,7 +279,7 @@ describe('Modal', () => {
 describe('LoginForm', () => {
   it('should submit valid form', async () => {
     const user = userEvent.setup()
-    const onSubmit = jest.fn()
+    const onSubmit = vi.fn()

     render(<LoginForm onSubmit={onSubmit} />)

@@ -296,7 +296,7 @@ describe('LoginForm', () => {
   it('should show validation errors', async () => {
     const user = userEvent.setup()

-    render(<LoginForm onSubmit={jest.fn()} />)
+    render(<LoginForm onSubmit={vi.fn()} />)

     // Submit empty form
     await user.click(screen.getByRole('button', { name: /sign in/i }))

@@ -308,7 +308,7 @@ describe('LoginForm', () => {
   it('should validate email format', async () => {
     const user = userEvent.setup()

-    render(<LoginForm onSubmit={jest.fn()} />)
+    render(<LoginForm onSubmit={vi.fn()} />)

     await user.type(screen.getByLabelText(/email/i), 'invalid-email')
     await user.click(screen.getByRole('button', { name: /sign in/i }))

@@ -318,7 +318,7 @@ describe('LoginForm', () => {

   it('should disable submit button while submitting', async () => {
     const user = userEvent.setup()
-    const onSubmit = jest.fn(() => new Promise(resolve => setTimeout(resolve, 100)))
+    const onSubmit = vi.fn(() => new Promise(resolve => setTimeout(resolve, 100)))

     render(<LoginForm onSubmit={onSubmit} />)

@@ -407,7 +407,7 @@ it('test 1', () => {

 // Good - cleanup is automatic with RTL, but reset mocks
 beforeEach(() => {
-  jest.clearAllMocks()
+  vi.clearAllMocks()
 })
 ```

@@ -23,7 +23,7 @@ import NodeConfigPanel from './node-config-panel'
 import { createMockNode, createMockWorkflowContext } from '@/__mocks__/workflow'

 // Mock workflow context
-jest.mock('@/app/components/workflow/hooks', () => ({
+vi.mock('@/app/components/workflow/hooks', () => ({
   useWorkflowStore: () => mockWorkflowStore,
   useNodesInteractions: () => mockNodesInteractions,
 }))

@@ -31,21 +31,21 @@ jest.mock('@/app/components/workflow/hooks', () => ({
 let mockWorkflowStore = {
   nodes: [],
   edges: [],
-  updateNode: jest.fn(),
+  updateNode: vi.fn(),
 }

 let mockNodesInteractions = {
-  handleNodeSelect: jest.fn(),
-  handleNodeDelete: jest.fn(),
+  handleNodeSelect: vi.fn(),
+  handleNodeDelete: vi.fn(),
 }

 describe('NodeConfigPanel', () => {
   beforeEach(() => {
-    jest.clearAllMocks()
+    vi.clearAllMocks()
     mockWorkflowStore = {
       nodes: [],
       edges: [],
-      updateNode: jest.fn(),
+      updateNode: vi.fn(),
     }
   })

@@ -161,23 +161,23 @@ import { render, screen, fireEvent, waitFor } from '@testing-library/react'
 import userEvent from '@testing-library/user-event'
 import DocumentUploader from './document-uploader'

-jest.mock('@/service/datasets', () => ({
-  uploadDocument: jest.fn(),
-  parseDocument: jest.fn(),
+vi.mock('@/service/datasets', () => ({
+  uploadDocument: vi.fn(),
+  parseDocument: vi.fn(),
 }))

 import * as datasetService from '@/service/datasets'
-const mockedService = datasetService as jest.Mocked<typeof datasetService>
+const mockedService = vi.mocked(datasetService)

 describe('DocumentUploader', () => {
   beforeEach(() => {
-    jest.clearAllMocks()
+    vi.clearAllMocks()
   })

   describe('File Upload', () => {
     it('should accept valid file types', async () => {
       const user = userEvent.setup()
-      const onUpload = jest.fn()
+      const onUpload = vi.fn()
       mockedService.uploadDocument.mockResolvedValue({ id: 'doc-1' })

       render(<DocumentUploader onUpload={onUpload} />)

@@ -326,14 +326,14 @@ describe('DocumentList', () => {
   describe('Search & Filtering', () => {
     it('should filter by search query', async () => {
       const user = userEvent.setup()
-      jest.useFakeTimers()
+      vi.useFakeTimers()

       render(<DocumentList datasetId="ds-1" />)

       await user.type(screen.getByPlaceholderText(/search/i), 'test query')

       // Debounce
-      jest.advanceTimersByTime(300)
+      vi.advanceTimersByTime(300)

       await waitFor(() => {
         expect(mockedService.getDocuments).toHaveBeenCalledWith(

@@ -342,7 +342,7 @@ describe('DocumentList', () => {
         )
       })

-      jest.useRealTimers()
+      vi.useRealTimers()
     })
   })
 })

@@ -367,13 +367,13 @@ import { render, screen, fireEvent, waitFor } from '@testing-library/react'
 import userEvent from '@testing-library/user-event'
 import AppConfigForm from './app-config-form'

-jest.mock('@/service/apps', () => ({
-  updateAppConfig: jest.fn(),
-  getAppConfig: jest.fn(),
+vi.mock('@/service/apps', () => ({
+  updateAppConfig: vi.fn(),
+  getAppConfig: vi.fn(),
 }))

 import * as appService from '@/service/apps'
-const mockedService = appService as jest.Mocked<typeof appService>
+const mockedService = vi.mocked(appService)

 describe('AppConfigForm', () => {
   const defaultConfig = {

@@ -384,7 +384,7 @@ describe('AppConfigForm', () => {
   }

   beforeEach(() => {
-    jest.clearAllMocks()
+    vi.clearAllMocks()
     mockedService.getAppConfig.mockResolvedValue(defaultConfig)
   })

@@ -19,8 +19,8 @@

 ```typescript
 // ❌ WRONG: Don't mock base components
-jest.mock('@/app/components/base/loading', () => () => <div>Loading</div>)
-jest.mock('@/app/components/base/button', () => ({ children }: any) => <button>{children}</button>)
+vi.mock('@/app/components/base/loading', () => () => <div>Loading</div>)
+vi.mock('@/app/components/base/button', () => ({ children }: any) => <button>{children}</button>)

 // ✅ CORRECT: Import and use real base components
 import Loading from '@/app/components/base/loading'

@@ -41,20 +41,23 @@ Only mock these categories:

 | Location | Purpose |
 |----------|---------|
-| `web/__mocks__/` | Reusable mocks shared across multiple test files |
-| Test file | Test-specific mocks, inline with `jest.mock()` |
+| `web/vitest.setup.ts` | Global mocks shared by all tests (for example `react-i18next`, `next/image`) |
+| `web/__mocks__/` | Reusable mock factories shared across multiple test files |
+| Test file | Test-specific mocks, inline with `vi.mock()` |
+
+Modules are not mocked automatically. Use `vi.mock` in test files, or add global mocks in `web/vitest.setup.ts`.

 ## Essential Mocks

-### 1. i18n (Auto-loaded via Shared Mock)
+### 1. i18n (Auto-loaded via Global Mock)

-A shared mock is available at `web/__mocks__/react-i18next.ts` and is auto-loaded by Jest.
+A global mock is defined in `web/vitest.setup.ts` and is auto-loaded by Vitest setup.
 **No explicit mock needed** for most tests - it returns translation keys as-is.

 For tests requiring custom translations, override the mock:

 ```typescript
-jest.mock('react-i18next', () => ({
+vi.mock('react-i18next', () => ({
   useTranslation: () => ({
     t: (key: string) => {
       const translations: Record<string, string> = {

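For reference, a global key-echo mock of the shape described above is typically registered in the setup file like this — a minimal sketch; the actual contents of `web/vitest.setup.ts` are not shown in this diff:

```typescript
// vitest.setup.ts (sketch) - echo translation keys so tests can assert on them
import { vi } from 'vitest'

vi.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string) => key,
    i18n: { language: 'en', changeLanguage: vi.fn() },
  }),
}))
```
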
@@ -69,15 +72,15 @@ jest.mock('react-i18next', () => ({
 ### 2. Next.js Router

 ```typescript
-const mockPush = jest.fn()
-const mockReplace = jest.fn()
+const mockPush = vi.fn()
+const mockReplace = vi.fn()

-jest.mock('next/navigation', () => ({
+vi.mock('next/navigation', () => ({
   useRouter: () => ({
     push: mockPush,
     replace: mockReplace,
-    back: jest.fn(),
-    prefetch: jest.fn(),
+    back: vi.fn(),
+    prefetch: vi.fn(),
   }),
   usePathname: () => '/current-path',
   useSearchParams: () => new URLSearchParams('?key=value'),

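One Vitest-specific caveat here: `vi.mock` factories are hoisted above the `const mockPush = ...` declarations, which is safe only because `mockPush` is read lazily inside the returned closure. If a factory needs a value while it executes, `vi.hoisted` is the supported escape hatch — a sketch:

```typescript
import { vi } from 'vitest'

// vi.hoisted runs before the hoisted vi.mock factories,
// so the mock exists whenever the factory body executes.
const { mockPush } = vi.hoisted(() => ({ mockPush: vi.fn() }))

vi.mock('next/navigation', () => ({
  useRouter: () => ({ push: mockPush }),
}))
```
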
@@ -85,7 +88,7 @@ jest.mock('next/navigation', () => ({

 describe('Component', () => {
   beforeEach(() => {
-    jest.clearAllMocks()
+    vi.clearAllMocks()
   })

   it('should navigate on click', () => {

@@ -102,7 +105,7 @@ describe('Component', () => {
 // ⚠️ Important: Use shared state for components that depend on each other
 let mockPortalOpenState = false

-jest.mock('@/app/components/base/portal-to-follow-elem', () => ({
+vi.mock('@/app/components/base/portal-to-follow-elem', () => ({
   PortalToFollowElem: ({ children, open, ...props }: any) => {
     mockPortalOpenState = open || false // Update shared state
     return <div data-testid="portal" data-open={open}>{children}</div>

@@ -119,7 +122,7 @@ jest.mock('@/app/components/base/portal-to-follow-elem', () => ({

 describe('Component', () => {
   beforeEach(() => {
-    jest.clearAllMocks()
+    vi.clearAllMocks()
     mockPortalOpenState = false // ✅ Reset shared state
   })
 })

@@ -130,13 +133,13 @@ describe('Component', () => {
 ```typescript
 import * as api from '@/service/api'

-jest.mock('@/service/api')
+vi.mock('@/service/api')

-const mockedApi = api as jest.Mocked<typeof api>
+const mockedApi = vi.mocked(api)

 describe('Component', () => {
   beforeEach(() => {
-    jest.clearAllMocks()
+    vi.clearAllMocks()

     // Setup default mock implementation
     mockedApi.fetchData.mockResolvedValue({ data: [] })

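With that default in place, individual tests can override per scenario (sketch; `fetchData` follows the example above):

```typescript
it('should show an error state', async () => {
  // Overrides the beforeEach default for this test only.
  mockedApi.fetchData.mockRejectedValueOnce(new Error('boom'))

  render(<Component />)

  expect(await screen.findByText(/error/i)).toBeInTheDocument()
})
```
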
@@ -243,13 +246,13 @@ describe('Component with Context', () => {

 ```typescript
 // SWR
-jest.mock('swr', () => ({
+vi.mock('swr', () => ({
   __esModule: true,
-  default: jest.fn(),
+  default: vi.fn(),
 }))

 import useSWR from 'swr'
-const mockedUseSWR = useSWR as jest.Mock
+const mockedUseSWR = vi.mocked(useSWR)

 describe('Component with SWR', () => {
   it('should show loading state', () => {

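Each test then drives the hook's states through the mocked return value — a sketch mirroring SWR's `data`/`error`/`isLoading` contract (the component name is illustrative):

```typescript
it('should show loading state', () => {
  mockedUseSWR.mockReturnValue({
    data: undefined,
    error: undefined,
    isLoading: true,
  } as any) // only the fields the component reads are provided

  render(<ComponentWithSWR />)
  expect(screen.getByText(/loading/i)).toBeInTheDocument()
})
```
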
@@ -0,0 +1 @@
+../.claude/skills

@@ -6,6 +6,9 @@
     "context": "..",
     "dockerfile": "Dockerfile"
   },
+  "mounts": [
+    "source=dify-dev-tmp,target=/tmp,type=volume"
+  ],
   "features": {
     "ghcr.io/devcontainers/features/node:1": {
       "nodeGypDependencies": true,

@@ -34,19 +37,13 @@
   },
   "postStartCommand": "./.devcontainer/post_start_command.sh",
   "postCreateCommand": "./.devcontainer/post_create_command.sh"
-
-  // Features to add to the dev container. More info: https://containers.dev/features.
-  // "features": {},
-
-  // Use 'forwardPorts' to make a list of ports inside the container available locally.
-  // "forwardPorts": [],
-
-  // Use 'postCreateCommand' to run commands after the container is created.
-  // "postCreateCommand": "python --version",
-
-  // Configure tool-specific properties.
-  // "customizations": {},
-
-  // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
-  // "remoteUser": "root"
-}
+}

@@ -1,6 +1,7 @@
 #!/bin/bash
 WORKSPACE_ROOT=$(pwd)

+export COREPACK_ENABLE_DOWNLOAD_PROMPT=0
 corepack enable
 cd web && pnpm install
 pipx install uv

@@ -7,234 +7,243 @@
 * @crazywoola @laipz8200 @Yeuoly

 # CODEOWNERS file
-.github/CODEOWNERS @laipz8200 @crazywoola
+/.github/CODEOWNERS @laipz8200 @crazywoola

 # Docs
-docs/ @crazywoola
+/docs/ @crazywoola

 # Backend (default owner, more specific rules below will override)
-api/ @QuantumGhost
+/api/ @QuantumGhost

 # Backend - MCP
-api/core/mcp/ @Nov1c444
-api/core/entities/mcp_provider.py @Nov1c444
-api/services/tools/mcp_tools_manage_service.py @Nov1c444
-api/controllers/mcp/ @Nov1c444
-api/controllers/console/app/mcp_server.py @Nov1c444
-api/tests/**/*mcp* @Nov1c444
+/api/core/mcp/ @Nov1c444
+/api/core/entities/mcp_provider.py @Nov1c444
+/api/services/tools/mcp_tools_manage_service.py @Nov1c444
+/api/controllers/mcp/ @Nov1c444
+/api/controllers/console/app/mcp_server.py @Nov1c444
+/api/tests/**/*mcp* @Nov1c444

 # Backend - Workflow - Engine (Core graph execution engine)
-api/core/workflow/graph_engine/ @laipz8200 @QuantumGhost
-api/core/workflow/runtime/ @laipz8200 @QuantumGhost
-api/core/workflow/graph/ @laipz8200 @QuantumGhost
-api/core/workflow/graph_events/ @laipz8200 @QuantumGhost
-api/core/workflow/node_events/ @laipz8200 @QuantumGhost
-api/core/model_runtime/ @laipz8200 @QuantumGhost
+/api/core/workflow/graph_engine/ @laipz8200 @QuantumGhost
+/api/core/workflow/runtime/ @laipz8200 @QuantumGhost
+/api/core/workflow/graph/ @laipz8200 @QuantumGhost
+/api/core/workflow/graph_events/ @laipz8200 @QuantumGhost
+/api/core/workflow/node_events/ @laipz8200 @QuantumGhost
+/api/core/model_runtime/ @laipz8200 @QuantumGhost

 # Backend - Workflow - Nodes (Agent, Iteration, Loop, LLM)
-api/core/workflow/nodes/agent/ @Nov1c444
-api/core/workflow/nodes/iteration/ @Nov1c444
-api/core/workflow/nodes/loop/ @Nov1c444
-api/core/workflow/nodes/llm/ @Nov1c444
+/api/core/workflow/nodes/agent/ @Nov1c444
+/api/core/workflow/nodes/iteration/ @Nov1c444
+/api/core/workflow/nodes/loop/ @Nov1c444
+/api/core/workflow/nodes/llm/ @Nov1c444

 # Backend - RAG (Retrieval Augmented Generation)
-api/core/rag/ @JohnJyong
-api/services/rag_pipeline/ @JohnJyong
-api/services/dataset_service.py @JohnJyong
-api/services/knowledge_service.py @JohnJyong
-api/services/external_knowledge_service.py @JohnJyong
-api/services/hit_testing_service.py @JohnJyong
-api/services/metadata_service.py @JohnJyong
-api/services/vector_service.py @JohnJyong
-api/services/entities/knowledge_entities/ @JohnJyong
-api/services/entities/external_knowledge_entities/ @JohnJyong
-api/controllers/console/datasets/ @JohnJyong
-api/controllers/service_api/dataset/ @JohnJyong
-api/models/dataset.py @JohnJyong
-api/tasks/rag_pipeline/ @JohnJyong
-api/tasks/add_document_to_index_task.py @JohnJyong
-api/tasks/batch_clean_document_task.py @JohnJyong
-api/tasks/clean_document_task.py @JohnJyong
-api/tasks/clean_notion_document_task.py @JohnJyong
-api/tasks/document_indexing_task.py @JohnJyong
-api/tasks/document_indexing_sync_task.py @JohnJyong
-api/tasks/document_indexing_update_task.py @JohnJyong
-api/tasks/duplicate_document_indexing_task.py @JohnJyong
-api/tasks/recover_document_indexing_task.py @JohnJyong
-api/tasks/remove_document_from_index_task.py @JohnJyong
-api/tasks/retry_document_indexing_task.py @JohnJyong
-api/tasks/sync_website_document_indexing_task.py @JohnJyong
-api/tasks/batch_create_segment_to_index_task.py @JohnJyong
-api/tasks/create_segment_to_index_task.py @JohnJyong
-api/tasks/delete_segment_from_index_task.py @JohnJyong
-api/tasks/disable_segment_from_index_task.py @JohnJyong
-api/tasks/disable_segments_from_index_task.py @JohnJyong
-api/tasks/enable_segment_to_index_task.py @JohnJyong
-api/tasks/enable_segments_to_index_task.py @JohnJyong
-api/tasks/clean_dataset_task.py @JohnJyong
-api/tasks/deal_dataset_index_update_task.py @JohnJyong
-api/tasks/deal_dataset_vector_index_task.py @JohnJyong
+/api/core/rag/ @JohnJyong
+/api/services/rag_pipeline/ @JohnJyong
+/api/services/dataset_service.py @JohnJyong
+/api/services/knowledge_service.py @JohnJyong
+/api/services/external_knowledge_service.py @JohnJyong
+/api/services/hit_testing_service.py @JohnJyong
+/api/services/metadata_service.py @JohnJyong
+/api/services/vector_service.py @JohnJyong
+/api/services/entities/knowledge_entities/ @JohnJyong
+/api/services/entities/external_knowledge_entities/ @JohnJyong
+/api/controllers/console/datasets/ @JohnJyong
+/api/controllers/service_api/dataset/ @JohnJyong
+/api/models/dataset.py @JohnJyong
+/api/tasks/rag_pipeline/ @JohnJyong
+/api/tasks/add_document_to_index_task.py @JohnJyong
+/api/tasks/batch_clean_document_task.py @JohnJyong
+/api/tasks/clean_document_task.py @JohnJyong
+/api/tasks/clean_notion_document_task.py @JohnJyong
+/api/tasks/document_indexing_task.py @JohnJyong
+/api/tasks/document_indexing_sync_task.py @JohnJyong
+/api/tasks/document_indexing_update_task.py @JohnJyong
+/api/tasks/duplicate_document_indexing_task.py @JohnJyong
+/api/tasks/recover_document_indexing_task.py @JohnJyong
+/api/tasks/remove_document_from_index_task.py @JohnJyong
+/api/tasks/retry_document_indexing_task.py @JohnJyong
+/api/tasks/sync_website_document_indexing_task.py @JohnJyong
+/api/tasks/batch_create_segment_to_index_task.py @JohnJyong
+/api/tasks/create_segment_to_index_task.py @JohnJyong
+/api/tasks/delete_segment_from_index_task.py @JohnJyong
+/api/tasks/disable_segment_from_index_task.py @JohnJyong
+/api/tasks/disable_segments_from_index_task.py @JohnJyong
+/api/tasks/enable_segment_to_index_task.py @JohnJyong
+/api/tasks/enable_segments_to_index_task.py @JohnJyong
+/api/tasks/clean_dataset_task.py @JohnJyong
+/api/tasks/deal_dataset_index_update_task.py @JohnJyong
+/api/tasks/deal_dataset_vector_index_task.py @JohnJyong

 # Backend - Plugins
-api/core/plugin/ @Mairuis @Yeuoly @Stream29
-api/services/plugin/ @Mairuis @Yeuoly @Stream29
-api/controllers/console/workspace/plugin.py @Mairuis @Yeuoly @Stream29
-api/controllers/inner_api/plugin/ @Mairuis @Yeuoly @Stream29
-api/tasks/process_tenant_plugin_autoupgrade_check_task.py @Mairuis @Yeuoly @Stream29
+/api/core/plugin/ @Mairuis @Yeuoly @Stream29
+/api/services/plugin/ @Mairuis @Yeuoly @Stream29
+/api/controllers/console/workspace/plugin.py @Mairuis @Yeuoly @Stream29
+/api/controllers/inner_api/plugin/ @Mairuis @Yeuoly @Stream29
+/api/tasks/process_tenant_plugin_autoupgrade_check_task.py @Mairuis @Yeuoly @Stream29

 # Backend - Trigger/Schedule/Webhook
-api/controllers/trigger/ @Mairuis @Yeuoly
-api/controllers/console/app/workflow_trigger.py @Mairuis @Yeuoly
-api/controllers/console/workspace/trigger_providers.py @Mairuis @Yeuoly
-api/core/trigger/ @Mairuis @Yeuoly
-api/core/app/layers/trigger_post_layer.py @Mairuis @Yeuoly
-api/services/trigger/ @Mairuis @Yeuoly
-api/models/trigger.py @Mairuis @Yeuoly
-api/fields/workflow_trigger_fields.py @Mairuis @Yeuoly
-api/repositories/workflow_trigger_log_repository.py @Mairuis @Yeuoly
-api/repositories/sqlalchemy_workflow_trigger_log_repository.py @Mairuis @Yeuoly
-api/libs/schedule_utils.py @Mairuis @Yeuoly
-api/services/workflow/scheduler.py @Mairuis @Yeuoly
-api/schedule/trigger_provider_refresh_task.py @Mairuis @Yeuoly
-api/schedule/workflow_schedule_task.py @Mairuis @Yeuoly
-api/tasks/trigger_processing_tasks.py @Mairuis @Yeuoly
-api/tasks/trigger_subscription_refresh_tasks.py @Mairuis @Yeuoly
-api/tasks/workflow_schedule_tasks.py @Mairuis @Yeuoly
-api/tasks/workflow_cfs_scheduler/ @Mairuis @Yeuoly
-api/events/event_handlers/sync_plugin_trigger_when_app_created.py @Mairuis @Yeuoly
-api/events/event_handlers/update_app_triggers_when_app_published_workflow_updated.py @Mairuis @Yeuoly
-api/events/event_handlers/sync_workflow_schedule_when_app_published.py @Mairuis @Yeuoly
-api/events/event_handlers/sync_webhook_when_app_created.py @Mairuis @Yeuoly
+/api/controllers/trigger/ @Mairuis @Yeuoly
+/api/controllers/console/app/workflow_trigger.py @Mairuis @Yeuoly
+/api/controllers/console/workspace/trigger_providers.py @Mairuis @Yeuoly
+/api/core/trigger/ @Mairuis @Yeuoly
+/api/core/app/layers/trigger_post_layer.py @Mairuis @Yeuoly
+/api/services/trigger/ @Mairuis @Yeuoly
+/api/models/trigger.py @Mairuis @Yeuoly
+/api/fields/workflow_trigger_fields.py @Mairuis @Yeuoly
+/api/repositories/workflow_trigger_log_repository.py @Mairuis @Yeuoly
+/api/repositories/sqlalchemy_workflow_trigger_log_repository.py @Mairuis @Yeuoly
+/api/libs/schedule_utils.py @Mairuis @Yeuoly
+/api/services/workflow/scheduler.py @Mairuis @Yeuoly
+/api/schedule/trigger_provider_refresh_task.py @Mairuis @Yeuoly
+/api/schedule/workflow_schedule_task.py @Mairuis @Yeuoly
+/api/tasks/trigger_processing_tasks.py @Mairuis @Yeuoly
+/api/tasks/trigger_subscription_refresh_tasks.py @Mairuis @Yeuoly
+/api/tasks/workflow_schedule_tasks.py @Mairuis @Yeuoly
+/api/tasks/workflow_cfs_scheduler/ @Mairuis @Yeuoly
+/api/events/event_handlers/sync_plugin_trigger_when_app_created.py @Mairuis @Yeuoly
+/api/events/event_handlers/update_app_triggers_when_app_published_workflow_updated.py @Mairuis @Yeuoly
+/api/events/event_handlers/sync_workflow_schedule_when_app_published.py @Mairuis @Yeuoly
+/api/events/event_handlers/sync_webhook_when_app_created.py @Mairuis @Yeuoly

 # Backend - Async Workflow
-api/services/async_workflow_service.py @Mairuis @Yeuoly
-api/tasks/async_workflow_tasks.py @Mairuis @Yeuoly
+/api/services/async_workflow_service.py @Mairuis @Yeuoly
+/api/tasks/async_workflow_tasks.py @Mairuis @Yeuoly

 # Backend - Billing
-api/services/billing_service.py @hj24 @zyssyz123
-api/controllers/console/billing/ @hj24 @zyssyz123
+/api/services/billing_service.py @hj24 @zyssyz123
+/api/controllers/console/billing/ @hj24 @zyssyz123

 # Backend - Enterprise
-api/configs/enterprise/ @GarfieldDai @GareArc
-api/services/enterprise/ @GarfieldDai @GareArc
-api/services/feature_service.py @GarfieldDai @GareArc
-api/controllers/console/feature.py @GarfieldDai @GareArc
-api/controllers/web/feature.py @GarfieldDai @GareArc
+/api/configs/enterprise/ @GarfieldDai @GareArc
+/api/services/enterprise/ @GarfieldDai @GareArc
+/api/services/feature_service.py @GarfieldDai @GareArc
+/api/controllers/console/feature.py @GarfieldDai @GareArc
+/api/controllers/web/feature.py @GarfieldDai @GareArc

 # Backend - Database Migrations
-api/migrations/ @snakevash @laipz8200 @MRZHUH
+/api/migrations/ @snakevash @laipz8200 @MRZHUH

+# Backend - Vector DB Middleware
+/api/configs/middleware/vdb/* @JohnJyong
+
 # Frontend
-web/ @iamjoel
+/web/ @iamjoel

+# Frontend - Web Tests
+/.github/workflows/web-tests.yml @iamjoel
+
 # Frontend - App - Orchestration
-web/app/components/workflow/ @iamjoel @zxhlyh
-web/app/components/workflow-app/ @iamjoel @zxhlyh
-web/app/components/app/configuration/ @iamjoel @zxhlyh
-web/app/components/app/app-publisher/ @iamjoel @zxhlyh
+/web/app/components/workflow/ @iamjoel @zxhlyh
+/web/app/components/workflow-app/ @iamjoel @zxhlyh
+/web/app/components/app/configuration/ @iamjoel @zxhlyh
+/web/app/components/app/app-publisher/ @iamjoel @zxhlyh

 # Frontend - WebApp - Chat
-web/app/components/base/chat/ @iamjoel @zxhlyh
+/web/app/components/base/chat/ @iamjoel @zxhlyh

 # Frontend - WebApp - Completion
-web/app/components/share/text-generation/ @iamjoel @zxhlyh
+/web/app/components/share/text-generation/ @iamjoel @zxhlyh

 # Frontend - App - List and Creation
-web/app/components/apps/ @JzoNgKVO @iamjoel
-web/app/components/app/create-app-dialog/ @JzoNgKVO @iamjoel
-web/app/components/app/create-app-modal/ @JzoNgKVO @iamjoel
-web/app/components/app/create-from-dsl-modal/ @JzoNgKVO @iamjoel
+/web/app/components/apps/ @JzoNgKVO @iamjoel
+/web/app/components/app/create-app-dialog/ @JzoNgKVO @iamjoel
+/web/app/components/app/create-app-modal/ @JzoNgKVO @iamjoel
+/web/app/components/app/create-from-dsl-modal/ @JzoNgKVO @iamjoel

 # Frontend - App - API Documentation
-web/app/components/develop/ @JzoNgKVO @iamjoel
+/web/app/components/develop/ @JzoNgKVO @iamjoel

 # Frontend - App - Logs and Annotations
-web/app/components/app/workflow-log/ @JzoNgKVO @iamjoel
-web/app/components/app/log/ @JzoNgKVO @iamjoel
-web/app/components/app/log-annotation/ @JzoNgKVO @iamjoel
-web/app/components/app/annotation/ @JzoNgKVO @iamjoel
+/web/app/components/app/workflow-log/ @JzoNgKVO @iamjoel
+/web/app/components/app/log/ @JzoNgKVO @iamjoel
+/web/app/components/app/log-annotation/ @JzoNgKVO @iamjoel
+/web/app/components/app/annotation/ @JzoNgKVO @iamjoel

 # Frontend - App - Monitoring
-web/app/(commonLayout)/app/(appDetailLayout)/\[appId\]/overview/ @JzoNgKVO @iamjoel
-web/app/components/app/overview/ @JzoNgKVO @iamjoel
+/web/app/(commonLayout)/app/(appDetailLayout)/\[appId\]/overview/ @JzoNgKVO @iamjoel
+/web/app/components/app/overview/ @JzoNgKVO @iamjoel

 # Frontend - App - Settings
-web/app/components/app-sidebar/ @JzoNgKVO @iamjoel
+/web/app/components/app-sidebar/ @JzoNgKVO @iamjoel

 # Frontend - RAG - Hit Testing
-web/app/components/datasets/hit-testing/ @JzoNgKVO @iamjoel
+/web/app/components/datasets/hit-testing/ @JzoNgKVO @iamjoel

 # Frontend - RAG - List and Creation
-web/app/components/datasets/list/ @iamjoel @WTW0313
-web/app/components/datasets/create/ @iamjoel @WTW0313
-web/app/components/datasets/create-from-pipeline/ @iamjoel @WTW0313
-web/app/components/datasets/external-knowledge-base/ @iamjoel @WTW0313
+/web/app/components/datasets/list/ @iamjoel @WTW0313
+/web/app/components/datasets/create/ @iamjoel @WTW0313
+/web/app/components/datasets/create-from-pipeline/ @iamjoel @WTW0313
+/web/app/components/datasets/external-knowledge-base/ @iamjoel @WTW0313

 # Frontend - RAG - Orchestration (general rule first, specific rules below override)
-web/app/components/rag-pipeline/ @iamjoel @WTW0313
-web/app/components/rag-pipeline/components/rag-pipeline-main.tsx @iamjoel @zxhlyh
-web/app/components/rag-pipeline/store/ @iamjoel @zxhlyh
+/web/app/components/rag-pipeline/ @iamjoel @WTW0313
+/web/app/components/rag-pipeline/components/rag-pipeline-main.tsx @iamjoel @zxhlyh
+/web/app/components/rag-pipeline/store/ @iamjoel @zxhlyh

 # Frontend - RAG - Documents List
-web/app/components/datasets/documents/list.tsx @iamjoel @WTW0313
-web/app/components/datasets/documents/create-from-pipeline/ @iamjoel @WTW0313
+/web/app/components/datasets/documents/list.tsx @iamjoel @WTW0313
+/web/app/components/datasets/documents/create-from-pipeline/ @iamjoel @WTW0313

 # Frontend - RAG - Segments List
-web/app/components/datasets/documents/detail/ @iamjoel @WTW0313
+/web/app/components/datasets/documents/detail/ @iamjoel @WTW0313

 # Frontend - RAG - Settings
-web/app/components/datasets/settings/ @iamjoel @WTW0313
+/web/app/components/datasets/settings/ @iamjoel @WTW0313

 # Frontend - Ecosystem - Plugins
-web/app/components/plugins/ @iamjoel @zhsama
+/web/app/components/plugins/ @iamjoel @zhsama

 # Frontend - Ecosystem - Tools
-web/app/components/tools/ @iamjoel @Yessenia-d
+/web/app/components/tools/ @iamjoel @Yessenia-d

 # Frontend - Ecosystem - MarketPlace
-web/app/components/plugins/marketplace/ @iamjoel @Yessenia-d
+/web/app/components/plugins/marketplace/ @iamjoel @Yessenia-d

 # Frontend - Login and Registration
-web/app/signin/ @douxc @iamjoel
-web/app/signup/ @douxc @iamjoel
-web/app/reset-password/ @douxc @iamjoel
-web/app/install/ @douxc @iamjoel
-web/app/init/ @douxc @iamjoel
-web/app/forgot-password/ @douxc @iamjoel
-web/app/account/ @douxc @iamjoel
+/web/app/signin/ @douxc @iamjoel
+/web/app/signup/ @douxc @iamjoel
+/web/app/reset-password/ @douxc @iamjoel
+/web/app/install/ @douxc @iamjoel
+/web/app/init/ @douxc @iamjoel
+/web/app/forgot-password/ @douxc @iamjoel
+/web/app/account/ @douxc @iamjoel

 # Frontend - Service Authentication
-web/service/base.ts @douxc @iamjoel
+/web/service/base.ts @douxc @iamjoel

 # Frontend - WebApp Authentication and Access Control
-web/app/(shareLayout)/components/ @douxc @iamjoel
-web/app/(shareLayout)/webapp-signin/ @douxc @iamjoel
-web/app/(shareLayout)/webapp-reset-password/ @douxc @iamjoel
-web/app/components/app/app-access-control/ @douxc @iamjoel
+/web/app/(shareLayout)/components/ @douxc @iamjoel
+/web/app/(shareLayout)/webapp-signin/ @douxc @iamjoel
+/web/app/(shareLayout)/webapp-reset-password/ @douxc @iamjoel
+/web/app/components/app/app-access-control/ @douxc @iamjoel

 # Frontend - Explore Page
-web/app/components/explore/ @CodingOnStar @iamjoel
+/web/app/components/explore/ @CodingOnStar @iamjoel

 # Frontend - Personal Settings
-web/app/components/header/account-setting/ @CodingOnStar @iamjoel
-web/app/components/header/account-dropdown/ @CodingOnStar @iamjoel
+/web/app/components/header/account-setting/ @CodingOnStar @iamjoel
+/web/app/components/header/account-dropdown/ @CodingOnStar @iamjoel

 # Frontend - Analytics
-web/app/components/base/ga/ @CodingOnStar @iamjoel
+/web/app/components/base/ga/ @CodingOnStar @iamjoel

 # Frontend - Base Components
-web/app/components/base/ @iamjoel @zxhlyh
+/web/app/components/base/ @iamjoel @zxhlyh

 # Frontend - Utils and Hooks
-web/utils/classnames.ts @iamjoel @zxhlyh
-web/utils/time.ts @iamjoel @zxhlyh
-web/utils/format.ts @iamjoel @zxhlyh
-web/utils/clipboard.ts @iamjoel @zxhlyh
-web/hooks/use-document-title.ts @iamjoel @zxhlyh
+/web/utils/classnames.ts @iamjoel @zxhlyh
+/web/utils/time.ts @iamjoel @zxhlyh
+/web/utils/format.ts @iamjoel @zxhlyh
+/web/utils/clipboard.ts @iamjoel @zxhlyh
+/web/hooks/use-document-title.ts @iamjoel @zxhlyh

 # Frontend - Billing and Education
-web/app/components/billing/ @iamjoel @zxhlyh
-web/app/education-apply/ @iamjoel @zxhlyh
+/web/app/components/billing/ @iamjoel @zxhlyh
+/web/app/education-apply/ @iamjoel @zxhlyh

 # Frontend - Workspace
-web/app/components/header/account-dropdown/workplace-selector/ @iamjoel @zxhlyh
+/web/app/components/header/account-dropdown/workplace-selector/ @iamjoel @zxhlyh

+# Docker
+/docker/* @laipz8200

@@ -66,7 +66,7 @@ jobs:
       # mdformat breaks YAML front matter in markdown files. Add --exclude for directories containing YAML front matter.
       - name: mdformat
         run: |
-          uvx --python 3.13 mdformat . --exclude ".claude/skills/**"
+          uvx --python 3.13 mdformat . --exclude ".claude/skills/**/SKILL.md"

       - name: Install pnpm
         uses: pnpm/action-setup@v4

@@ -42,12 +42,7 @@ jobs:
         run: pnpm run check:i18n-types

       - name: Run tests
-        run: |
-          pnpm exec jest \
-            --ci \
-            --runInBand \
-            --coverage \
-            --passWithNoTests
+        run: pnpm test --coverage

       - name: Coverage Summary
         if: always()

@@ -61,7 +56,7 @@ jobs:
           if [ ! -f "$COVERAGE_FILE" ] && [ ! -f "$COVERAGE_SUMMARY_FILE" ]; then
             echo "has_coverage=false" >> "$GITHUB_OUTPUT"
             echo "### 🚨 Test Coverage Report :test_tube:" >> "$GITHUB_STEP_SUMMARY"
-            echo "Coverage data not found. Ensure Jest runs with coverage enabled." >> "$GITHUB_STEP_SUMMARY"
+            echo "Coverage data not found. Ensure Vitest runs with coverage enabled." >> "$GITHUB_STEP_SUMMARY"
             exit 0
           fi

@@ -357,7 +352,7 @@ jobs:
               .join(' | ')} |`;

             console.log('');
-            console.log('<details><summary>Jest coverage table</summary>');
+            console.log('<details><summary>Vitest coverage table</summary>');
             console.log('');
             console.log(headerRow);
             console.log(dividerRow);

@@ -116,6 +116,7 @@ ALIYUN_OSS_AUTH_VERSION=v1
 ALIYUN_OSS_REGION=your-region
 # Don't start with '/'. OSS doesn't support leading slash in object names.
 ALIYUN_OSS_PATH=your-path
+ALIYUN_CLOUDBOX_ID=your-cloudbox-id

 # Google Storage configuration
 GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name

@@ -133,6 +134,7 @@ HUAWEI_OBS_BUCKET_NAME=your-bucket-name
 HUAWEI_OBS_SECRET_KEY=your-secret-key
 HUAWEI_OBS_ACCESS_KEY=your-access-key
 HUAWEI_OBS_SERVER=your-server-url
+HUAWEI_OBS_PATH_STYLE=false

 # Baidu OBS Storage Configuration
 BAIDU_OBS_BUCKET_NAME=your-bucket-name

@@ -690,7 +692,6 @@ ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE=5
 ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR=20
-# Maximum number of concurrent annotation import tasks per tenant
 ANNOTATION_IMPORT_MAX_CONCURRENT=5

 # Sandbox expired records clean configuration
 SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
 SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000

@@ -41,3 +41,8 @@ class AliyunOSSStorageConfig(BaseSettings):
         description="Base path within the bucket to store objects (e.g., 'my-app-data/')",
         default=None,
     )
+
+    ALIYUN_CLOUDBOX_ID: str | None = Field(
+        description="Cloudbox id for aliyun cloudbox service",
+        default=None,
+    )

@@ -26,3 +26,8 @@ class HuaweiCloudOBSStorageConfig(BaseSettings):
         description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')",
         default=None,
     )
+
+    HUAWEI_OBS_PATH_STYLE: bool = Field(
+        description="Flag to indicate whether to use path-style URLs for OBS requests",
+        default=False,
+    )

@@ -7,9 +7,9 @@ from controllers.console import console_ns
 from controllers.console.error import AlreadyActivateError
 from extensions.ext_database import db
 from libs.datetime_utils import naive_utc_now
-from libs.helper import EmailStr, extract_remote_ip, timezone
+from libs.helper import EmailStr, timezone
 from models import AccountStatus
-from services.account_service import AccountService, RegisterService
+from services.account_service import RegisterService

 DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"

@@ -92,7 +92,6 @@ class ActivateApi(Resource):
                 "ActivationResponse",
                 {
                     "result": fields.String(description="Operation result"),
-                    "data": fields.Raw(description="Login token data"),
                 },
             ),
         )

@ -117,6 +116,4 @@ class ActivateApi(Resource):
|
|||
account.initialized_at = naive_utc_now()
|
||||
db.session.commit()
|
||||
|
||||
token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
|
||||
|
||||
return {"result": "success", "data": token_pair.model_dump()}
|
||||
return {"result": "success"}
|
||||
|
|
|
|||
|
|
@@ -40,7 +40,7 @@ from .. import console_ns
 logger = logging.getLogger(__name__)


-class CompletionMessagePayload(BaseModel):
+class CompletionMessageExplorePayload(BaseModel):
     inputs: dict[str, Any]
     query: str = ""
     files: list[dict[str, Any]] | None = None

@@ -71,7 +71,7 @@ class ChatMessagePayload(BaseModel):
         raise ValueError("must be a valid UUID") from exc


-register_schema_models(console_ns, CompletionMessagePayload, ChatMessagePayload)
+register_schema_models(console_ns, CompletionMessageExplorePayload, ChatMessagePayload)


 # define completion api for user

@@ -80,13 +80,13 @@ register_schema_models(console_ns, CompletionMessagePayload, ChatMessagePayload)
     endpoint="installed_app_completion",
 )
 class CompletionApi(InstalledAppResource):
-    @console_ns.expect(console_ns.models[CompletionMessagePayload.__name__])
+    @console_ns.expect(console_ns.models[CompletionMessageExplorePayload.__name__])
     def post(self, installed_app):
         app_model = installed_app.app
         if app_model.mode != AppMode.COMPLETION:
             raise NotCompletionAppError()

-        payload = CompletionMessagePayload.model_validate(console_ns.payload or {})
+        payload = CompletionMessageExplorePayload.model_validate(console_ns.payload or {})
         args = payload.model_dump(exclude_none=True)

         streaming = payload.response_mode == "streaming"
@@ -1,5 +1,4 @@
-from typing import Any
 from uuid import UUID

 from flask import request
 from flask_restx import marshal_with

@@ -13,6 +12,7 @@ from controllers.console.explore.wraps import InstalledAppResource
 from core.app.entities.app_invoke_entities import InvokeFrom
 from extensions.ext_database import db
 from fields.conversation_fields import conversation_infinite_scroll_pagination_fields, simple_conversation_fields
+from libs.helper import UUIDStrOrEmpty
 from libs.login import current_user
 from models import Account
 from models.model import AppMode

@@ -24,7 +24,7 @@ from .. import console_ns


 class ConversationListQuery(BaseModel):
-    last_id: UUID | None = None
+    last_id: UUIDStrOrEmpty | None = None
     limit: int = Field(default=20, ge=1, le=100)
     pinned: bool | None = None

@@ -2,7 +2,8 @@ import logging
 from typing import Any

 from flask import request
-from flask_restx import Resource, inputs, marshal_with, reqparse
+from flask_restx import Resource, marshal_with
+from pydantic import BaseModel
 from sqlalchemy import and_, select
 from werkzeug.exceptions import BadRequest, Forbidden, NotFound

@@ -18,6 +19,15 @@ from services.account_service import TenantService
 from services.enterprise.enterprise_service import EnterpriseService
 from services.feature_service import FeatureService


+class InstalledAppCreatePayload(BaseModel):
+    app_id: str
+
+
+class InstalledAppUpdatePayload(BaseModel):
+    is_pinned: bool | None = None
+
+
 logger = logging.getLogger(__name__)


@@ -105,26 +115,25 @@ class InstalledAppsListApi(Resource):
     @account_initialization_required
     @cloud_edition_billing_resource_check("apps")
     def post(self):
-        parser = reqparse.RequestParser().add_argument("app_id", type=str, required=True, help="Invalid app_id")
-        args = parser.parse_args()
+        payload = InstalledAppCreatePayload.model_validate(console_ns.payload or {})

-        recommended_app = db.session.query(RecommendedApp).where(RecommendedApp.app_id == args["app_id"]).first()
+        recommended_app = db.session.query(RecommendedApp).where(RecommendedApp.app_id == payload.app_id).first()
         if recommended_app is None:
-            raise NotFound("App not found")
+            raise NotFound("Recommended app not found")

         _, current_tenant_id = current_account_with_tenant()

-        app = db.session.query(App).where(App.id == args["app_id"]).first()
+        app = db.session.query(App).where(App.id == payload.app_id).first()

         if app is None:
-            raise NotFound("App not found")
+            raise NotFound("App entity not found")

         if not app.is_public:
             raise Forbidden("You can't install a non-public app")

         installed_app = (
             db.session.query(InstalledApp)
-            .where(and_(InstalledApp.app_id == args["app_id"], InstalledApp.tenant_id == current_tenant_id))
+            .where(and_(InstalledApp.app_id == payload.app_id, InstalledApp.tenant_id == current_tenant_id))
             .first()
         )

@@ -133,7 +142,7 @@
             recommended_app.install_count += 1

             new_installed_app = InstalledApp(
-                app_id=args["app_id"],
+                app_id=payload.app_id,
                 tenant_id=current_tenant_id,
                 app_owner_tenant_id=app.tenant_id,
                 is_pinned=False,

@@ -163,12 +172,11 @@ class InstalledAppApi(InstalledAppResource):
         return {"result": "success", "message": "App uninstalled successfully"}, 204

     def patch(self, installed_app):
-        parser = reqparse.RequestParser().add_argument("is_pinned", type=inputs.boolean)
-        args = parser.parse_args()
+        payload = InstalledAppUpdatePayload.model_validate(console_ns.payload or {})

         commit_args = False
-        if "is_pinned" in args:
-            installed_app.is_pinned = args["is_pinned"]
+        if payload.is_pinned is not None:
+            installed_app.is_pinned = payload.is_pinned
             commit_args = True

         if commit_args:
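The installed-app hunks repeat one migration pattern that recurs throughout this commit: a `reqparse.RequestParser` is replaced by a pydantic model validated against the namespace payload. A rough sketch of the before/after shape, with hypothetical names rather than the actual Dify handlers:

```python
from pydantic import BaseModel


class ExamplePayload(BaseModel):
    app_id: str
    is_pinned: bool | None = None


def patch_handler(raw_payload: dict | None) -> dict:
    # Before: reqparse parsed loosely-typed args and needed '"key" in args' checks.
    # After: model_validate raises ValidationError on bad input and gives typed access.
    payload = ExamplePayload.model_validate(raw_payload or {})
    updates = {}
    if payload.is_pinned is not None:  # only apply fields the client actually sent
        updates["is_pinned"] = payload.is_pinned
    return updates


print(patch_handler({"app_id": "123", "is_pinned": True}))  # {'is_pinned': True}
```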
@@ -18,6 +18,7 @@ from controllers.console.wraps import (
     setup_required,
 )
 from core.entities.mcp_provider import MCPAuthentication, MCPConfiguration
+from core.helper.tool_provider_cache import ToolProviderListCache
 from core.mcp.auth.auth_flow import auth, handle_callback
 from core.mcp.error import MCPAuthError, MCPError, MCPRefreshTokenError
 from core.mcp.mcp_client import MCPClient

@@ -944,7 +945,7 @@ class ToolProviderMCPApi(Resource):
         configuration = MCPConfiguration.model_validate(args["configuration"])
         authentication = MCPAuthentication.model_validate(args["authentication"]) if args["authentication"] else None

-        # Create provider
+        # Create provider in transaction
         with Session(db.engine) as session, session.begin():
             service = MCPToolManageService(session=session)
             result = service.create_provider(

@@ -960,7 +961,11 @@
                 configuration=configuration,
                 authentication=authentication,
             )
-            return jsonable_encoder(result)
+
+        # Invalidate cache AFTER transaction commits to avoid holding locks during Redis operations
+        ToolProviderListCache.invalidate_cache(tenant_id)
+
+        return jsonable_encoder(result)

     @console_ns.expect(parser_mcp_put)
     @setup_required

@@ -972,17 +977,23 @@ class ToolProviderMCPApi(Resource):
         authentication = MCPAuthentication.model_validate(args["authentication"]) if args["authentication"] else None
         _, current_tenant_id = current_account_with_tenant()

-        # Step 1: Validate server URL change if needed (includes URL format validation and network operation)
-        validation_result = None
+        # Step 1: Get provider data for URL validation (short-lived session, no network I/O)
+        validation_data = None
         with Session(db.engine) as session:
             service = MCPToolManageService(session=session)
-            validation_result = service.validate_server_url_change(
-                tenant_id=current_tenant_id, provider_id=args["provider_id"], new_server_url=args["server_url"]
+            validation_data = service.get_provider_for_url_validation(
+                tenant_id=current_tenant_id, provider_id=args["provider_id"]
             )

-        # No need to check for errors here, exceptions will be raised directly
-        # Step 2: Perform database update in a transaction
+        # Step 2: Perform URL validation with network I/O OUTSIDE of any database session
+        # This prevents holding database locks during potentially slow network operations
+        validation_result = MCPToolManageService.validate_server_url_standalone(
+            tenant_id=current_tenant_id,
+            new_server_url=args["server_url"],
+            validation_data=validation_data,
+        )
+
+        # Step 3: Perform database update in a transaction
         with Session(db.engine) as session, session.begin():
             service = MCPToolManageService(session=session)
             service.update_provider(

@@ -999,7 +1010,11 @@
                 authentication=authentication,
                 validation_result=validation_result,
             )
-            return {"result": "success"}
+
+        # Invalidate cache AFTER transaction commits to avoid holding locks during Redis operations
+        ToolProviderListCache.invalidate_cache(current_tenant_id)
+
+        return {"result": "success"}

     @console_ns.expect(parser_mcp_delete)
     @setup_required

@@ -1012,7 +1027,11 @@
         with Session(db.engine) as session, session.begin():
             service = MCPToolManageService(session=session)
             service.delete_provider(tenant_id=current_tenant_id, provider_id=args["provider_id"])
-            return {"result": "success"}
+
+        # Invalidate cache AFTER transaction commits to avoid holding locks during Redis operations
+        ToolProviderListCache.invalidate_cache(current_tenant_id)
+
+        return {"result": "success"}


 parser_auth = (

@@ -1062,6 +1081,8 @@ class ToolMCPAuthApi(Resource):
                     credentials=provider_entity.credentials,
                     authed=True,
                 )
+                # Invalidate cache after updating credentials
+                ToolProviderListCache.invalidate_cache(tenant_id)
                 return {"result": "success"}
         except MCPAuthError as e:
             try:

@@ -1075,16 +1096,22 @@
             with Session(db.engine) as session, session.begin():
                 service = MCPToolManageService(session=session)
                 response = service.execute_auth_actions(auth_result)
+            # Invalidate cache after auth actions may have updated provider state
+            ToolProviderListCache.invalidate_cache(tenant_id)
             return response
         except MCPRefreshTokenError as e:
             with Session(db.engine) as session, session.begin():
                 service = MCPToolManageService(session=session)
                 service.clear_provider_credentials(provider_id=provider_id, tenant_id=tenant_id)
+            # Invalidate cache after clearing credentials
+            ToolProviderListCache.invalidate_cache(tenant_id)
             raise ValueError(f"Failed to refresh token, please try to authorize again: {e}") from e
         except (MCPError, ValueError) as e:
             with Session(db.engine) as session, session.begin():
                 service = MCPToolManageService(session=session)
                 service.clear_provider_credentials(provider_id=provider_id, tenant_id=tenant_id)
+            # Invalidate cache after clearing credentials
+            ToolProviderListCache.invalidate_cache(tenant_id)
             raise ValueError(f"Failed to connect to MCP server: {e}") from e
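The recurring point in the MCP provider hunks is ordering: the Redis cache is invalidated only after the database transaction commits, so a slow cache call never extends the life of row locks. A simplified, runnable sketch of that ordering — the session and cache classes below are toy stand-ins for SQLAlchemy and the Dify cache helper, not their real APIs:

```python
class FakeSession:
    """Toy stand-in for a SQLAlchemy session."""

    def write(self, data):
        print("db write (locks held):", data)

    def commit(self):
        print("commit (locks released)")


class FakeCache:
    def invalidate(self, tenant_id):
        print("redis invalidate:", tenant_id)


def update_provider(session, cache, tenant_id, data):
    session.write(data)
    session.commit()             # transaction ends here
    cache.invalidate(tenant_id)  # cache I/O only after commit, outside any lock
    return {"result": "success"}


print(update_provider(FakeSession(), FakeCache(), "tenant-1", {"name": "mcp"}))
```

The same reasoning explains the server-URL change: the network-bound validation now runs between two short-lived sessions instead of inside one long transaction.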
@@ -4,7 +4,7 @@
 from flask import request
 from flask_restx import Resource
 from flask_restx._http import HTTPStatus
-from pydantic import BaseModel, Field, model_validator
+from pydantic import BaseModel, Field, field_validator, model_validator
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import BadRequest, NotFound

@@ -51,6 +51,32 @@ class ConversationRenamePayload(BaseModel):
 class ConversationVariablesQuery(BaseModel):
     last_id: UUID | None = Field(default=None, description="Last variable ID for pagination")
     limit: int = Field(default=20, ge=1, le=100, description="Number of variables to return")
+    variable_name: str | None = Field(
+        default=None, description="Filter variables by name", min_length=1, max_length=255
+    )
+
+    @field_validator("variable_name", mode="before")
+    @classmethod
+    def validate_variable_name(cls, v: str | None) -> str | None:
+        """
+        Validate variable_name to prevent injection attacks.
+        """
+        if v is None:
+            return v
+
+        # Only allow safe characters: alphanumeric, underscore, hyphen, period
+        if not v.replace("-", "").replace("_", "").replace(".", "").isalnum():
+            raise ValueError(
+                "Variable name can only contain letters, numbers, hyphens (-), underscores (_), and periods (.)"
+            )
+
+        # Prevent SQL injection patterns
+        dangerous_patterns = ["'", '"', ";", "--", "/*", "*/", "xp_", "sp_"]
+        for pattern in dangerous_patterns:
+            if pattern in v.lower():
+                raise ValueError(f"Variable name contains invalid characters: {pattern}")
+
+        return v


 class ConversationVariableUpdatePayload(BaseModel):

@@ -199,7 +225,7 @@ class ConversationVariablesApi(Resource):

         try:
             return ConversationService.get_conversational_variable(
-                app_model, conversation_id, end_user, query_args.limit, last_id
+                app_model, conversation_id, end_user, query_args.limit, last_id, query_args.variable_name
             )
         except services.errors.conversation.ConversationNotExistsError:
             raise NotFound("Conversation Not Exists.")
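The `variable_name` validator is a belt-and-braces filter: a whitelist check (alphanumerics plus `-`, `_`, `.`) followed by a blacklist of SQL-ish tokens. A quick usage sketch showing how a pydantic model with this validator rejects bad input (a pared-down model, not the full query class from the hunk):

```python
from pydantic import BaseModel, ValidationError, field_validator


class Query(BaseModel):
    variable_name: str | None = None

    @field_validator("variable_name", mode="before")
    @classmethod
    def check(cls, v: str | None) -> str | None:
        if v is None:
            return v
        if not v.replace("-", "").replace("_", "").replace(".", "").isalnum():
            raise ValueError("unsafe characters in variable name")
        return v


print(Query(variable_name="user_name.v2").variable_name)  # accepted
try:
    Query(variable_name="name'; DROP TABLE--")
except ValidationError as e:
    print("rejected:", e.errors()[0]["msg"])
```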
@@ -1,14 +1,13 @@
 import logging

 from flask import request
-from flask_restx import Resource, marshal_with, reqparse
+from flask_restx import Resource, marshal_with
+from pydantic import BaseModel, ConfigDict, Field
 from werkzeug.exceptions import Unauthorized

 from constants import HEADER_NAME_APP_CODE
 from controllers.common import fields
-from controllers.web import web_ns
-from controllers.web.error import AppUnavailableError
-from controllers.web.wraps import WebApiResource
+from controllers.common.schema import register_schema_models
 from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict
 from libs.passport import PassportService
 from libs.token import extract_webapp_passport

@@ -18,9 +17,23 @@ from services.enterprise.enterprise_service import EnterpriseService
 from services.feature_service import FeatureService
 from services.webapp_auth_service import WebAppAuthService

+from . import web_ns
+from .error import AppUnavailableError
+from .wraps import WebApiResource
+
 logger = logging.getLogger(__name__)


+class AppAccessModeQuery(BaseModel):
+    model_config = ConfigDict(populate_by_name=True)
+
+    app_id: str | None = Field(default=None, alias="appId", description="Application ID")
+    app_code: str | None = Field(default=None, alias="appCode", description="Application code")
+
+
+register_schema_models(web_ns, AppAccessModeQuery)
+
+
 @web_ns.route("/parameters")
 class AppParameterApi(WebApiResource):
     """Resource for app variables."""

@@ -96,21 +109,16 @@ class AppAccessMode(Resource):
         }
     )
     def get(self):
-        parser = (
-            reqparse.RequestParser()
-            .add_argument("appId", type=str, required=False, location="args")
-            .add_argument("appCode", type=str, required=False, location="args")
-        )
-        args = parser.parse_args()
+        raw_args = request.args.to_dict()
+        args = AppAccessModeQuery.model_validate(raw_args)

         features = FeatureService.get_system_features()
         if not features.webapp_auth.enabled:
             return {"accessMode": "public"}

-        app_id = args.get("appId")
-        if args.get("appCode"):
-            app_code = args["appCode"]
-            app_id = AppService.get_app_id_by_code(app_code)
+        app_id = args.app_id
+        if args.app_code:
+            app_id = AppService.get_app_id_by_code(args.app_code)

         if not app_id:
             raise ValueError("appId or appCode must be provided")
@@ -1,9 +1,11 @@
 import logging
 from typing import Any, Literal

-from flask_restx import reqparse
+from pydantic import BaseModel, Field, field_validator
 from werkzeug.exceptions import InternalServerError, NotFound

 import services
+from controllers.common.schema import register_schema_models
 from controllers.web import web_ns
 from controllers.web.error import (
     AppUnavailableError,

@@ -34,25 +36,44 @@ from services.errors.llm import InvokeRateLimitError
 logger = logging.getLogger(__name__)


+class CompletionMessagePayload(BaseModel):
+    inputs: dict[str, Any] = Field(description="Input variables for the completion")
+    query: str = Field(default="", description="Query text for completion")
+    files: list[dict[str, Any]] | None = Field(default=None, description="Files to be processed")
+    response_mode: Literal["blocking", "streaming"] | None = Field(
+        default=None, description="Response mode: blocking or streaming"
+    )
+    retriever_from: str = Field(default="web_app", description="Source of retriever")
+
+
+class ChatMessagePayload(BaseModel):
+    inputs: dict[str, Any] = Field(description="Input variables for the chat")
+    query: str = Field(description="User query/message")
+    files: list[dict[str, Any]] | None = Field(default=None, description="Files to be processed")
+    response_mode: Literal["blocking", "streaming"] | None = Field(
+        default=None, description="Response mode: blocking or streaming"
+    )
+    conversation_id: str | None = Field(default=None, description="Conversation ID")
+    parent_message_id: str | None = Field(default=None, description="Parent message ID")
+    retriever_from: str = Field(default="web_app", description="Source of retriever")
+
+    @field_validator("conversation_id", "parent_message_id")
+    @classmethod
+    def validate_uuid(cls, value: str | None) -> str | None:
+        if value is None:
+            return value
+        return uuid_value(value)
+
+
+register_schema_models(web_ns, CompletionMessagePayload, ChatMessagePayload)
+
+
 # define completion api for user
 @web_ns.route("/completion-messages")
 class CompletionApi(WebApiResource):
     @web_ns.doc("Create Completion Message")
     @web_ns.doc(description="Create a completion message for text generation applications.")
-    @web_ns.doc(
-        params={
-            "inputs": {"description": "Input variables for the completion", "type": "object", "required": True},
-            "query": {"description": "Query text for completion", "type": "string", "required": False},
-            "files": {"description": "Files to be processed", "type": "array", "required": False},
-            "response_mode": {
-                "description": "Response mode: blocking or streaming",
-                "type": "string",
-                "enum": ["blocking", "streaming"],
-                "required": False,
-            },
-            "retriever_from": {"description": "Source of retriever", "type": "string", "required": False},
-        }
-    )
+    @web_ns.expect(web_ns.models[CompletionMessagePayload.__name__])
     @web_ns.doc(
         responses={
             200: "Success",

@@ -67,18 +88,10 @@ class CompletionApi(WebApiResource):
         if app_model.mode != AppMode.COMPLETION:
             raise NotCompletionAppError()

-        parser = (
-            reqparse.RequestParser()
-            .add_argument("inputs", type=dict, required=True, location="json")
-            .add_argument("query", type=str, location="json", default="")
-            .add_argument("files", type=list, required=False, location="json")
-            .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
-            .add_argument("retriever_from", type=str, required=False, default="web_app", location="json")
-        )
-
-        args = parser.parse_args()
-
-        streaming = args["response_mode"] == "streaming"
+        payload = CompletionMessagePayload.model_validate(web_ns.payload or {})
+        args = payload.model_dump(exclude_none=True)
+
+        streaming = payload.response_mode == "streaming"
         args["auto_generate_name"] = False

         try:

@@ -142,22 +155,7 @@ class CompletionStopApi(WebApiResource):
 class ChatApi(WebApiResource):
     @web_ns.doc("Create Chat Message")
     @web_ns.doc(description="Create a chat message for conversational applications.")
-    @web_ns.doc(
-        params={
-            "inputs": {"description": "Input variables for the chat", "type": "object", "required": True},
-            "query": {"description": "User query/message", "type": "string", "required": True},
-            "files": {"description": "Files to be processed", "type": "array", "required": False},
-            "response_mode": {
-                "description": "Response mode: blocking or streaming",
-                "type": "string",
-                "enum": ["blocking", "streaming"],
-                "required": False,
-            },
-            "conversation_id": {"description": "Conversation UUID", "type": "string", "required": False},
-            "parent_message_id": {"description": "Parent message UUID", "type": "string", "required": False},
-            "retriever_from": {"description": "Source of retriever", "type": "string", "required": False},
-        }
-    )
+    @web_ns.expect(web_ns.models[ChatMessagePayload.__name__])
     @web_ns.doc(
         responses={
             200: "Success",

@@ -173,20 +171,10 @@
         if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
             raise NotChatAppError()

-        parser = (
-            reqparse.RequestParser()
-            .add_argument("inputs", type=dict, required=True, location="json")
-            .add_argument("query", type=str, required=True, location="json")
-            .add_argument("files", type=list, required=False, location="json")
-            .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
-            .add_argument("conversation_id", type=uuid_value, location="json")
-            .add_argument("parent_message_id", type=uuid_value, required=False, location="json")
-            .add_argument("retriever_from", type=str, required=False, default="web_app", location="json")
-        )
-
-        args = parser.parse_args()
-
-        streaming = args["response_mode"] == "streaming"
+        payload = ChatMessagePayload.model_validate(web_ns.payload or {})
+        args = payload.model_dump(exclude_none=True)
+
+        streaming = payload.response_mode == "streaming"
        args["auto_generate_name"] = False

         try:
@@ -1,9 +1,12 @@
 import logging
 from typing import Literal

-from flask_restx import fields, marshal_with, reqparse
-from flask_restx.inputs import int_range
+from flask import request
+from flask_restx import fields, marshal_with
+from pydantic import BaseModel, Field, field_validator
 from werkzeug.exceptions import InternalServerError, NotFound

+from controllers.common.schema import register_schema_models
 from controllers.web import web_ns
 from controllers.web.error import (
     AppMoreLikeThisDisabledError,

@@ -38,6 +41,33 @@ from services.message_service import MessageService
 logger = logging.getLogger(__name__)


+class MessageListQuery(BaseModel):
+    conversation_id: str = Field(description="Conversation UUID")
+    first_id: str | None = Field(default=None, description="First message ID for pagination")
+    limit: int = Field(default=20, ge=1, le=100, description="Number of messages to return (1-100)")
+
+    @field_validator("conversation_id", "first_id")
+    @classmethod
+    def validate_uuid(cls, value: str | None) -> str | None:
+        if value is None:
+            return value
+        return uuid_value(value)
+
+
+class MessageFeedbackPayload(BaseModel):
+    rating: Literal["like", "dislike"] | None = Field(default=None, description="Feedback rating")
+    content: str | None = Field(default=None, description="Feedback content")
+
+
+class MessageMoreLikeThisQuery(BaseModel):
+    response_mode: Literal["blocking", "streaming"] = Field(
+        description="Response mode",
+    )
+
+
+register_schema_models(web_ns, MessageListQuery, MessageFeedbackPayload, MessageMoreLikeThisQuery)
+
+
 @web_ns.route("/messages")
 class MessageListApi(WebApiResource):
     message_fields = {

@@ -68,7 +98,11 @@ class MessageListApi(WebApiResource):
     @web_ns.doc(
         params={
             "conversation_id": {"description": "Conversation UUID", "type": "string", "required": True},
-            "first_id": {"description": "First message ID for pagination", "type": "string", "required": False},
+            "first_id": {
+                "description": "First message ID for pagination",
+                "type": "string",
+                "required": False,
+            },
             "limit": {
                 "description": "Number of messages to return (1-100)",
                 "type": "integer",

@@ -93,17 +127,12 @@
         if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
             raise NotChatAppError()

-        parser = (
-            reqparse.RequestParser()
-            .add_argument("conversation_id", required=True, type=uuid_value, location="args")
-            .add_argument("first_id", type=uuid_value, location="args")
-            .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
-        )
-        args = parser.parse_args()
+        raw_args = request.args.to_dict()
+        query = MessageListQuery.model_validate(raw_args)

         try:
             return MessageService.pagination_by_first_id(
-                app_model, end_user, args["conversation_id"], args["first_id"], args["limit"]
+                app_model, end_user, query.conversation_id, query.first_id, query.limit
             )
         except ConversationNotExistsError:
             raise NotFound("Conversation Not Exists.")

@@ -128,7 +157,7 @@ class MessageFeedbackApi(WebApiResource):
                 "enum": ["like", "dislike"],
                 "required": False,
             },
-            "content": {"description": "Feedback content/comment", "type": "string", "required": False},
+            "content": {"description": "Feedback content", "type": "string", "required": False},
         }
     )
     @web_ns.doc(

@@ -145,20 +174,15 @@
     def post(self, app_model, end_user, message_id):
         message_id = str(message_id)

-        parser = (
-            reqparse.RequestParser()
-            .add_argument("rating", type=str, choices=["like", "dislike", None], location="json")
-            .add_argument("content", type=str, location="json", default=None)
-        )
-        args = parser.parse_args()
+        payload = MessageFeedbackPayload.model_validate(web_ns.payload or {})

         try:
             MessageService.create_feedback(
                 app_model=app_model,
                 message_id=message_id,
                 user=end_user,
-                rating=args.get("rating"),
-                content=args.get("content"),
+                rating=payload.rating,
+                content=payload.content,
             )
         except MessageNotExistsError:
             raise NotFound("Message Not Exists.")

@@ -170,17 +194,7 @@
 class MessageMoreLikeThisApi(WebApiResource):
     @web_ns.doc("Generate More Like This")
     @web_ns.doc(description="Generate a new completion similar to an existing message (completion apps only).")
-    @web_ns.doc(
-        params={
-            "message_id": {"description": "Message UUID", "type": "string", "required": True},
-            "response_mode": {
-                "description": "Response mode",
-                "type": "string",
-                "enum": ["blocking", "streaming"],
-                "required": True,
-            },
-        }
-    )
+    @web_ns.expect(web_ns.models[MessageMoreLikeThisQuery.__name__])
     @web_ns.doc(
         responses={
             200: "Success",

@@ -197,12 +211,10 @@

         message_id = str(message_id)

-        parser = reqparse.RequestParser().add_argument(
-            "response_mode", type=str, required=True, choices=["blocking", "streaming"], location="args"
-        )
-        args = parser.parse_args()
+        raw_args = request.args.to_dict()
+        query = MessageMoreLikeThisQuery.model_validate(raw_args)

-        streaming = args["response_mode"] == "streaming"
+        streaming = query.response_mode == "streaming"

         try:
             response = AppGenerateService.generate_more_like_this(
@@ -105,8 +105,9 @@ class BaseAppGenerator:
             variable_entity.type in {VariableEntityType.FILE, VariableEntityType.FILE_LIST}
             and not variable_entity.required
         ):
-            # Treat empty string (frontend default) or empty list as unset
-            if not value and isinstance(value, (str, list)):
+            # Treat empty string (frontend default) as unset
+            # For FILE_LIST, allow empty list [] to pass through
+            if isinstance(value, str) and not value:
                 return None

         if variable_entity.type in {
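The one-line change above is subtle: previously an empty list for an optional FILE_LIST variable was coerced to `None`; now only the empty string (the frontend's default) counts as unset, so `[]` survives as an explicit empty list. A tiny standalone illustration of the behavioural difference (not the actual generator code):

```python
def normalize_old(value):
    # old rule: empty string OR empty list -> unset
    if not value and isinstance(value, (str, list)):
        return None
    return value


def normalize_new(value):
    # new rule: only the empty string is unset; [] passes through
    if isinstance(value, str) and not value:
        return None
    return value


print(normalize_old([]))  # None  (empty file list silently dropped)
print(normalize_new([]))  # []    (explicit empty list preserved)
print(normalize_new(""))  # None
```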
@@ -47,7 +47,11 @@ def build_protected_resource_metadata_discovery_urls(
     """
     Build a list of URLs to try for Protected Resource Metadata discovery.

-    Per SEP-985, supports fallback when discovery fails at one URL.
+    Per RFC 9728 Section 5.1, supports fallback when discovery fails at one URL.
+    Priority order:
+    1. URL from WWW-Authenticate header (if provided)
+    2. Well-known URI with path: https://example.com/.well-known/oauth-protected-resource/public/mcp
+    3. Well-known URI at root: https://example.com/.well-known/oauth-protected-resource
     """
     urls = []

@@ -58,9 +62,18 @@ def build_protected_resource_metadata_discovery_urls(
     # Fallback: construct from server URL
     parsed = urlparse(server_url)
     base_url = f"{parsed.scheme}://{parsed.netloc}"
-    fallback_url = urljoin(base_url, "/.well-known/oauth-protected-resource")
-    if fallback_url not in urls:
-        urls.append(fallback_url)
+    path = parsed.path.rstrip("/")
+
+    # Priority 2: With path insertion (e.g., /.well-known/oauth-protected-resource/public/mcp)
+    if path:
+        path_url = f"{base_url}/.well-known/oauth-protected-resource{path}"
+        if path_url not in urls:
+            urls.append(path_url)
+
+    # Priority 3: At root (e.g., /.well-known/oauth-protected-resource)
+    root_url = f"{base_url}/.well-known/oauth-protected-resource"
+    if root_url not in urls:
+        urls.append(root_url)

     return urls

@@ -71,30 +84,34 @@ def build_oauth_authorization_server_metadata_discovery_urls(auth_server_url: st

     Supports both OAuth 2.0 (RFC 8414) and OpenID Connect discovery.

-    Per RFC 8414 section 3:
-    - If issuer has no path: https://example.com/.well-known/oauth-authorization-server
-    - If issuer has path: https://example.com/.well-known/oauth-authorization-server{path}
-
-    Example:
-    - issuer: https://example.com/oauth
-    - metadata: https://example.com/.well-known/oauth-authorization-server/oauth
+    Per RFC 8414 section 3.1 and section 5, try all possible endpoints:
+    - OAuth 2.0 with path insertion: https://example.com/.well-known/oauth-authorization-server/tenant1
+    - OpenID Connect with path insertion: https://example.com/.well-known/openid-configuration/tenant1
+    - OpenID Connect path appending: https://example.com/tenant1/.well-known/openid-configuration
+    - OAuth 2.0 at root: https://example.com/.well-known/oauth-authorization-server
+    - OpenID Connect at root: https://example.com/.well-known/openid-configuration
     """
     urls = []
     base_url = auth_server_url or server_url

     parsed = urlparse(base_url)
     base = f"{parsed.scheme}://{parsed.netloc}"
-    path = parsed.path.rstrip("/")  # Remove trailing slash
+    path = parsed.path.rstrip("/")
+    # OAuth 2.0 Authorization Server Metadata at root (MCP-03-26)
+    urls.append(f"{base}/.well-known/oauth-authorization-server")

-    # Try OpenID Connect discovery first (more common)
-    urls.append(urljoin(base + "/", ".well-known/openid-configuration"))
+    # OpenID Connect Discovery at root
+    urls.append(f"{base}/.well-known/openid-configuration")

-    # OAuth 2.0 Authorization Server Metadata (RFC 8414)
-    # Include the path component if present in the issuer URL
-    if path:
-        urls.append(urljoin(base, f".well-known/oauth-authorization-server{path}"))
-    else:
-        urls.append(urljoin(base, ".well-known/oauth-authorization-server"))
+    # OpenID Connect Discovery with path insertion
+    urls.append(f"{base}/.well-known/openid-configuration{path}")
+
+    # OpenID Connect Discovery path appending
+    urls.append(f"{base}{path}/.well-known/openid-configuration")
+
+    # OAuth 2.0 Authorization Server Metadata with path insertion
+    urls.append(f"{base}/.well-known/oauth-authorization-server{path}")

     return urls

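The rewritten builders enumerate every well-known endpoint allowed by RFC 8414 and OpenID Connect discovery instead of committing to one. Assuming an issuer of `https://example.com/tenant1`, the candidate list produced by the authorization-server builder should look roughly like this — an illustrative reimplementation, not the module itself:

```python
from urllib.parse import urlparse


def candidate_urls(issuer: str) -> list[str]:
    parsed = urlparse(issuer)
    base = f"{parsed.scheme}://{parsed.netloc}"
    path = parsed.path.rstrip("/")
    return [
        f"{base}/.well-known/oauth-authorization-server",        # OAuth 2.0 at root
        f"{base}/.well-known/openid-configuration",              # OIDC at root
        f"{base}/.well-known/openid-configuration{path}",        # OIDC, path insertion
        f"{base}{path}/.well-known/openid-configuration",        # OIDC, path appending
        f"{base}/.well-known/oauth-authorization-server{path}",  # OAuth 2.0, path insertion
    ]


for url in candidate_urls("https://example.com/tenant1"):
    print(url)
```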
@@ -59,7 +59,7 @@ class MCPClient:
             try:
                 logger.debug("Not supported method %s found in URL path, trying default 'mcp' method.", method_name)
                 self.connect_server(sse_client, "sse")
-            except MCPConnectionError:
+            except (MCPConnectionError, ValueError):
                 logger.debug("MCP connection failed with 'sse', falling back to 'mcp' method.")
                 self.connect_server(streamablehttp_client, "mcp")

@@ -90,13 +90,17 @@ class Jieba(BaseKeyword):
         sorted_chunk_indices = self._retrieve_ids_by_query(keyword_table or {}, query, k)

         documents = []

+        segment_query_stmt = db.session.query(DocumentSegment).where(
+            DocumentSegment.dataset_id == self.dataset.id, DocumentSegment.index_node_id.in_(sorted_chunk_indices)
+        )
+        if document_ids_filter:
+            segment_query_stmt = segment_query_stmt.where(DocumentSegment.document_id.in_(document_ids_filter))
+
+        segments = db.session.execute(segment_query_stmt).scalars().all()
+        segment_map = {segment.index_node_id: segment for segment in segments}
         for chunk_index in sorted_chunk_indices:
-            segment_query = db.session.query(DocumentSegment).where(
-                DocumentSegment.dataset_id == self.dataset.id, DocumentSegment.index_node_id == chunk_index
-            )
-            if document_ids_filter:
-                segment_query = segment_query.where(DocumentSegment.document_id.in_(document_ids_filter))
-            segment = segment_query.first()
+            segment = segment_map.get(chunk_index)

             if segment:
                 documents.append(
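The Jieba change is a classic N+1 fix: instead of one `DocumentSegment` query per chunk index, a single `IN (...)` query fetches all segments, and a dict keyed by `index_node_id` restores per-chunk lookup while keeping the ranking order of `sorted_chunk_indices`. The shape of the pattern, detached from SQLAlchemy specifics:

```python
def fetch_segments_batched(fetch_all, chunk_indices: list[str]) -> list:
    """fetch_all(ids) is assumed to run one `WHERE index_node_id IN (...)` query."""
    segments = fetch_all(chunk_indices)                          # 1 query total
    by_node = {s["index_node_id"]: s for s in segments}          # O(1) lookups afterwards
    return [by_node[i] for i in chunk_indices if i in by_node]   # preserve ranking order


def fake_db(ids):
    return [{"index_node_id": i, "text": f"seg-{i}"} for i in ids if i != "missing"]


print(fetch_segments_batched(fake_db, ["b", "a", "missing"]))
```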
@@ -7,6 +7,7 @@ from sqlalchemy import select
 from sqlalchemy.orm import Session, load_only

 from configs import dify_config
+from core.db.session_factory import session_factory
 from core.model_manager import ModelManager
 from core.model_runtime.entities.model_entities import ModelType
 from core.rag.data_post_processor.data_post_processor import DataPostProcessor
@@ -138,37 +139,47 @@ class RetrievalService:

     @classmethod
     def _deduplicate_documents(cls, documents: list[Document]) -> list[Document]:
-        """Deduplicate documents based on doc_id to avoid duplicate chunks in hybrid search."""
+        """Deduplicate documents in O(n) while preserving first-seen order.
+
+        Rules:
+        - For provider == "dify" and metadata["doc_id"] exists: keep the doc with the highest
+          metadata["score"] among duplicates; if a later duplicate has no score, ignore it.
+        - For non-dify documents (or dify without doc_id): deduplicate by content key
+          (provider, page_content), keeping the first occurrence.
+        """
         if not documents:
             return documents

-        unique_documents = []
-        seen_doc_ids = set()
-
-        for document in documents:
-            # For dify provider documents, use doc_id for deduplication
-            if document.provider == "dify" and document.metadata is not None and "doc_id" in document.metadata:
-                doc_id = document.metadata["doc_id"]
-                if doc_id not in seen_doc_ids:
-                    seen_doc_ids.add(doc_id)
-                    unique_documents.append(document)
-                # If duplicate, keep the one with higher score
-                elif "score" in document.metadata:
-                    # Find existing document with same doc_id and compare scores
-                    for i, existing_doc in enumerate(unique_documents):
-                        if (
-                            existing_doc.metadata
-                            and existing_doc.metadata.get("doc_id") == doc_id
-                            and existing_doc.metadata.get("score", 0) < document.metadata.get("score", 0)
-                        ):
-                            unique_documents[i] = document
-                            break
-            else:
-                # For non-dify documents, use content-based deduplication
-                if document not in unique_documents:
-                    unique_documents.append(document)
-
-        return unique_documents
+        # Map of dedup key -> chosen Document
+        chosen: dict[tuple, Document] = {}
+        # Preserve the order of first appearance of each dedup key
+        order: list[tuple] = []
+
+        for doc in documents:
+            is_dify = doc.provider == "dify"
+            doc_id = (doc.metadata or {}).get("doc_id") if is_dify else None
+
+            if is_dify and doc_id:
+                key = ("dify", doc_id)
+                if key not in chosen:
+                    chosen[key] = doc
+                    order.append(key)
+                else:
+                    # Only replace if the new one has a score and it's strictly higher
+                    if "score" in doc.metadata:
+                        new_score = float(doc.metadata.get("score", 0.0))
+                        old_score = float(chosen[key].metadata.get("score", 0.0)) if chosen[key].metadata else 0.0
+                        if new_score > old_score:
+                            chosen[key] = doc
+            else:
+                # Content-based dedup for non-dify or dify without doc_id
+                content_key = (doc.provider or "dify", doc.page_content)
+                if content_key not in chosen:
+                    chosen[content_key] = doc
+                    order.append(content_key)
+                # If duplicate content appears, we keep the first occurrence (no score comparison)
+
+        return [chosen[k] for k in order]

     @classmethod
     def _get_dataset(cls, dataset_id: str) -> Dataset | None:
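The rewritten `_deduplicate_documents` replaces a nested scan (quadratic when duplicates are common) with a single pass over a dict, while a separate `order` list preserves first-seen ranking even when a later, higher-scored duplicate wins. The core idea, reduced to plain dicts so it runs standalone:

```python
def dedup(docs: list[dict]) -> list[dict]:
    chosen: dict[tuple, dict] = {}
    order: list[tuple] = []
    for doc in docs:
        doc_id = doc.get("doc_id")
        key = ("dify", doc_id) if doc_id else ("content", doc["content"])
        if key not in chosen:
            chosen[key] = doc
            order.append(key)
        elif doc.get("score", 0.0) > chosen[key].get("score", 0.0):
            chosen[key] = doc  # keep the higher-scored duplicate, position unchanged
    return [chosen[k] for k in order]


docs = [
    {"doc_id": "a", "content": "x", "score": 0.4},
    {"doc_id": "a", "content": "x", "score": 0.9},  # replaces the first, keeps slot 0
    {"content": "y"},
]
print(dedup(docs))
```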
@@ -371,58 +382,96 @@
         include_segment_ids = set()
         segment_child_map = {}
         segment_file_map = {}
-        with Session(bind=db.engine, expire_on_commit=False) as session:
-            # Process documents
-            for document in documents:
-                segment_id = None
-                attachment_info = None
-                child_chunk = None
-                document_id = document.metadata.get("document_id")
-                if document_id not in dataset_documents:
-                    continue
-
-                dataset_document = dataset_documents[document_id]
-                if not dataset_document:
-                    continue
-
-                if dataset_document.doc_form == IndexStructureType.PARENT_CHILD_INDEX:
-                    # Handle parent-child documents
-                    if document.metadata.get("doc_type") == DocType.IMAGE:
-                        attachment_info_dict = cls.get_segment_attachment_info(
-                            dataset_document.dataset_id,
-                            dataset_document.tenant_id,
-                            document.metadata.get("doc_id") or "",
-                            session,
-                        )
-                        if attachment_info_dict:
-                            attachment_info = attachment_info_dict["attachment_info"]
-                            segment_id = attachment_info_dict["segment_id"]
-                    else:
-                        child_index_node_id = document.metadata.get("doc_id")
-                        child_chunk_stmt = select(ChildChunk).where(ChildChunk.index_node_id == child_index_node_id)
-                        child_chunk = session.scalar(child_chunk_stmt)
-
-                        if not child_chunk:
-                            continue
-                        segment_id = child_chunk.segment_id
-
-                    if not segment_id:
-                        continue
-
-                    segment = (
-                        session.query(DocumentSegment)
-                        .where(
-                            DocumentSegment.dataset_id == dataset_document.dataset_id,
-                            DocumentSegment.enabled == True,
-                            DocumentSegment.status == "completed",
-                            DocumentSegment.id == segment_id,
-                        )
-                        .first()
-                    )
-
-                    if not segment:
-                        continue
+        valid_dataset_documents = {}
+        image_doc_ids = []
+        child_index_node_ids = []
+        index_node_ids = []
+        doc_to_document_map = {}
+        for document in documents:
+            document_id = document.metadata.get("document_id")
+            if document_id not in dataset_documents:
+                continue
+
+            dataset_document = dataset_documents[document_id]
+            if not dataset_document:
+                continue
+            valid_dataset_documents[document_id] = dataset_document
+
+            if dataset_document.doc_form == IndexStructureType.PARENT_CHILD_INDEX:
+                doc_id = document.metadata.get("doc_id") or ""
+                doc_to_document_map[doc_id] = document
+                if document.metadata.get("doc_type") == DocType.IMAGE:
+                    image_doc_ids.append(doc_id)
+                else:
+                    child_index_node_ids.append(doc_id)
+            else:
+                doc_id = document.metadata.get("doc_id") or ""
+                doc_to_document_map[doc_id] = document
+                if document.metadata.get("doc_type") == DocType.IMAGE:
+                    image_doc_ids.append(doc_id)
+                else:
+                    index_node_ids.append(doc_id)
+
+        image_doc_ids = [i for i in image_doc_ids if i]
+        child_index_node_ids = [i for i in child_index_node_ids if i]
+        index_node_ids = [i for i in index_node_ids if i]
+
+        segment_ids = []
+        index_node_segments: list[DocumentSegment] = []
+        segments: list[DocumentSegment] = []
+        attachment_map = {}
+        child_chunk_map = {}
+        doc_segment_map = {}
+
+        with session_factory.create_session() as session:
+            attachments = cls.get_segment_attachment_infos(image_doc_ids, session)
+
+            for attachment in attachments:
+                segment_ids.append(attachment["segment_id"])
+                attachment_map[attachment["segment_id"]] = attachment
+                doc_segment_map[attachment["segment_id"]] = attachment["attachment_id"]
+
+            child_chunk_stmt = select(ChildChunk).where(ChildChunk.index_node_id.in_(child_index_node_ids))
+            child_index_nodes = session.execute(child_chunk_stmt).scalars().all()
+
+            for i in child_index_nodes:
+                segment_ids.append(i.segment_id)
+                child_chunk_map[i.segment_id] = i
+                doc_segment_map[i.segment_id] = i.index_node_id
+
+            if index_node_ids:
+                document_segment_stmt = select(DocumentSegment).where(
+                    DocumentSegment.enabled == True,
+                    DocumentSegment.status == "completed",
+                    DocumentSegment.index_node_id.in_(index_node_ids),
+                )
+                index_node_segments = session.execute(document_segment_stmt).scalars().all()  # type: ignore
+                for index_node_segment in index_node_segments:
+                    doc_segment_map[index_node_segment.id] = index_node_segment.index_node_id
+            if segment_ids:
+                document_segment_stmt = select(DocumentSegment).where(
+                    DocumentSegment.enabled == True,
+                    DocumentSegment.status == "completed",
+                    DocumentSegment.id.in_(segment_ids),
+                )
+                segments = session.execute(document_segment_stmt).scalars().all()  # type: ignore
+
+            if index_node_segments:
+                segments.extend(index_node_segments)
+
+            for segment in segments:
+                doc_id = doc_segment_map.get(segment.id)
+                child_chunk = child_chunk_map.get(segment.id)
+                attachment_info = attachment_map.get(segment.id)
+
+                if doc_id:
+                    document = doc_to_document_map[doc_id]
+                ds_dataset_document: DatasetDocument | None = valid_dataset_documents.get(
+                    document.metadata.get("document_id")
+                )
+
+                if ds_dataset_document and ds_dataset_document.doc_form == IndexStructureType.PARENT_CHILD_INDEX:
                     if segment.id not in include_segment_ids:
                         include_segment_ids.add(segment.id)
                         if child_chunk:
@@ -430,10 +479,10 @@
                                 "id": child_chunk.id,
                                 "content": child_chunk.content,
                                 "position": child_chunk.position,
-                                "score": document.metadata.get("score", 0.0),
+                                "score": document.metadata.get("score", 0.0) if document else 0.0,
                             }
                             map_detail = {
-                                "max_score": document.metadata.get("score", 0.0),
+                                "max_score": document.metadata.get("score", 0.0) if document else 0.0,
                                 "child_chunks": [child_chunk_detail],
                             }
                             segment_child_map[segment.id] = map_detail
@@ -452,13 +501,14 @@
                                 "score": document.metadata.get("score", 0.0),
                             }
                             if segment.id in segment_child_map:
-                                segment_child_map[segment.id]["child_chunks"].append(child_chunk_detail)
+                                segment_child_map[segment.id]["child_chunks"].append(child_chunk_detail)  # type: ignore
                                 segment_child_map[segment.id]["max_score"] = max(
-                                    segment_child_map[segment.id]["max_score"], document.metadata.get("score", 0.0)
+                                    segment_child_map[segment.id]["max_score"],
+                                    document.metadata.get("score", 0.0) if document else 0.0,
                                 )
                             else:
                                 segment_child_map[segment.id] = {
-                                    "max_score": document.metadata.get("score", 0.0),
+                                    "max_score": document.metadata.get("score", 0.0) if document else 0.0,
                                     "child_chunks": [child_chunk_detail],
                                 }
                             if attachment_info:
@@ -467,46 +517,11 @@
                             else:
                                 segment_file_map[segment.id] = [attachment_info]
                 else:
-                    # Handle normal documents
-                    segment = None
-                    if document.metadata.get("doc_type") == DocType.IMAGE:
-                        attachment_info_dict = cls.get_segment_attachment_info(
-                            dataset_document.dataset_id,
-                            dataset_document.tenant_id,
-                            document.metadata.get("doc_id") or "",
-                            session,
-                        )
-                        if attachment_info_dict:
-                            attachment_info = attachment_info_dict["attachment_info"]
-                            segment_id = attachment_info_dict["segment_id"]
-                            document_segment_stmt = select(DocumentSegment).where(
-                                DocumentSegment.dataset_id == dataset_document.dataset_id,
-                                DocumentSegment.enabled == True,
-                                DocumentSegment.status == "completed",
-                                DocumentSegment.id == segment_id,
-                            )
-                            segment = session.scalar(document_segment_stmt)
-                            if segment:
-                                segment_file_map[segment.id] = [attachment_info]
-                    else:
-                        index_node_id = document.metadata.get("doc_id")
-                        if not index_node_id:
-                            continue
-                        document_segment_stmt = select(DocumentSegment).where(
-                            DocumentSegment.dataset_id == dataset_document.dataset_id,
-                            DocumentSegment.enabled == True,
-                            DocumentSegment.status == "completed",
-                            DocumentSegment.index_node_id == index_node_id,
-                        )
-                        segment = session.scalar(document_segment_stmt)
-
-                    if not segment:
-                        continue
                     if segment.id not in include_segment_ids:
                         include_segment_ids.add(segment.id)
                         record = {
                             "segment": segment,
-                            "score": document.metadata.get("score"),  # type: ignore
+                            "score": document.metadata.get("score", 0.0),  # type: ignore
                         }
                         if attachment_info:
                             segment_file_map[segment.id] = [attachment_info]
@@ -522,7 +537,7 @@
         for record in records:
             if record["segment"].id in segment_child_map:
                 record["child_chunks"] = segment_child_map[record["segment"].id].get("child_chunks")  # type: ignore
-                record["score"] = segment_child_map[record["segment"].id]["max_score"]
+                record["score"] = segment_child_map[record["segment"].id]["max_score"]  # type: ignore
             if record["segment"].id in segment_file_map:
                 record["files"] = segment_file_map[record["segment"].id]  # type: ignore[assignment]

@@ -565,6 +580,8 @@
         flask_app: Flask,
         retrieval_method: RetrievalMethod,
         dataset: Dataset,
+        all_documents: list[Document],
+        exceptions: list[str],
         query: str | None = None,
         top_k: int = 4,
         score_threshold: float | None = 0.0,
@@ -573,8 +590,6 @@
         weights: dict | None = None,
         document_ids_filter: list[str] | None = None,
         attachment_id: str | None = None,
-        all_documents: list[Document] = [],
-        exceptions: list[str] = [],
     ):
         if not query and not attachment_id:
             return
@@ -696,3 +711,37 @@
             }
             return {"attachment_info": attachment_info, "segment_id": attachment_binding.segment_id}
         return None
+
+    @classmethod
+    def get_segment_attachment_infos(cls, attachment_ids: list[str], session: Session) -> list[dict[str, Any]]:
+        attachment_infos = []
+        upload_files = session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).all()
+        if upload_files:
+            upload_file_ids = [upload_file.id for upload_file in upload_files]
+            attachment_bindings = (
+                session.query(SegmentAttachmentBinding)
+                .where(SegmentAttachmentBinding.attachment_id.in_(upload_file_ids))
+                .all()
+            )
+            attachment_binding_map = {binding.attachment_id: binding for binding in attachment_bindings}
+
+            if attachment_bindings:
+                for upload_file in upload_files:
+                    attachment_binding = attachment_binding_map.get(upload_file.id)
+                    attachment_info = {
+                        "id": upload_file.id,
+                        "name": upload_file.name,
+                        "extension": "." + upload_file.extension,
+                        "mime_type": upload_file.mime_type,
+                        "source_url": sign_upload_file(upload_file.id, upload_file.extension),
+                        "size": upload_file.size,
+                    }
+                    if attachment_binding:
+                        attachment_infos.append(
+                            {
+                                "attachment_id": attachment_binding.attachment_id,
+                                "attachment_info": attachment_info,
+                                "segment_id": attachment_binding.segment_id,
+                            }
+                        )
+        return attachment_infos
@@ -289,7 +289,8 @@ class OracleVector(BaseVector):
         words = pseg.cut(query)
         current_entity = ""
         for word, pos in words:
-            if pos in {"nr", "Ng", "eng", "nz", "n", "ORG", "v"}:  # nr: 人名,ns: 地名,nt: 机构名
+            # `nr`: Person, `ns`: Location, `nt`: Organization
+            if pos in {"nr", "Ng", "eng", "nz", "n", "ORG", "v"}:
                 current_entity += word
             else:
                 if current_entity:
@@ -213,7 +213,7 @@ class VastbaseVector(BaseVector):

         with self._get_cursor() as cur:
             cur.execute(SQL_CREATE_TABLE.format(table_name=self.table_name, dimension=dimension))
-            # Vastbase 支持的向量维度取值范围为 [1,16000]
+            # Vastbase supports vector dimensions in the range [1, 16,000]
             if dimension <= 16000:
                 cur.execute(SQL_CREATE_INDEX.format(table_name=self.table_name))
             redis_client.set(collection_exist_cache_key, 1, ex=3600)
@@ -83,6 +83,7 @@ class WordExtractor(BaseExtractor):
     def _extract_images_from_docx(self, doc):
         image_count = 0
         image_map = {}
+        base_url = dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL

         for r_id, rel in doc.part.rels.items():
             if "image" in rel.target_ref:

@@ -121,8 +122,7 @@
                         used_at=naive_utc_now(),
                     )
                     db.session.add(upload_file)
-                    # Use r_id as key for external images since target_part is undefined
-                    image_map[r_id] = f""
+                    image_map[r_id] = f""
                 else:
                     image_ext = rel.target_ref.split(".")[-1]
                     if image_ext is None:

@@ -150,10 +150,7 @@
                         used_at=naive_utc_now(),
                     )
                     db.session.add(upload_file)
-                    # Use target_part as key for internal images
-                    image_map[rel.target_part] = (
-                        f""
-                    )
+                    image_map[rel.target_part] = f""
         db.session.commit()
         return image_map

@@ -231,7 +231,7 @@ class BaseIndexProcessor(ABC):

         if not filename:
             parsed_url = urlparse(image_url)
-            # unquote 处理 URL 中的中文
+            # Decode percent-encoded characters in the URL path.
             path = unquote(parsed_url.path)
             filename = os.path.basename(path)

@@ -151,20 +151,14 @@ class DatasetRetrieval:
         if ModelFeature.TOOL_CALL in features or ModelFeature.MULTI_TOOL_CALL in features:
             planning_strategy = PlanningStrategy.ROUTER
         available_datasets = []
-        for dataset_id in dataset_ids:
-            # get dataset from dataset id
-            dataset_stmt = select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id)
-            dataset = db.session.scalar(dataset_stmt)
-
-            # pass if dataset is not available
-            if not dataset:
-                continue
-
-            # pass if dataset is not available
-            if dataset and dataset.available_document_count == 0 and dataset.provider != "external":
-                continue
+        dataset_stmt = select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id.in_(dataset_ids))
+        datasets: list[Dataset] = db.session.execute(dataset_stmt).scalars().all()  # type: ignore
+        for dataset in datasets:
+            if dataset.available_document_count == 0 and dataset.provider != "external":
+                continue

             available_datasets.append(dataset)

         if inputs:
             inputs = {key: str(value) for key, value in inputs.items()}
         else:

@@ -282,26 +276,35 @@
             )
             context_files.append(attachment_info)
         if show_retrieve_source:
+            dataset_ids = [record.segment.dataset_id for record in records]
+            document_ids = [record.segment.document_id for record in records]
+            dataset_document_stmt = select(DatasetDocument).where(
+                DatasetDocument.id.in_(document_ids),
+                DatasetDocument.enabled == True,
+                DatasetDocument.archived == False,
+            )
+            documents = db.session.execute(dataset_document_stmt).scalars().all()  # type: ignore
+            dataset_stmt = select(Dataset).where(
+                Dataset.id.in_(dataset_ids),
+            )
+            datasets = db.session.execute(dataset_stmt).scalars().all()  # type: ignore
+            dataset_map = {i.id: i for i in datasets}
+            document_map = {i.id: i for i in documents}
             for record in records:
                 segment = record.segment
-                dataset = db.session.query(Dataset).filter_by(id=segment.dataset_id).first()
-                dataset_document_stmt = select(DatasetDocument).where(
-                    DatasetDocument.id == segment.document_id,
-                    DatasetDocument.enabled == True,
-                    DatasetDocument.archived == False,
-                )
-                document = db.session.scalar(dataset_document_stmt)
-                if dataset and document:
+                dataset_item = dataset_map.get(segment.dataset_id)
+                document_item = document_map.get(segment.document_id)
+                if dataset_item and document_item:
                     source = RetrievalSourceMetadata(
-                        dataset_id=dataset.id,
-                        dataset_name=dataset.name,
-                        document_id=document.id,
-                        document_name=document.name,
-                        data_source_type=document.data_source_type,
+                        dataset_id=dataset_item.id,
+                        dataset_name=dataset_item.name,
+                        document_id=document_item.id,
+                        document_name=document_item.name,
+                        data_source_type=document_item.data_source_type,
                         segment_id=segment.id,
                         retriever_from=invoke_from.to_source(),
                         score=record.score or 0.0,
-                        doc_metadata=document.doc_metadata,
+                        doc_metadata=document_item.doc_metadata,
                     )

                     if invoke_from.to_source() == "dev":
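Both hunks in this file trade per-record queries for bulk queries plus dict lookups, which keeps the source-attribution loop free of database round-trips. The skeleton of that refactor, with hypothetical `fetch_*` callables standing in for the SQLAlchemy statements:

```python
def attach_sources(records, fetch_datasets, fetch_documents):
    dataset_ids = [r["dataset_id"] for r in records]
    document_ids = [r["document_id"] for r in records]

    # 2 queries total, regardless of len(records)
    dataset_map = {d["id"]: d for d in fetch_datasets(dataset_ids)}
    document_map = {d["id"]: d for d in fetch_documents(document_ids)}

    sources = []
    for r in records:  # pure dict lookups from here on
        ds = dataset_map.get(r["dataset_id"])
        doc = document_map.get(r["document_id"])
        if ds and doc:
            sources.append({"dataset": ds["name"], "document": doc["name"]})
    return sources


def fake_fetch(ids):
    return [{"id": i, "name": f"name-{i}"} for i in ids]


print(attach_sources([{"dataset_id": 1, "document_id": 2}], fake_fetch, fake_fetch))
```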
@ -86,6 +86,11 @@ class Executor:
|
|||
node_data.authorization.config.api_key = variable_pool.convert_template(
|
||||
node_data.authorization.config.api_key
|
||||
).text
|
||||
# Validate that API key is not empty after template conversion
|
||||
if not node_data.authorization.config.api_key or not node_data.authorization.config.api_key.strip():
|
||||
raise AuthorizationConfigError(
|
||||
"API key is required for authorization but was empty. Please provide a valid API key."
|
||||
)
|
||||
|
||||
self.url = node_data.url
|
||||
self.method = node_data.method
|
||||
|
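The new guard runs after template conversion, so a key that renders to whitespace is rejected as well. A standalone illustration of the check (helper name hypothetical):

```python
def require_api_key(api_key: str | None) -> str:
    # "" and "   " both fail: `not api_key` catches None/empty,
    # `not api_key.strip()` catches whitespace-only values.
    if not api_key or not api_key.strip():
        raise ValueError("API key is required for authorization but was empty.")
    return api_key


assert require_api_key("token-123") == "token-123"
```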

@@ -26,6 +26,7 @@ class AliyunOssStorage(BaseStorage):
             self.bucket_name,
             connect_timeout=30,
             region=region,
+            cloudbox_id=dify_config.ALIYUN_CLOUDBOX_ID,
         )

     def save(self, filename, data):

@@ -17,6 +17,7 @@ class HuaweiObsStorage(BaseStorage):
             access_key_id=dify_config.HUAWEI_OBS_ACCESS_KEY,
             secret_access_key=dify_config.HUAWEI_OBS_SECRET_KEY,
             server=dify_config.HUAWEI_OBS_SERVER,
+            path_style=dify_config.HUAWEI_OBS_PATH_STYLE,
         )

     def save(self, filename, data):

@@ -11,6 +11,7 @@ from collections.abc import Generator, Mapping
 from datetime import datetime
 from hashlib import sha256
 from typing import TYPE_CHECKING, Annotated, Any, Optional, Union, cast
+from uuid import UUID
 from zoneinfo import available_timezones

 from flask import Response, stream_with_context

@@ -119,6 +120,19 @@ def uuid_value(value: Any) -> str:
         raise ValueError(error)


+def normalize_uuid(value: str | UUID) -> str:
+    if not value:
+        return ""
+
+    try:
+        return uuid_value(value)
+    except ValueError as exc:
+        raise ValueError("must be a valid UUID") from exc
+
+
+UUIDStrOrEmpty = Annotated[str, AfterValidator(normalize_uuid)]
+
+
 def alphanumeric(value: str):
     # check if the value is alphanumeric and underlined
     if re.match(r"^[a-zA-Z0-9_]+$", value):
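`UUIDStrOrEmpty` is a Pydantic v2 `Annotated` alias: the `AfterValidator` runs after the base `str` check, so empty strings pass through and anything else must parse as a UUID. A minimal usage sketch (model and field names hypothetical; the validator below is a simplified stand-in for the helper added above):

```python
import uuid
from typing import Annotated

from pydantic import AfterValidator, BaseModel, ValidationError


def normalize_uuid(value: str) -> str:
    if not value:
        return ""
    try:
        return str(uuid.UUID(value))
    except ValueError as exc:
        raise ValueError("must be a valid UUID") from exc


UUIDStrOrEmpty = Annotated[str, AfterValidator(normalize_uuid)]


class ConversationQuery(BaseModel):
    conversation_id: UUIDStrOrEmpty = ""


ConversationQuery(conversation_id="")  # empty stays empty
ConversationQuery(conversation_id="123e4567-e89b-12d3-a456-426614174000")  # normalized
try:
    ConversationQuery(conversation_id="not-a-uuid")
except ValidationError:
    pass  # rejected with "must be a valid UUID"
```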

@@ -69,7 +69,7 @@ dependencies = [
     "pydantic-extra-types~=2.10.3",
     "pydantic-settings~=2.11.0",
     "pyjwt~=2.10.1",
-    "pypdfium2==4.30.0",
+    "pypdfium2==5.2.0",
     "python-docx~=1.1.0",
     "python-dotenv==1.0.1",
     "pyyaml~=6.0.1",
@@ -6,7 +6,9 @@ from typing import Any, Union
 from sqlalchemy import asc, desc, func, or_, select
 from sqlalchemy.orm import Session

+from configs import dify_config
 from core.app.entities.app_invoke_entities import InvokeFrom
+from core.db.session_factory import session_factory
 from core.llm_generator.llm_generator import LLMGenerator
 from core.variables.types import SegmentType
 from core.workflow.nodes.variable_assigner.common.impl import conversation_variable_updater_factory

@@ -202,6 +204,7 @@ class ConversationService:
         user: Union[Account, EndUser] | None,
         limit: int,
         last_id: str | None,
+        variable_name: str | None = None,
     ) -> InfiniteScrollPagination:
         conversation = cls.get_conversation(app_model, conversation_id, user)

@@ -212,7 +215,25 @@ class ConversationService:
             .order_by(ConversationVariable.created_at)
         )

-        with Session(db.engine) as session:
+        # Apply variable_name filter if provided
+        if variable_name:
+            # Filter using JSON extraction to match variable names case-insensitively
+            escaped_variable_name = variable_name.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")
+            if dify_config.DB_TYPE in ["mysql", "oceanbase", "seekdb"]:
+                stmt = stmt.where(
+                    func.json_extract(ConversationVariable.data, "$.name").ilike(
+                        f"%{escaped_variable_name}%", escape="\\"
+                    )
+                )
+            elif dify_config.DB_TYPE == "postgresql":
+                stmt = stmt.where(
+                    func.json_extract_path_text(ConversationVariable.data, "name").ilike(
+                        f"%{escaped_variable_name}%", escape="\\"
+                    )
+                )
+
+        with session_factory.create_session() as session:
             if last_id:
                 last_variable = session.scalar(stmt.where(ConversationVariable.id == last_id))
                 if not last_variable:

@@ -279,7 +300,7 @@ class ConversationService:
             .where(ConversationVariable.id == variable_id)
         )

-        with Session(db.engine) as session:
+        with session_factory.create_session() as session:
             existing_variable = session.scalar(stmt)
             if not existing_variable:
                 raise ConversationVariableNotExistsError()
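MySQL-family databases and PostgreSQL expose different JSON accessors, hence the `DB_TYPE` branch; LIKE metacharacters in the user-supplied name are escaped first so they match literally. The same branching, reduced to a helper (the function shape is hypothetical, the SQL functions are the ones used above):

```python
from sqlalchemy import func


def filter_by_variable_name(stmt, data_column, variable_name: str, db_type: str):
    # Escape LIKE metacharacters so user input is matched literally.
    escaped = variable_name.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")
    pattern = f"%{escaped}%"
    if db_type in ("mysql", "oceanbase", "seekdb"):
        # MySQL-style JSON path accessor: json_extract(col, '$.name')
        return stmt.where(func.json_extract(data_column, "$.name").ilike(pattern, escape="\\"))
    if db_type == "postgresql":
        # PostgreSQL: json_extract_path_text(col, 'name')
        return stmt.where(func.json_extract_path_text(data_column, "name").ilike(pattern, escape="\\"))
    return stmt
```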

@@ -15,7 +15,6 @@ from sqlalchemy.orm import Session
 from core.entities.mcp_provider import MCPAuthentication, MCPConfiguration, MCPProviderEntity
 from core.helper import encrypter
 from core.helper.provider_cache import NoOpProviderCredentialCache
 from core.helper.tool_provider_cache import ToolProviderListCache
 from core.mcp.auth.auth_flow import auth
-from core.mcp.auth_client import MCPClientWithAuthRetry
 from core.mcp.error import MCPAuthError, MCPError

@@ -65,6 +64,15 @@ class ServerUrlValidationResult(BaseModel):
         return self.needs_validation and self.validation_passed and self.reconnect_result is not None


+class ProviderUrlValidationData(BaseModel):
+    """Data required for URL validation, extracted from the database to perform network operations outside of a session"""
+
+    current_server_url_hash: str
+    headers: dict[str, str]
+    timeout: float | None
+    sse_read_timeout: float | None
+
+
 class MCPToolManageService:
     """Service class for managing MCP tools and providers."""


@@ -166,9 +174,6 @@ class MCPToolManageService:
         self._session.add(mcp_tool)
         self._session.flush()

-        # Invalidate tool providers cache
-        ToolProviderListCache.invalidate_cache(tenant_id)
-
         mcp_providers = ToolTransformService.mcp_provider_to_user_provider(mcp_tool, for_list=True)
         return mcp_providers


@@ -192,7 +197,7 @@ class MCPToolManageService:
         Update an MCP provider.

         Args:
-            validation_result: Pre-validation result from validate_server_url_change.
+            validation_result: Pre-validation result from validate_server_url_standalone.
                                If provided and contains reconnect_result, it will be used
                                instead of performing network operations.
         """

@@ -251,8 +256,6 @@ class MCPToolManageService:
             # Flush changes to database
             self._session.flush()

-            # Invalidate tool providers cache
-            ToolProviderListCache.invalidate_cache(tenant_id)
         except IntegrityError as e:
             self._handle_integrity_error(e, name, server_url, server_identifier)


@@ -261,9 +264,6 @@ class MCPToolManageService:
         mcp_tool = self.get_provider(provider_id=provider_id, tenant_id=tenant_id)
         self._session.delete(mcp_tool)

-        # Invalidate tool providers cache
-        ToolProviderListCache.invalidate_cache(tenant_id)
-
     def list_providers(
         self, *, tenant_id: str, for_list: bool = False, include_sensitive: bool = True
     ) -> list[ToolProviderApiEntity]:

@@ -546,30 +546,39 @@ class MCPToolManageService:
         )
         return self.execute_auth_actions(auth_result)

-    def _reconnect_provider(self, *, server_url: str, provider: MCPToolProvider) -> ReconnectResult:
-        """Attempt to reconnect to MCP provider with new server URL."""
-        provider_entity = provider.to_entity()
-        headers = provider_entity.headers
-        try:
-            tools = self._retrieve_remote_mcp_tools(server_url, headers, provider_entity)
-            return ReconnectResult(
-                authed=True,
-                tools=json.dumps([tool.model_dump() for tool in tools]),
-                encrypted_credentials=EMPTY_CREDENTIALS_JSON,
-            )
-        except MCPAuthError:
-            return ReconnectResult(authed=False, tools=EMPTY_TOOLS_JSON, encrypted_credentials=EMPTY_CREDENTIALS_JSON)
-        except MCPError as e:
-            raise ValueError(f"Failed to re-connect MCP server: {e}") from e
-
-    def validate_server_url_change(
-        self, *, tenant_id: str, provider_id: str, new_server_url: str
+    def get_provider_for_url_validation(self, *, tenant_id: str, provider_id: str) -> ProviderUrlValidationData:
+        """
+        Get provider data required for URL validation.
+        This method performs a database read and should be called within a session.
+
+        Returns:
+            ProviderUrlValidationData: Data needed for standalone URL validation
+        """
+        provider = self.get_provider(provider_id=provider_id, tenant_id=tenant_id)
+        provider_entity = provider.to_entity()
+        return ProviderUrlValidationData(
+            current_server_url_hash=provider.server_url_hash,
+            headers=provider_entity.headers,
+            timeout=provider_entity.timeout,
+            sse_read_timeout=provider_entity.sse_read_timeout,
+        )
+
+    @staticmethod
+    def validate_server_url_standalone(
+        *,
+        tenant_id: str,
+        new_server_url: str,
+        validation_data: ProviderUrlValidationData,
     ) -> ServerUrlValidationResult:
         """
         Validate server URL change by attempting to connect to the new server.
-        This method should be called BEFORE update_provider to perform network operations
-        outside of the database transaction.
+        This method performs network operations and MUST be called OUTSIDE of any database session
+        to avoid holding locks during network I/O.

         Args:
             tenant_id: Tenant ID for encryption
             new_server_url: The new server URL to validate
+            validation_data: Provider data obtained from get_provider_for_url_validation

         Returns:
             ServerUrlValidationResult: Validation result with connection status and tools if successful

@@ -579,25 +588,30 @@ class MCPToolManageService:
             return ServerUrlValidationResult(needs_validation=False)

         # Validate URL format
-        if not self._is_valid_url(new_server_url):
+        parsed = urlparse(new_server_url)
+        if not all([parsed.scheme, parsed.netloc]) or parsed.scheme not in ["http", "https"]:
             raise ValueError("Server URL is not valid.")

         # Always encrypt and hash the URL
         encrypted_server_url = encrypter.encrypt_token(tenant_id, new_server_url)
         new_server_url_hash = hashlib.sha256(new_server_url.encode()).hexdigest()

-        # Get current provider
-        provider = self.get_provider(provider_id=provider_id, tenant_id=tenant_id)
-
         # Check if URL is actually different
-        if new_server_url_hash == provider.server_url_hash:
+        if new_server_url_hash == validation_data.current_server_url_hash:
             # URL hasn't changed, but still return the encrypted data
             return ServerUrlValidationResult(
-                needs_validation=False, encrypted_server_url=encrypted_server_url, server_url_hash=new_server_url_hash
+                needs_validation=False,
+                encrypted_server_url=encrypted_server_url,
+                server_url_hash=new_server_url_hash,
             )

-        # Perform validation by attempting to connect
-        reconnect_result = self._reconnect_provider(server_url=new_server_url, provider=provider)
+        # Perform network validation - this is the expensive operation that should be outside the session
+        reconnect_result = MCPToolManageService._reconnect_with_url(
+            server_url=new_server_url,
+            headers=validation_data.headers,
+            timeout=validation_data.timeout,
+            sse_read_timeout=validation_data.sse_read_timeout,
+        )
         return ServerUrlValidationResult(
             needs_validation=True,
             validation_passed=True,

@@ -606,6 +620,38 @@ class MCPToolManageService:
             server_url_hash=new_server_url_hash,
         )

+    @staticmethod
+    def _reconnect_with_url(
+        *,
+        server_url: str,
+        headers: dict[str, str],
+        timeout: float | None,
+        sse_read_timeout: float | None,
+    ) -> ReconnectResult:
+        """
+        Attempt to connect to MCP server with given URL.
+        This is a static method that performs network I/O without database access.
+        """
+        from core.mcp.mcp_client import MCPClient
+
+        try:
+            with MCPClient(
+                server_url=server_url,
+                headers=headers,
+                timeout=timeout,
+                sse_read_timeout=sse_read_timeout,
+            ) as mcp_client:
+                tools = mcp_client.list_tools()
+                return ReconnectResult(
+                    authed=True,
+                    tools=json.dumps([tool.model_dump() for tool in tools]),
+                    encrypted_credentials=EMPTY_CREDENTIALS_JSON,
+                )
+        except MCPAuthError:
+            return ReconnectResult(authed=False, tools=EMPTY_TOOLS_JSON, encrypted_credentials=EMPTY_CREDENTIALS_JSON)
+        except MCPError as e:
+            raise ValueError(f"Failed to re-connect MCP server: {e}") from e
+
     def _build_tool_provider_response(
         self, db_provider: MCPToolProvider, provider_entity: MCPProviderEntity, tools: list
     ) -> ToolProviderApiEntity:
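The refactor splits one session-bound method into a database read, a network-only static validation, and the eventual write, so no connection or row lock is held across network I/O. A sketch of the intended call flow (the call sites are hypothetical; `update_provider`'s full argument list is not shown in this diff):

```python
from core.db.session_factory import session_factory
from services.tools.mcp_tools_manage_service import MCPToolManageService


def change_server_url(tenant_id: str, provider_id: str, new_url: str):
    # 1) Read what validation needs while a session is open.
    with session_factory.create_session() as session:
        data = MCPToolManageService(session).get_provider_for_url_validation(
            tenant_id=tenant_id, provider_id=provider_id
        )

    # 2) Do the slow network check with no session (and no locks) held.
    result = MCPToolManageService.validate_server_url_standalone(
        tenant_id=tenant_id, new_server_url=new_url, validation_data=data
    )

    # 3) Re-open a session only for the short write, passing the result in.
    with session_factory.create_session() as session:
        service = MCPToolManageService(session)
        ...  # service.update_provider(..., validation_result=result)
```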

@@ -2,7 +2,6 @@ import logging
 import time

 import click
-import sqlalchemy as sa
 from celery import shared_task
 from sqlalchemy import select


@@ -12,7 +11,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
 from extensions.ext_database import db
 from libs.datetime_utils import naive_utc_now
 from models.dataset import Dataset, Document, DocumentSegment
-from models.source import DataSourceOauthBinding
+from services.datasource_provider_service import DatasourceProviderService

 logger = logging.getLogger(__name__)


@@ -48,27 +47,36 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
     page_id = data_source_info["notion_page_id"]
     page_type = data_source_info["type"]
     page_edited_time = data_source_info["last_edited_time"]
+    credential_id = data_source_info.get("credential_id")

-    data_source_binding = (
-        db.session.query(DataSourceOauthBinding)
-        .where(
-            sa.and_(
-                DataSourceOauthBinding.tenant_id == document.tenant_id,
-                DataSourceOauthBinding.provider == "notion",
-                DataSourceOauthBinding.disabled == False,
-                DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"',
-            )
-        )
-        .first()
-    )
-    if not data_source_binding:
-        raise ValueError("Data source binding not found.")
+    # Get credentials from datasource provider
+    datasource_provider_service = DatasourceProviderService()
+    credential = datasource_provider_service.get_datasource_credentials(
+        tenant_id=document.tenant_id,
+        credential_id=credential_id,
+        provider="notion_datasource",
+        plugin_id="langgenius/notion_datasource",
+    )
+
+    if not credential:
+        logger.error(
+            "Datasource credential not found for document %s, tenant_id: %s, credential_id: %s",
+            document_id,
+            document.tenant_id,
+            credential_id,
+        )
+        document.indexing_status = "error"
+        document.error = "Datasource credential not found. Please reconnect your Notion workspace."
+        document.stopped_at = naive_utc_now()
+        db.session.commit()
+        db.session.close()
+        return

     loader = NotionExtractor(
         notion_workspace_id=workspace_id,
         notion_obj_id=page_id,
         notion_page_type=page_type,
-        notion_access_token=data_source_binding.access_token,
+        notion_access_token=credential.get("integration_secret"),
         tenant_id=document.tenant_id,
     )
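On a missing credential the task now records a user-visible error and stops, instead of raising out of the Celery worker. The mark-and-stop pattern in isolation (helper name hypothetical):

```python
from libs.datetime_utils import naive_utc_now


def mark_document_failed(document, session, message: str) -> None:
    # Persist the failure so the UI can surface it, then release the session.
    document.indexing_status = "error"
    document.error = message
    document.stopped_at = naive_utc_now()
    session.commit()
    session.close()
```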

@@ -6,6 +6,7 @@ import pytest

 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.workflow.entities import GraphInitParams
+from core.workflow.enums import WorkflowNodeExecutionStatus
 from core.workflow.graph import Graph
 from core.workflow.nodes.http_request.node import HttpRequestNode
 from core.workflow.nodes.node_factory import DifyNodeFactory

@@ -169,13 +170,14 @@ def test_custom_authorization_header(setup_http_mock):


 @pytest.mark.parametrize("setup_http_mock", [["none"]], indirect=True)
-def test_custom_auth_with_empty_api_key_does_not_set_header(setup_http_mock):
-    """Test: In custom authentication mode, when the api_key is empty, no header should be set."""
+def test_custom_auth_with_empty_api_key_raises_error(setup_http_mock):
+    """Test: In custom authentication mode, when the api_key is empty, AuthorizationConfigError should be raised."""
     from core.workflow.nodes.http_request.entities import (
         HttpRequestNodeAuthorization,
         HttpRequestNodeData,
         HttpRequestNodeTimeout,
     )
+    from core.workflow.nodes.http_request.exc import AuthorizationConfigError
     from core.workflow.nodes.http_request.executor import Executor
     from core.workflow.runtime import VariablePool
     from core.workflow.system_variable import SystemVariable

@@ -208,16 +210,13 @@ def test_custom_auth_with_empty_api_key_raises_error(setup_http_mock):
         ssl_verify=True,
     )

-    # Create executor
-    executor = Executor(
-        node_data=node_data, timeout=HttpRequestNodeTimeout(connect=10, read=30, write=10), variable_pool=variable_pool
-    )
-
-    # Get assembled headers
-    headers = executor._assembling_headers()
-
-    # When api_key is empty, the custom header should NOT be set
-    assert "X-Custom-Auth" not in headers
+    # Create executor should raise AuthorizationConfigError
+    with pytest.raises(AuthorizationConfigError, match="API key is required"):
+        Executor(
+            node_data=node_data,
+            timeout=HttpRequestNodeTimeout(connect=10, read=30, write=10),
+            variable_pool=variable_pool,
+        )


 @pytest.mark.parametrize("setup_http_mock", [["none"]], indirect=True)

@@ -305,9 +304,10 @@ def test_basic_authorization_with_custom_header_ignored(setup_http_mock):
 @pytest.mark.parametrize("setup_http_mock", [["none"]], indirect=True)
 def test_custom_authorization_with_empty_api_key(setup_http_mock):
     """
-    Test that custom authorization doesn't set header when api_key is empty.
-    This test verifies the fix for issue #23554.
+    Test that custom authorization raises error when api_key is empty.
+    This test verifies the fix for issue #21830.
     """

     node = init_http_node(
         config={
             "id": "1",

@@ -333,11 +333,10 @@ def test_custom_authorization_with_empty_api_key(setup_http_mock):
     )

     result = node._run()
-    assert result.process_data is not None
-    data = result.process_data.get("request", "")

-    # Custom header should NOT be set when api_key is empty
-    assert "X-Custom-Auth:" not in data
+    # Should fail with AuthorizationConfigError
+    assert result.status == WorkflowNodeExecutionStatus.FAILED
+    assert "API key is required" in result.error
+    assert result.error_type == "AuthorizationConfigError"


 @pytest.mark.parametrize("setup_http_mock", [["none"]], indirect=True)

@@ -2,7 +2,9 @@ from unittest.mock import patch

 import pytest
 from faker import Faker
+from pydantic import TypeAdapter, ValidationError

+from core.tools.entities.tool_entities import ApiProviderSchemaType
 from models import Account, Tenant
 from models.tools import ApiToolProvider
 from services.tools.api_tools_manage_service import ApiToolManageService

@@ -298,7 +300,7 @@ class TestApiToolManageService:
         provider_name = fake.company()
         icon = {"type": "emoji", "value": "🔧"}
         credentials = {"auth_type": "none", "api_key_header": "X-API-Key", "api_key_value": ""}
-        schema_type = "openapi"
+        schema_type = ApiProviderSchemaType.OPENAPI
         schema = self._create_test_openapi_schema()
         privacy_policy = "https://example.com/privacy"
         custom_disclaimer = "Custom disclaimer text"

@@ -364,7 +366,7 @@ class TestApiToolManageService:
         provider_name = fake.company()
         icon = {"type": "emoji", "value": "🔧"}
         credentials = {"auth_type": "none"}
-        schema_type = "openapi"
+        schema_type = ApiProviderSchemaType.OPENAPI
         schema = self._create_test_openapi_schema()
         privacy_policy = "https://example.com/privacy"
         custom_disclaimer = "Custom disclaimer text"

@@ -428,21 +430,10 @@ class TestApiToolManageService:
         labels = ["test"]

         # Act & Assert: Try to create provider with invalid schema type
-        with pytest.raises(ValueError) as exc_info:
-            ApiToolManageService.create_api_tool_provider(
-                user_id=account.id,
-                tenant_id=tenant.id,
-                provider_name=provider_name,
-                icon=icon,
-                credentials=credentials,
-                schema_type=schema_type,
-                schema=schema,
-                privacy_policy=privacy_policy,
-                custom_disclaimer=custom_disclaimer,
-                labels=labels,
-            )
+        with pytest.raises(ValidationError) as exc_info:
+            TypeAdapter(ApiProviderSchemaType).validate_python(schema_type)

-        assert "invalid schema type" in str(exc_info.value)
+        assert "validation error" in str(exc_info.value)

     def test_create_api_tool_provider_missing_auth_type(
         self, flask_req_ctx_with_containers, db_session_with_containers, mock_external_service_dependencies

@@ -464,7 +455,7 @@ class TestApiToolManageService:
         provider_name = fake.company()
         icon = {"type": "emoji", "value": "🔧"}
         credentials = {}  # Missing auth_type
-        schema_type = "openapi"
+        schema_type = ApiProviderSchemaType.OPENAPI
         schema = self._create_test_openapi_schema()
         privacy_policy = "https://example.com/privacy"
         custom_disclaimer = "Custom disclaimer text"

@@ -507,7 +498,7 @@ class TestApiToolManageService:
         provider_name = fake.company()
         icon = {"type": "emoji", "value": "🔑"}
         credentials = {"auth_type": "api_key", "api_key_header": "X-API-Key", "api_key_value": fake.uuid4()}
-        schema_type = "openapi"
+        schema_type = ApiProviderSchemaType.OPENAPI
         schema = self._create_test_openapi_schema()
         privacy_policy = "https://example.com/privacy"
         custom_disclaimer = "Custom disclaimer text"
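The invalid-schema-type test now drives Pydantic's enum validation directly: coercing a bad string through `TypeAdapter` raises `ValidationError` before any service code runs. An isolated sketch with a stand-in enum:

```python
from enum import Enum

from pydantic import TypeAdapter, ValidationError


class SchemaType(str, Enum):  # stand-in for ApiProviderSchemaType
    OPENAPI = "openapi"


assert TypeAdapter(SchemaType).validate_python("openapi") is SchemaType.OPENAPI
try:
    TypeAdapter(SchemaType).validate_python("invalid_type")
except ValidationError as e:
    assert "validation error" in str(e)
```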

@@ -1308,18 +1308,17 @@ class TestMCPToolManageService:
             type("MockTool", (), {"model_dump": lambda self: {"name": "test_tool_2", "description": "Test tool 2"}})(),
         ]

-        with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client:
+        with patch("core.mcp.mcp_client.MCPClient") as mock_mcp_client:
             # Setup mock client
             mock_client_instance = mock_mcp_client.return_value.__enter__.return_value
             mock_client_instance.list_tools.return_value = mock_tools

             # Act: Execute the method under test
-            from extensions.ext_database import db
-
-            service = MCPToolManageService(db.session())
-            result = service._reconnect_provider(
+            result = MCPToolManageService._reconnect_with_url(
                 server_url="https://example.com/mcp",
-                provider=mcp_provider,
+                headers={"X-Test": "1"},
+                timeout=mcp_provider.timeout,
+                sse_read_timeout=mcp_provider.sse_read_timeout,
             )

             # Assert: Verify the expected outcomes

@@ -1337,8 +1336,12 @@ class TestMCPToolManageService:
         assert tools_data[1]["name"] == "test_tool_2"

         # Verify mock interactions
-        provider_entity = mcp_provider.to_entity()
-        mock_mcp_client.assert_called_once()
+        mock_mcp_client.assert_called_once_with(
+            server_url="https://example.com/mcp",
+            headers={"X-Test": "1"},
+            timeout=mcp_provider.timeout,
+            sse_read_timeout=mcp_provider.sse_read_timeout,
+        )

     def test_re_connect_mcp_provider_auth_error(self, db_session_with_containers, mock_external_service_dependencies):
         """

@@ -1361,19 +1364,18 @@ class TestMCPToolManageService:
         )

         # Mock MCPClient to raise authentication error
-        with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client:
+        with patch("core.mcp.mcp_client.MCPClient") as mock_mcp_client:
             from core.mcp.error import MCPAuthError

             mock_client_instance = mock_mcp_client.return_value.__enter__.return_value
             mock_client_instance.list_tools.side_effect = MCPAuthError("Authentication required")

             # Act: Execute the method under test
-            from extensions.ext_database import db
-
-            service = MCPToolManageService(db.session())
-            result = service._reconnect_provider(
+            result = MCPToolManageService._reconnect_with_url(
                 server_url="https://example.com/mcp",
-                provider=mcp_provider,
+                headers={},
+                timeout=mcp_provider.timeout,
+                sse_read_timeout=mcp_provider.sse_read_timeout,
             )

             # Assert: Verify the expected outcomes

@@ -1404,18 +1406,17 @@ class TestMCPToolManageService:
         )

         # Mock MCPClient to raise connection error
-        with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client:
+        with patch("core.mcp.mcp_client.MCPClient") as mock_mcp_client:
             from core.mcp.error import MCPError

             mock_client_instance = mock_mcp_client.return_value.__enter__.return_value
             mock_client_instance.list_tools.side_effect = MCPError("Connection failed")

             # Act & Assert: Verify proper error handling
-            from extensions.ext_database import db
-
-            service = MCPToolManageService(db.session())
             with pytest.raises(ValueError, match="Failed to re-connect MCP server: Connection failed"):
-                service._reconnect_provider(
+                MCPToolManageService._reconnect_with_url(
                     server_url="https://example.com/mcp",
-                    provider=mcp_provider,
+                    headers={"X-Test": "1"},
+                    timeout=mcp_provider.timeout,
+                    sse_read_timeout=mcp_provider.sse_read_timeout,
                 )

@@ -177,34 +177,17 @@ class TestActivateApi:
             "account": mock_account,
         }

-    @pytest.fixture
-    def mock_token_pair(self):
-        """Create mock token pair object."""
-        token_pair = MagicMock()
-        token_pair.access_token = "access_token"
-        token_pair.refresh_token = "refresh_token"
-        token_pair.csrf_token = "csrf_token"
-        token_pair.model_dump.return_value = {
-            "access_token": "access_token",
-            "refresh_token": "refresh_token",
-            "csrf_token": "csrf_token",
-        }
-        return token_pair
-
-    @patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid")
+    @patch("controllers.console.auth.activate.RegisterService.get_invitation_with_case_fallback")
     @patch("controllers.console.auth.activate.RegisterService.revoke_token")
     @patch("controllers.console.auth.activate.db")
-    @patch("controllers.console.auth.activate.AccountService.login")
     def test_successful_account_activation(
         self,
-        mock_login,
         mock_db,
         mock_revoke_token,
         mock_get_invitation,
         app,
        mock_invitation,
        mock_account,
-        mock_token_pair,
     ):
         """
         Test successful account activation.

@@ -212,12 +195,10 @@ class TestActivateApi:
         Verifies that:
         - Account is activated with user preferences
         - Account status is set to ACTIVE
-        - User is logged in after activation
         - Invitation token is revoked
         """
         # Arrange
         mock_get_invitation.return_value = mock_invitation
-        mock_login.return_value = mock_token_pair

         # Act
         with app.test_request_context(

@@ -244,7 +225,6 @@ class TestActivateApi:
         assert mock_account.initialized_at is not None
         mock_revoke_token.assert_called_once_with("workspace-123", "invitee@example.com", "valid_token")
         mock_db.session.commit.assert_called_once()
-        mock_login.assert_called_once()

     @patch("controllers.console.auth.activate.RegisterService.get_invitation_with_case_fallback")
     def test_activation_with_invalid_token(self, mock_get_invitation, app):

@@ -278,17 +258,14 @@ class TestActivateApi:
     @patch("controllers.console.auth.activate.RegisterService.get_invitation_with_case_fallback")
     @patch("controllers.console.auth.activate.RegisterService.revoke_token")
     @patch("controllers.console.auth.activate.db")
-    @patch("controllers.console.auth.activate.AccountService.login")
     def test_activation_sets_interface_theme(
         self,
-        mock_login,
         mock_db,
         mock_revoke_token,
         mock_get_invitation,
         app,
         mock_invitation,
         mock_account,
-        mock_token_pair,
     ):
         """
         Test that activation sets default interface theme.

@@ -298,7 +275,6 @@ class TestActivateApi:
         """
         # Arrange
         mock_get_invitation.return_value = mock_invitation
-        mock_login.return_value = mock_token_pair

         # Act
         with app.test_request_context(

@@ -331,17 +307,14 @@ class TestActivateApi:
     @patch("controllers.console.auth.activate.RegisterService.get_invitation_with_case_fallback")
     @patch("controllers.console.auth.activate.RegisterService.revoke_token")
     @patch("controllers.console.auth.activate.db")
-    @patch("controllers.console.auth.activate.AccountService.login")
     def test_activation_with_different_locales(
         self,
-        mock_login,
         mock_db,
         mock_revoke_token,
         mock_get_invitation,
         app,
         mock_invitation,
         mock_account,
-        mock_token_pair,
         language,
         timezone,
     ):

@@ -355,7 +328,6 @@ class TestActivateApi:
         """
         # Arrange
         mock_get_invitation.return_value = mock_invitation
-        mock_login.return_value = mock_token_pair

         # Act
         with app.test_request_context(

@@ -381,27 +353,23 @@ class TestActivateApi:
     @patch("controllers.console.auth.activate.RegisterService.get_invitation_with_case_fallback")
     @patch("controllers.console.auth.activate.RegisterService.revoke_token")
     @patch("controllers.console.auth.activate.db")
-    @patch("controllers.console.auth.activate.AccountService.login")
-    def test_activation_returns_token_data(
+    def test_activation_returns_success_response(
         self,
-        mock_login,
         mock_db,
         mock_revoke_token,
         mock_get_invitation,
         app,
         mock_invitation,
-        mock_token_pair,
     ):
         """
-        Test that activation returns authentication tokens.
+        Test that activation returns a success response without authentication tokens.

         Verifies that:
-        - Token pair is returned in response
-        - All token types are included (access, refresh, csrf)
+        - Response contains a success result
+        - No token data is returned
         """
         # Arrange
         mock_get_invitation.return_value = mock_invitation
-        mock_login.return_value = mock_token_pair

         # Act
         with app.test_request_context(

@@ -420,24 +388,18 @@ class TestActivateApi:
         response = api.post()

         # Assert
-        assert "data" in response
-        assert response["data"]["access_token"] == "access_token"
-        assert response["data"]["refresh_token"] == "refresh_token"
-        assert response["data"]["csrf_token"] == "csrf_token"
+        assert response == {"result": "success"}

     @patch("controllers.console.auth.activate.RegisterService.get_invitation_with_case_fallback")
     @patch("controllers.console.auth.activate.RegisterService.revoke_token")
     @patch("controllers.console.auth.activate.db")
-    @patch("controllers.console.auth.activate.AccountService.login")
     def test_activation_without_workspace_id(
         self,
-        mock_login,
         mock_db,
         mock_revoke_token,
         mock_get_invitation,
         app,
         mock_invitation,
-        mock_token_pair,
     ):
         """
         Test account activation without workspace_id.

@@ -448,7 +410,6 @@ class TestActivateApi:
         """
         # Arrange
         mock_get_invitation.return_value = mock_invitation
-        mock_login.return_value = mock_token_pair

         # Act
         with app.test_request_context(
@@ -287,7 +287,7 @@ def test_validate_inputs_optional_file_with_empty_string():


 def test_validate_inputs_optional_file_list_with_empty_list():
-    """Test that optional FILE_LIST variable with empty list returns None"""
+    """Test that optional FILE_LIST variable with empty list returns empty list (not None)"""
     base_app_generator = BaseAppGenerator()

     var_file_list = VariableEntity(

@@ -302,6 +302,28 @@ def test_validate_inputs_optional_file_list_with_empty_list():
         value=[],
     )

-    assert result is None
+    # Empty list should be preserved, not converted to None
+    # This allows downstream components like document_extractor to handle empty lists properly
+    assert result == []
+
+
+def test_validate_inputs_optional_file_list_with_empty_string():
+    """Test that optional FILE_LIST variable with empty string returns None"""
+    base_app_generator = BaseAppGenerator()
+
+    var_file_list = VariableEntity(
+        variable="test_file_list",
+        label="test_file_list",
+        type=VariableEntityType.FILE_LIST,
+        required=False,
+    )
+
+    result = base_app_generator._validate_inputs(
+        variable_entity=var_file_list,
+        value="",
+    )
+
+    # Empty string should be treated as unset
+    assert result is None
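The rule these two tests pin down: an empty list is a real value and is preserved, while an empty string means "unset" and becomes None. Stated as a tiny standalone function (name hypothetical):

```python
def normalize_optional_file_list(value):
    # "" -> None (treated as unset); [] -> [] (kept so downstream nodes,
    # e.g. document_extractor, can handle empty lists explicitly).
    if value == "":
        return None
    return value


assert normalize_optional_file_list("") is None
assert normalize_optional_file_list([]) == []
```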
@@ -132,3 +132,36 @@ def test_extract_images_from_docx(monkeypatch):
     # DB interactions should be recorded
     assert len(db_stub.session.added) == 2
     assert db_stub.session.committed is True
+
+
+def test_extract_images_from_docx_uses_internal_files_url():
+    """Test that INTERNAL_FILES_URL takes precedence over FILES_URL for plugin access."""
+    # Test the URL generation logic directly
+    from configs import dify_config
+
+    # Mock the configuration values
+    original_files_url = getattr(dify_config, "FILES_URL", None)
+    original_internal_files_url = getattr(dify_config, "INTERNAL_FILES_URL", None)
+
+    try:
+        # Set both URLs - INTERNAL should take precedence
+        dify_config.FILES_URL = "http://external.example.com"
+        dify_config.INTERNAL_FILES_URL = "http://internal.docker:5001"
+
+        # Test the URL generation logic (same as in word_extractor.py)
+        upload_file_id = "test_file_id"
+
+        # This is the pattern we fixed in the word extractor
+        base_url = dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL
+        generated_url = f"{base_url}/files/{upload_file_id}/file-preview"
+
+        # Verify that INTERNAL_FILES_URL is used instead of FILES_URL
+        assert "http://internal.docker:5001" in generated_url, f"Expected internal URL, got: {generated_url}"
+        assert "http://external.example.com" not in generated_url, f"Should not use external URL, got: {generated_url}"
+
+    finally:
+        # Restore original values
+        if original_files_url is not None:
+            dify_config.FILES_URL = original_files_url
+        if original_internal_files_url is not None:
+            dify_config.INTERNAL_FILES_URL = original_internal_files_url
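The precedence being tested is a plain `or` fallback: prefer the internally reachable URL when configured, otherwise the public one. Stripped to its core (values hypothetical):

```python
INTERNAL_FILES_URL = "http://internal.docker:5001"  # reachable from plugin containers
FILES_URL = "http://external.example.com"           # public-facing address

base_url = INTERNAL_FILES_URL or FILES_URL
preview_url = f"{base_url}/files/test_file_id/file-preview"
assert preview_url.startswith("http://internal.docker:5001")
```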
@@ -1,3 +1,5 @@
+import pytest
+
 from core.workflow.nodes.http_request import (
     BodyData,
     HttpRequestNodeAuthorization,

@@ -5,6 +7,7 @@ from core.workflow.nodes.http_request import (
     HttpRequestNodeData,
 )
 from core.workflow.nodes.http_request.entities import HttpRequestNodeTimeout
+from core.workflow.nodes.http_request.exc import AuthorizationConfigError
 from core.workflow.nodes.http_request.executor import Executor
 from core.workflow.runtime import VariablePool
 from core.workflow.system_variable import SystemVariable

@@ -348,3 +351,127 @@ def test_init_params():
     executor = create_executor("key1:value1\n\nkey2:value2\n\n")
     executor._init_params()
     assert executor.params == [("key1", "value1"), ("key2", "value2")]
+
+
+def test_empty_api_key_raises_error_bearer():
+    """Test that empty API key raises AuthorizationConfigError for bearer auth."""
+    variable_pool = VariablePool(system_variables=SystemVariable.empty())
+    node_data = HttpRequestNodeData(
+        title="test",
+        method="get",
+        url="http://example.com",
+        headers="",
+        params="",
+        authorization=HttpRequestNodeAuthorization(
+            type="api-key",
+            config={"type": "bearer", "api_key": ""},
+        ),
+    )
+    timeout = HttpRequestNodeTimeout(connect=10, read=30, write=30)
+
+    with pytest.raises(AuthorizationConfigError, match="API key is required"):
+        Executor(
+            node_data=node_data,
+            timeout=timeout,
+            variable_pool=variable_pool,
+        )
+
+
+def test_empty_api_key_raises_error_basic():
+    """Test that empty API key raises AuthorizationConfigError for basic auth."""
+    variable_pool = VariablePool(system_variables=SystemVariable.empty())
+    node_data = HttpRequestNodeData(
+        title="test",
+        method="get",
+        url="http://example.com",
+        headers="",
+        params="",
+        authorization=HttpRequestNodeAuthorization(
+            type="api-key",
+            config={"type": "basic", "api_key": ""},
+        ),
+    )
+    timeout = HttpRequestNodeTimeout(connect=10, read=30, write=30)
+
+    with pytest.raises(AuthorizationConfigError, match="API key is required"):
+        Executor(
+            node_data=node_data,
+            timeout=timeout,
+            variable_pool=variable_pool,
+        )
+
+
+def test_empty_api_key_raises_error_custom():
+    """Test that empty API key raises AuthorizationConfigError for custom auth."""
+    variable_pool = VariablePool(system_variables=SystemVariable.empty())
+    node_data = HttpRequestNodeData(
+        title="test",
+        method="get",
+        url="http://example.com",
+        headers="",
+        params="",
+        authorization=HttpRequestNodeAuthorization(
+            type="api-key",
+            config={"type": "custom", "api_key": "", "header": "X-Custom-Auth"},
+        ),
+    )
+    timeout = HttpRequestNodeTimeout(connect=10, read=30, write=30)
+
+    with pytest.raises(AuthorizationConfigError, match="API key is required"):
+        Executor(
+            node_data=node_data,
+            timeout=timeout,
+            variable_pool=variable_pool,
+        )
+
+
+def test_whitespace_only_api_key_raises_error():
+    """Test that whitespace-only API key raises AuthorizationConfigError."""
+    variable_pool = VariablePool(system_variables=SystemVariable.empty())
+    node_data = HttpRequestNodeData(
+        title="test",
+        method="get",
+        url="http://example.com",
+        headers="",
+        params="",
+        authorization=HttpRequestNodeAuthorization(
+            type="api-key",
+            config={"type": "bearer", "api_key": " "},
+        ),
+    )
+    timeout = HttpRequestNodeTimeout(connect=10, read=30, write=30)
+
+    with pytest.raises(AuthorizationConfigError, match="API key is required"):
+        Executor(
+            node_data=node_data,
+            timeout=timeout,
+            variable_pool=variable_pool,
+        )
+
+
+def test_valid_api_key_works():
+    """Test that valid API key works correctly for bearer auth."""
+    variable_pool = VariablePool(system_variables=SystemVariable.empty())
+    node_data = HttpRequestNodeData(
+        title="test",
+        method="get",
+        url="http://example.com",
+        headers="",
+        params="",
+        authorization=HttpRequestNodeAuthorization(
+            type="api-key",
+            config={"type": "bearer", "api_key": "valid-api-key-123"},
+        ),
+    )
+    timeout = HttpRequestNodeTimeout(connect=10, read=30, write=30)
+
+    executor = Executor(
+        node_data=node_data,
+        timeout=timeout,
+        variable_pool=variable_pool,
+    )
+
+    # Should not raise an error
+    headers = executor._assembling_headers()
+    assert "Authorization" in headers
+    assert headers["Authorization"] == "Bearer valid-api-key-123"
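The four blank-key tests above differ only in the authorization config; they could equally be collapsed with `pytest.mark.parametrize`. A sketch of that refactor (not part of the diff; it reuses the imports already present in this test module):

```python
import pytest


@pytest.mark.parametrize(
    "config",
    [
        {"type": "bearer", "api_key": ""},
        {"type": "basic", "api_key": ""},
        {"type": "custom", "api_key": "", "header": "X-Custom-Auth"},
        {"type": "bearer", "api_key": " "},
    ],
)
def test_blank_api_key_raises(config):
    variable_pool = VariablePool(system_variables=SystemVariable.empty())
    node_data = HttpRequestNodeData(
        title="test",
        method="get",
        url="http://example.com",
        headers="",
        params="",
        authorization=HttpRequestNodeAuthorization(type="api-key", config=config),
    )
    with pytest.raises(AuthorizationConfigError, match="API key is required"):
        Executor(
            node_data=node_data,
            timeout=HttpRequestNodeTimeout(connect=10, read=30, write=30),
            variable_pool=variable_pool,
        )
```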
@@ -0,0 +1,520 @@
+"""
+Unit tests for document indexing sync task.
+
+This module tests the document indexing sync task functionality including:
+- Syncing Notion documents when updated
+- Validating document and data source existence
+- Credential validation and retrieval
+- Cleaning old segments before re-indexing
+- Error handling and edge cases
+"""
+
+import uuid
+from unittest.mock import MagicMock, Mock, patch
+
+import pytest
+
+from core.indexing_runner import DocumentIsPausedError, IndexingRunner
+from models.dataset import Dataset, Document, DocumentSegment
+from tasks.document_indexing_sync_task import document_indexing_sync_task
+
+# ============================================================================
+# Fixtures
+# ============================================================================
+
+
+@pytest.fixture
+def tenant_id():
+    """Generate a unique tenant ID for testing."""
+    return str(uuid.uuid4())
+
+
+@pytest.fixture
+def dataset_id():
+    """Generate a unique dataset ID for testing."""
+    return str(uuid.uuid4())
+
+
+@pytest.fixture
+def document_id():
+    """Generate a unique document ID for testing."""
+    return str(uuid.uuid4())
+
+
+@pytest.fixture
+def notion_workspace_id():
+    """Generate a Notion workspace ID for testing."""
+    return str(uuid.uuid4())
+
+
+@pytest.fixture
+def notion_page_id():
+    """Generate a Notion page ID for testing."""
+    return str(uuid.uuid4())
+
+
+@pytest.fixture
+def credential_id():
+    """Generate a credential ID for testing."""
+    return str(uuid.uuid4())
+
+
+@pytest.fixture
+def mock_dataset(dataset_id, tenant_id):
+    """Create a mock Dataset object."""
+    dataset = Mock(spec=Dataset)
+    dataset.id = dataset_id
+    dataset.tenant_id = tenant_id
+    dataset.indexing_technique = "high_quality"
+    dataset.embedding_model_provider = "openai"
+    dataset.embedding_model = "text-embedding-ada-002"
+    return dataset
+
+
+@pytest.fixture
+def mock_document(document_id, dataset_id, tenant_id, notion_workspace_id, notion_page_id, credential_id):
+    """Create a mock Document object with Notion data source."""
+    doc = Mock(spec=Document)
+    doc.id = document_id
+    doc.dataset_id = dataset_id
+    doc.tenant_id = tenant_id
+    doc.data_source_type = "notion_import"
+    doc.indexing_status = "completed"
+    doc.error = None
+    doc.stopped_at = None
+    doc.processing_started_at = None
+    doc.doc_form = "text_model"
+    doc.data_source_info_dict = {
+        "notion_workspace_id": notion_workspace_id,
+        "notion_page_id": notion_page_id,
+        "type": "page",
+        "last_edited_time": "2024-01-01T00:00:00Z",
+        "credential_id": credential_id,
+    }
+    return doc
+
+
+@pytest.fixture
+def mock_document_segments(document_id):
+    """Create mock DocumentSegment objects."""
+    segments = []
+    for i in range(3):
+        segment = Mock(spec=DocumentSegment)
+        segment.id = str(uuid.uuid4())
+        segment.document_id = document_id
+        segment.index_node_id = f"node-{document_id}-{i}"
+        segments.append(segment)
+    return segments
+
+
+@pytest.fixture
+def mock_db_session():
+    """Mock database session."""
+    with patch("tasks.document_indexing_sync_task.db.session") as mock_session:
+        mock_query = MagicMock()
+        mock_session.query.return_value = mock_query
+        mock_query.where.return_value = mock_query
+        mock_session.scalars.return_value = MagicMock()
+        yield mock_session
+
+
+@pytest.fixture
+def mock_datasource_provider_service():
+    """Mock DatasourceProviderService."""
+    with patch("tasks.document_indexing_sync_task.DatasourceProviderService") as mock_service_class:
+        mock_service = MagicMock()
+        mock_service.get_datasource_credentials.return_value = {"integration_secret": "test_token"}
+        mock_service_class.return_value = mock_service
+        yield mock_service
+
+
+@pytest.fixture
+def mock_notion_extractor():
+    """Mock NotionExtractor."""
+    with patch("tasks.document_indexing_sync_task.NotionExtractor") as mock_extractor_class:
+        mock_extractor = MagicMock()
+        mock_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"  # Updated time
+        mock_extractor_class.return_value = mock_extractor
+        yield mock_extractor
+
+
+@pytest.fixture
+def mock_index_processor_factory():
+    """Mock IndexProcessorFactory."""
+    with patch("tasks.document_indexing_sync_task.IndexProcessorFactory") as mock_factory:
+        mock_processor = MagicMock()
+        mock_processor.clean = Mock()
+        mock_factory.return_value.init_index_processor.return_value = mock_processor
+        yield mock_factory
+
+
+@pytest.fixture
+def mock_indexing_runner():
+    """Mock IndexingRunner."""
+    with patch("tasks.document_indexing_sync_task.IndexingRunner") as mock_runner_class:
+        mock_runner = MagicMock(spec=IndexingRunner)
+        mock_runner.run = Mock()
+        mock_runner_class.return_value = mock_runner
+        yield mock_runner
+
+
+# ============================================================================
+# Tests for document_indexing_sync_task
+# ============================================================================
+
+
+class TestDocumentIndexingSyncTask:
+    """Tests for the document_indexing_sync_task function."""
+
+    def test_document_not_found(self, mock_db_session, dataset_id, document_id):
+        """Test that task handles document not found gracefully."""
+        # Arrange
+        mock_db_session.query.return_value.where.return_value.first.return_value = None
+
+        # Act
+        document_indexing_sync_task(dataset_id, document_id)
+
+        # Assert
+        mock_db_session.close.assert_called_once()
+
+    def test_missing_notion_workspace_id(self, mock_db_session, mock_document, dataset_id, document_id):
+        """Test that task raises error when notion_workspace_id is missing."""
+        # Arrange
+        mock_document.data_source_info_dict = {"notion_page_id": "page123", "type": "page"}
+        mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
+
+        # Act & Assert
+        with pytest.raises(ValueError, match="no notion page found"):
+            document_indexing_sync_task(dataset_id, document_id)
+
+    def test_missing_notion_page_id(self, mock_db_session, mock_document, dataset_id, document_id):
+        """Test that task raises error when notion_page_id is missing."""
+        # Arrange
+        mock_document.data_source_info_dict = {"notion_workspace_id": "ws123", "type": "page"}
+        mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
+
+        # Act & Assert
+        with pytest.raises(ValueError, match="no notion page found"):
+            document_indexing_sync_task(dataset_id, document_id)
+
+    def test_empty_data_source_info(self, mock_db_session, mock_document, dataset_id, document_id):
+        """Test that task raises error when data_source_info is empty."""
+        # Arrange
+        mock_document.data_source_info_dict = None
+        mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
+
+        # Act & Assert
+        with pytest.raises(ValueError, match="no notion page found"):
+            document_indexing_sync_task(dataset_id, document_id)
+
+    def test_credential_not_found(
+        self,
+        mock_db_session,
+        mock_datasource_provider_service,
+        mock_document,
+        dataset_id,
+        document_id,
+    ):
+        """Test that task handles missing credentials by updating document status."""
+        # Arrange
+        mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
+        mock_datasource_provider_service.get_datasource_credentials.return_value = None
+
+        # Act
+        document_indexing_sync_task(dataset_id, document_id)
+
+        # Assert
+        assert mock_document.indexing_status == "error"
+        assert "Datasource credential not found" in mock_document.error
+        assert mock_document.stopped_at is not None
+        mock_db_session.commit.assert_called()
+        mock_db_session.close.assert_called()
+
+    def test_page_not_updated(
+        self,
+        mock_db_session,
+        mock_datasource_provider_service,
+        mock_notion_extractor,
+        mock_document,
+        dataset_id,
+        document_id,
+    ):
+        """Test that task does nothing when page has not been updated."""
+        # Arrange
+        mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
+        # Return same time as stored in document
+        mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z"
+
+        # Act
+        document_indexing_sync_task(dataset_id, document_id)
+
+        # Assert
+        # Document status should remain unchanged
+        assert mock_document.indexing_status == "completed"
+        # No session operations should be performed beyond the initial query
+        mock_db_session.close.assert_not_called()
+
+    def test_successful_sync_when_page_updated(
+        self,
+        mock_db_session,
+        mock_datasource_provider_service,
+        mock_notion_extractor,
+        mock_index_processor_factory,
+        mock_indexing_runner,
+        mock_dataset,
+        mock_document,
+        mock_document_segments,
+        dataset_id,
+        document_id,
+    ):
+        """Test successful sync flow when Notion page has been updated."""
+        # Arrange
+        mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
+        mock_db_session.scalars.return_value.all.return_value = mock_document_segments
+        # NotionExtractor returns updated time
+        mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
+
+        # Act
+        document_indexing_sync_task(dataset_id, document_id)
+
+        # Assert
+        # Verify document status was updated to parsing
+        assert mock_document.indexing_status == "parsing"
+        assert mock_document.processing_started_at is not None
+
+        # Verify segments were cleaned
+        mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
+        mock_processor.clean.assert_called_once()
+
+        # Verify segments were deleted from database
+        for segment in mock_document_segments:
+            mock_db_session.delete.assert_any_call(segment)
+
+        # Verify indexing runner was called
+        mock_indexing_runner.run.assert_called_once_with([mock_document])
+
+        # Verify session operations
+        assert mock_db_session.commit.called
+        mock_db_session.close.assert_called_once()
+
+    def test_dataset_not_found_during_cleaning(
+        self,
+        mock_db_session,
+        mock_datasource_provider_service,
+        mock_notion_extractor,
+        mock_document,
+        dataset_id,
+        document_id,
+    ):
+        """Test that task handles dataset not found during cleaning phase."""
+        # Arrange
+        mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, None]
+        mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
+
+        # Act
+        document_indexing_sync_task(dataset_id, document_id)
+
+        # Assert
+        # Document should still be set to parsing
+        assert mock_document.indexing_status == "parsing"
+        # Session should be closed after error
+        mock_db_session.close.assert_called_once()
+
+    def test_cleaning_error_continues_to_indexing(
+        self,
+        mock_db_session,
+        mock_datasource_provider_service,
+        mock_notion_extractor,
+        mock_index_processor_factory,
+        mock_indexing_runner,
+        mock_dataset,
+        mock_document,
+        dataset_id,
+        document_id,
+    ):
+        """Test that indexing continues even if cleaning fails."""
+        # Arrange
+        mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
+        mock_db_session.scalars.return_value.all.side_effect = Exception("Cleaning error")
+        mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
+
+        # Act
+        document_indexing_sync_task(dataset_id, document_id)
+
+        # Assert
+        # Indexing should still be attempted despite cleaning error
+        mock_indexing_runner.run.assert_called_once_with([mock_document])
+        mock_db_session.close.assert_called_once()
+
+    def test_indexing_runner_document_paused_error(
+        self,
+        mock_db_session,
+        mock_datasource_provider_service,
+        mock_notion_extractor,
+        mock_index_processor_factory,
+        mock_indexing_runner,
+        mock_dataset,
+        mock_document,
+        mock_document_segments,
+        dataset_id,
+        document_id,
+    ):
+        """Test that DocumentIsPausedError is handled gracefully."""
+        # Arrange
+        mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
+        mock_db_session.scalars.return_value.all.return_value = mock_document_segments
+        mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
+        mock_indexing_runner.run.side_effect = DocumentIsPausedError("Document paused")
+
+        # Act
+        document_indexing_sync_task(dataset_id, document_id)
+
+        # Assert
+        # Session should be closed after handling error
+        mock_db_session.close.assert_called_once()
+
+    def test_indexing_runner_general_error(
+        self,
+        mock_db_session,
+        mock_datasource_provider_service,
+        mock_notion_extractor,
+        mock_index_processor_factory,
+        mock_indexing_runner,
+        mock_dataset,
+        mock_document,
+        mock_document_segments,
+        dataset_id,
+        document_id,
+    ):
+        """Test that general exceptions during indexing are handled."""
+        # Arrange
+        mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
+        mock_db_session.scalars.return_value.all.return_value = mock_document_segments
+        mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
+        mock_indexing_runner.run.side_effect = Exception("Indexing error")
+
+        # Act
+        document_indexing_sync_task(dataset_id, document_id)
+
+        # Assert
+        # Session should be closed after error
+        mock_db_session.close.assert_called_once()
+
+    def test_notion_extractor_initialized_with_correct_params(
+        self,
+        mock_db_session,
+        mock_datasource_provider_service,
+        mock_notion_extractor,
+        mock_document,
+        dataset_id,
+        document_id,
+        notion_workspace_id,
+        notion_page_id,
+    ):
+        """Test that NotionExtractor is initialized with correct parameters."""
+        # Arrange
+        mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
+        mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z"  # No update
+
+        # Act
+        with patch("tasks.document_indexing_sync_task.NotionExtractor") as mock_extractor_class:
+            mock_extractor = MagicMock()
+            mock_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z"
+            mock_extractor_class.return_value = mock_extractor
+
+            document_indexing_sync_task(dataset_id, document_id)
+
+            # Assert
+            mock_extractor_class.assert_called_once_with(
+                notion_workspace_id=notion_workspace_id,
+                notion_obj_id=notion_page_id,
+                notion_page_type="page",
+                notion_access_token="test_token",
+                tenant_id=mock_document.tenant_id,
+            )
+
+    def test_datasource_credentials_requested_correctly(
+        self,
+        mock_db_session,
+        mock_datasource_provider_service,
+        mock_notion_extractor,
+        mock_document,
+        dataset_id,
+        document_id,
+        credential_id,
+    ):
+        """Test that datasource credentials are requested with correct parameters."""
+        # Arrange
+        mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
+        mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z"
+
+        # Act
+        document_indexing_sync_task(dataset_id, document_id)
+
+        # Assert
+        mock_datasource_provider_service.get_datasource_credentials.assert_called_once_with(
+            tenant_id=mock_document.tenant_id,
+            credential_id=credential_id,
+            provider="notion_datasource",
+            plugin_id="langgenius/notion_datasource",
+        )
+
+    def test_credential_id_missing_uses_none(
+        self,
+        mock_db_session,
+        mock_datasource_provider_service,
+        mock_notion_extractor,
+        mock_document,
+        dataset_id,
+        document_id,
+    ):
+        """Test that task handles missing credential_id by passing None."""
+        # Arrange
+        mock_document.data_source_info_dict = {
+            "notion_workspace_id": "ws123",
+            "notion_page_id": "page123",
+            "type": "page",
+            "last_edited_time": "2024-01-01T00:00:00Z",
+        }
+        mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
+        mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z"
+
+        # Act
+        document_indexing_sync_task(dataset_id, document_id)
+
+        # Assert
+        mock_datasource_provider_service.get_datasource_credentials.assert_called_once_with(
+            tenant_id=mock_document.tenant_id,
+            credential_id=None,
+            provider="notion_datasource",
+            plugin_id="langgenius/notion_datasource",
+        )
+
+    def test_index_processor_clean_called_with_correct_params(
+        self,
+        mock_db_session,
+        mock_datasource_provider_service,
+        mock_notion_extractor,
+        mock_index_processor_factory,
+        mock_indexing_runner,
+        mock_dataset,
+        mock_document,
+        mock_document_segments,
+        dataset_id,
+        document_id,
+    ):
+        """Test that index processor clean is called with correct parameters."""
+        # Arrange
+        mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
+        mock_db_session.scalars.return_value.all.return_value = mock_document_segments
+        mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
|
||||
|
||||
# Act
|
||||
document_indexing_sync_task(dataset_id, document_id)
|
||||
|
||||
# Assert
|
||||
mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
|
||||
expected_node_ids = [seg.index_node_id for seg in mock_document_segments]
|
||||
mock_processor.clean.assert_called_once_with(
|
||||
mock_dataset, expected_node_ids, with_keywords=True, delete_child_chunks=True
|
||||
)
|
||||
39  api/uv.lock

@@ -1636,7 +1636,7 @@ requires-dist = [
    { name = "pydantic-extra-types", specifier = "~=2.10.3" },
    { name = "pydantic-settings", specifier = "~=2.11.0" },
    { name = "pyjwt", specifier = "~=2.10.1" },
    { name = "pypdfium2", specifier = "==4.30.0" },
    { name = "pypdfium2", specifier = "==5.2.0" },
    { name = "python-docx", specifier = "~=1.1.0" },
    { name = "python-dotenv", specifier = "==1.0.1" },
    { name = "pyyaml", specifier = "~=6.0.1" },

@@ -4993,22 +4993,31 @@ wheels = [

[[package]]
name = "pypdfium2"
version = "4.30.0"
version = "5.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/14/838b3ba247a0ba92e4df5d23f2bea9478edcfd72b78a39d6ca36ccd84ad2/pypdfium2-4.30.0.tar.gz", hash = "sha256:48b5b7e5566665bc1015b9d69c1ebabe21f6aee468b509531c3c8318eeee2e16", size = 140239, upload-time = "2024-05-09T18:33:17.552Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/ab/73c7d24e4eac9ba952569403b32b7cca9412fc5b9bef54fdbd669551389f/pypdfium2-5.2.0.tar.gz", hash = "sha256:43863625231ce999c1ebbed6721a88de818b2ab4d909c1de558d413b9a400256", size = 269999, upload-time = "2025-12-12T13:20:15.353Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/c7/9a/c8ff5cc352c1b60b0b97642ae734f51edbab6e28b45b4fcdfe5306ee3c83/pypdfium2-4.30.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b33ceded0b6ff5b2b93bc1fe0ad4b71aa6b7e7bd5875f1ca0cdfb6ba6ac01aab", size = 2837254, upload-time = "2024-05-09T18:32:48.653Z" },
    { url = "https://files.pythonhosted.org/packages/21/8b/27d4d5409f3c76b985f4ee4afe147b606594411e15ac4dc1c3363c9a9810/pypdfium2-4.30.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4e55689f4b06e2d2406203e771f78789bd4f190731b5d57383d05cf611d829de", size = 2707624, upload-time = "2024-05-09T18:32:51.458Z" },
    { url = "https://files.pythonhosted.org/packages/11/63/28a73ca17c24b41a205d658e177d68e198d7dde65a8c99c821d231b6ee3d/pypdfium2-4.30.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6e50f5ce7f65a40a33d7c9edc39f23140c57e37144c2d6d9e9262a2a854854", size = 2793126, upload-time = "2024-05-09T18:32:53.581Z" },
    { url = "https://files.pythonhosted.org/packages/d1/96/53b3ebf0955edbd02ac6da16a818ecc65c939e98fdeb4e0958362bd385c8/pypdfium2-4.30.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3d0dd3ecaffd0b6dbda3da663220e705cb563918249bda26058c6036752ba3a2", size = 2591077, upload-time = "2024-05-09T18:32:55.99Z" },
    { url = "https://files.pythonhosted.org/packages/ec/ee/0394e56e7cab8b5b21f744d988400948ef71a9a892cbeb0b200d324ab2c7/pypdfium2-4.30.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc3bf29b0db8c76cdfaac1ec1cde8edf211a7de7390fbf8934ad2aa9b4d6dfad", size = 2864431, upload-time = "2024-05-09T18:32:57.911Z" },
    { url = "https://files.pythonhosted.org/packages/65/cd/3f1edf20a0ef4a212a5e20a5900e64942c5a374473671ac0780eaa08ea80/pypdfium2-4.30.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1f78d2189e0ddf9ac2b7a9b9bd4f0c66f54d1389ff6c17e9fd9dc034d06eb3f", size = 2812008, upload-time = "2024-05-09T18:32:59.886Z" },
    { url = "https://files.pythonhosted.org/packages/c8/91/2d517db61845698f41a2a974de90762e50faeb529201c6b3574935969045/pypdfium2-4.30.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:5eda3641a2da7a7a0b2f4dbd71d706401a656fea521b6b6faa0675b15d31a163", size = 6181543, upload-time = "2024-05-09T18:33:02.597Z" },
    { url = "https://files.pythonhosted.org/packages/ba/c4/ed1315143a7a84b2c7616569dfb472473968d628f17c231c39e29ae9d780/pypdfium2-4.30.0-py3-none-musllinux_1_1_i686.whl", hash = "sha256:0dfa61421b5eb68e1188b0b2231e7ba35735aef2d867d86e48ee6cab6975195e", size = 6175911, upload-time = "2024-05-09T18:33:05.376Z" },
    { url = "https://files.pythonhosted.org/packages/7a/c4/9e62d03f414e0e3051c56d5943c3bf42aa9608ede4e19dc96438364e9e03/pypdfium2-4.30.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:f33bd79e7a09d5f7acca3b0b69ff6c8a488869a7fab48fdf400fec6e20b9c8be", size = 6267430, upload-time = "2024-05-09T18:33:08.067Z" },
    { url = "https://files.pythonhosted.org/packages/90/47/eda4904f715fb98561e34012826e883816945934a851745570521ec89520/pypdfium2-4.30.0-py3-none-win32.whl", hash = "sha256:ee2410f15d576d976c2ab2558c93d392a25fb9f6635e8dd0a8a3a5241b275e0e", size = 2775951, upload-time = "2024-05-09T18:33:10.567Z" },
    { url = "https://files.pythonhosted.org/packages/25/bd/56d9ec6b9f0fc4e0d95288759f3179f0fcd34b1a1526b75673d2f6d5196f/pypdfium2-4.30.0-py3-none-win_amd64.whl", hash = "sha256:90dbb2ac07be53219f56be09961eb95cf2473f834d01a42d901d13ccfad64b4c", size = 2892098, upload-time = "2024-05-09T18:33:13.107Z" },
    { url = "https://files.pythonhosted.org/packages/be/7a/097801205b991bc3115e8af1edb850d30aeaf0118520b016354cf5ccd3f6/pypdfium2-4.30.0-py3-none-win_arm64.whl", hash = "sha256:119b2969a6d6b1e8d55e99caaf05290294f2d0fe49c12a3f17102d01c441bd29", size = 2752118, upload-time = "2024-05-09T18:33:15.489Z" },
    { url = "https://files.pythonhosted.org/packages/fb/0c/9108ae5266ee4cdf495f99205c44d4b5c83b4eb227c2b610d35c9e9fe961/pypdfium2-5.2.0-py3-none-android_23_arm64_v8a.whl", hash = "sha256:1ba4187a45ce4cf08f2a8c7e0f8970c36b9aa1770c8a3412a70781c1d80fb145", size = 2763268, upload-time = "2025-12-12T13:19:37.354Z" },
    { url = "https://files.pythonhosted.org/packages/35/8c/55f5c8a2c6b293f5c020be4aa123eaa891e797c514e5eccd8cb042740d37/pypdfium2-5.2.0-py3-none-android_23_armeabi_v7a.whl", hash = "sha256:80c55e10a8c9242f0901d35a9a306dd09accce8e497507bb23fcec017d45fe2e", size = 2301821, upload-time = "2025-12-12T13:19:39.484Z" },
    { url = "https://files.pythonhosted.org/packages/5e/7d/efa013e3795b41c59dd1e472f7201c241232c3a6553be4917e3a26b9f225/pypdfium2-5.2.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:73523ae69cd95c084c1342096893b2143ea73c36fdde35494780ba431e6a7d6e", size = 2816428, upload-time = "2025-12-12T13:19:41.735Z" },
    { url = "https://files.pythonhosted.org/packages/ec/ae/8c30af6ff2ab41a7cb84753ee79dd1e0a8932c9bda9fe19759d69cbbf115/pypdfium2-5.2.0-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:19c501d22ef5eb98e42416d22cc3ac66d4808b436e3d06686392f24d8d9f708d", size = 2939486, upload-time = "2025-12-12T13:19:43.176Z" },
    { url = "https://files.pythonhosted.org/packages/64/64/454a73c49a04c2c290917ad86184e4da959e9e5aba94b3b046328c89be93/pypdfium2-5.2.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ed15a3f58d6ee4905f0d0a731e30b381b457c30689512589c7f57950b0cdcec", size = 2979235, upload-time = "2025-12-12T13:19:44.635Z" },
    { url = "https://files.pythonhosted.org/packages/4e/29/f1cab8e31192dd367dc7b1afa71f45cfcb8ff0b176f1d2a0f528faf04052/pypdfium2-5.2.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:329cd1e9f068e8729e0d0b79a070d6126f52bc48ff1e40505cb207a5e20ce0ba", size = 2763001, upload-time = "2025-12-12T13:19:47.598Z" },
    { url = "https://files.pythonhosted.org/packages/bc/5d/e95fad8fdac960854173469c4b6931d5de5e09d05e6ee7d9756f8b95eef0/pypdfium2-5.2.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:325259759886e66619504df4721fef3b8deabf8a233e4f4a66e0c32ebae60c2f", size = 3057024, upload-time = "2025-12-12T13:19:49.179Z" },
    { url = "https://files.pythonhosted.org/packages/f4/32/468591d017ab67f8142d40f4db8163b6d8bb404fe0d22da75a5c661dc144/pypdfium2-5.2.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5683e8f08ab38ed05e0e59e611451ec74332803d4e78f8c45658ea1d372a17af", size = 3448598, upload-time = "2025-12-12T13:19:50.979Z" },
    { url = "https://files.pythonhosted.org/packages/f9/a5/57b4e389b77ab5f7e9361dc7fc03b5378e678ba81b21e791e85350fbb235/pypdfium2-5.2.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da4815426a5adcf03bf4d2c5f26c0ff8109dbfaf2c3415984689931bc6006ef9", size = 2993946, upload-time = "2025-12-12T13:19:53.154Z" },
    { url = "https://files.pythonhosted.org/packages/84/3a/e03e9978f817632aa56183bb7a4989284086fdd45de3245ead35f147179b/pypdfium2-5.2.0-py3-none-manylinux_2_27_s390x.manylinux_2_28_s390x.whl", hash = "sha256:64bf5c039b2c314dab1fd158bfff99db96299a5b5c6d96fc056071166056f1de", size = 3673148, upload-time = "2025-12-12T13:19:54.528Z" },
    { url = "https://files.pythonhosted.org/packages/13/ee/e581506806553afa4b7939d47bf50dca35c1151b8cc960f4542a6eb135ce/pypdfium2-5.2.0-py3-none-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:76b42a17748ac7dc04d5ef04d0561c6a0a4b546d113ec1d101d59650c6a340f7", size = 2964757, upload-time = "2025-12-12T13:19:56.406Z" },
    { url = "https://files.pythonhosted.org/packages/00/be/3715c652aff30f12284523dd337843d0efe3e721020f0ec303a99ffffd8d/pypdfium2-5.2.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9d4367d471439fae846f0aba91ff9e8d66e524edcf3c8d6e02fe96fa306e13b9", size = 4130319, upload-time = "2025-12-12T13:19:57.889Z" },
    { url = "https://files.pythonhosted.org/packages/b0/0b/28aa2ede9004dd4192266bbad394df0896787f7c7bcfa4d1a6e091ad9a2c/pypdfium2-5.2.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:613f6bb2b47d76b66c0bf2ca581c7c33e3dd9dcb29d65d8c34fef4135f933149", size = 3746488, upload-time = "2025-12-12T13:19:59.469Z" },
    { url = "https://files.pythonhosted.org/packages/bc/04/1b791e1219652bbfc51df6498267d8dcec73ad508b99388b2890902ccd9d/pypdfium2-5.2.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c03fad3f2fa68d358f5dd4deb07e438482fa26fae439c49d127576d969769ca1", size = 4336534, upload-time = "2025-12-12T13:20:01.28Z" },
    { url = "https://files.pythonhosted.org/packages/4f/e3/6f00f963bb702ffd2e3e2d9c7286bc3bb0bebcdfa96ca897d466f66976c6/pypdfium2-5.2.0-py3-none-musllinux_1_2_ppc64le.whl", hash = "sha256:f10be1900ae21879d02d9f4d58c2d2db3a2e6da611736a8e9decc22d1fb02909", size = 4375079, upload-time = "2025-12-12T13:20:03.117Z" },
    { url = "https://files.pythonhosted.org/packages/3a/2a/7ec2b191b5e1b7716a0dfc14e6860e89bb355fb3b94ed0c1d46db526858c/pypdfium2-5.2.0-py3-none-musllinux_1_2_riscv64.whl", hash = "sha256:97c1a126d30378726872f94866e38c055740cae80313638dafd1cd448d05e7c0", size = 3928648, upload-time = "2025-12-12T13:20:05.041Z" },
    { url = "https://files.pythonhosted.org/packages/bf/c3/c6d972fa095ff3ace76f9d3a91ceaf8a9dbbe0d9a5a84ac1d6178a46630e/pypdfium2-5.2.0-py3-none-musllinux_1_2_s390x.whl", hash = "sha256:c369f183a90781b788af9a357a877bc8caddc24801e8346d0bf23f3295f89f3a", size = 4997772, upload-time = "2025-12-12T13:20:06.453Z" },
    { url = "https://files.pythonhosted.org/packages/22/45/2c64584b7a3ca5c4652280a884f4b85b8ed24e27662adeebdc06d991c917/pypdfium2-5.2.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b391f1cceb454934b612a05b54e90f98aafeffe5e73830d71700b17f0812226b", size = 4180046, upload-time = "2025-12-12T13:20:08.715Z" },
    { url = "https://files.pythonhosted.org/packages/d6/99/8d1ff87b626649400e62a2840e6e10fe258443ba518798e071fee4cd86f9/pypdfium2-5.2.0-py3-none-win32.whl", hash = "sha256:c68067938f617c37e4d17b18de7cac231fc7ce0eb7b6653b7283ebe8764d4999", size = 2990175, upload-time = "2025-12-12T13:20:10.241Z" },
    { url = "https://files.pythonhosted.org/packages/93/fc/114fff8895b620aac4984808e93d01b6d7b93e342a1635fcfe2a5f39cf39/pypdfium2-5.2.0-py3-none-win_amd64.whl", hash = "sha256:eb0591b720e8aaeab9475c66d653655ec1be0464b946f3f48a53922e843f0f3b", size = 3098615, upload-time = "2025-12-12T13:20:11.795Z" },
    { url = "https://files.pythonhosted.org/packages/08/97/eb738bff5998760d6e0cbcb7dd04cbf1a95a97b997fac6d4e57562a58992/pypdfium2-5.2.0-py3-none-win_arm64.whl", hash = "sha256:5dd1ef579f19fa3719aee4959b28bda44b1072405756708b5e83df8806a19521", size = 2939479, upload-time = "2025-12-12T13:20:13.815Z" },
]

[[package]]
@@ -468,6 +468,7 @@ ALIYUN_OSS_REGION=ap-southeast-1
ALIYUN_OSS_AUTH_VERSION=v4
# Don't start with '/'. OSS doesn't support leading slash in object names.
ALIYUN_OSS_PATH=your-path
ALIYUN_CLOUDBOX_ID=your-cloudbox-id

# Tencent COS Configuration
#

@@ -491,6 +492,7 @@ HUAWEI_OBS_BUCKET_NAME=your-bucket-name
HUAWEI_OBS_SECRET_KEY=your-secret-key
HUAWEI_OBS_ACCESS_KEY=your-access-key
HUAWEI_OBS_SERVER=your-server-url
HUAWEI_OBS_PATH_STYLE=false

# Volcengine TOS Configuration
#
@@ -270,7 +270,7 @@ services:

  # plugin daemon
  plugin_daemon:
    image: langgenius/dify-plugin-daemon:0.5.1-local
    image: langgenius/dify-plugin-daemon:0.5.2-local
    restart: always
    environment:
      # Use the shared environment variables.

@@ -123,7 +123,7 @@ services:

  # plugin daemon
  plugin_daemon:
    image: langgenius/dify-plugin-daemon:0.5.1-local
    image: langgenius/dify-plugin-daemon:0.5.2-local
    restart: always
    env_file:
      - ./middleware.env

@@ -134,6 +134,7 @@ x-shared-env: &shared-api-worker-env
  ALIYUN_OSS_REGION: ${ALIYUN_OSS_REGION:-ap-southeast-1}
  ALIYUN_OSS_AUTH_VERSION: ${ALIYUN_OSS_AUTH_VERSION:-v4}
  ALIYUN_OSS_PATH: ${ALIYUN_OSS_PATH:-your-path}
  ALIYUN_CLOUDBOX_ID: ${ALIYUN_CLOUDBOX_ID:-your-cloudbox-id}
  TENCENT_COS_BUCKET_NAME: ${TENCENT_COS_BUCKET_NAME:-your-bucket-name}
  TENCENT_COS_SECRET_KEY: ${TENCENT_COS_SECRET_KEY:-your-secret-key}
  TENCENT_COS_SECRET_ID: ${TENCENT_COS_SECRET_ID:-your-secret-id}

@@ -148,6 +149,7 @@ x-shared-env: &shared-api-worker-env
  HUAWEI_OBS_SECRET_KEY: ${HUAWEI_OBS_SECRET_KEY:-your-secret-key}
  HUAWEI_OBS_ACCESS_KEY: ${HUAWEI_OBS_ACCESS_KEY:-your-access-key}
  HUAWEI_OBS_SERVER: ${HUAWEI_OBS_SERVER:-your-server-url}
  HUAWEI_OBS_PATH_STYLE: ${HUAWEI_OBS_PATH_STYLE:-false}
  VOLCENGINE_TOS_BUCKET_NAME: ${VOLCENGINE_TOS_BUCKET_NAME:-your-bucket-name}
  VOLCENGINE_TOS_SECRET_KEY: ${VOLCENGINE_TOS_SECRET_KEY:-your-secret-key}
  VOLCENGINE_TOS_ACCESS_KEY: ${VOLCENGINE_TOS_ACCESS_KEY:-your-access-key}

@@ -939,7 +941,7 @@ services:

  # plugin daemon
  plugin_daemon:
    image: langgenius/dify-plugin-daemon:0.5.1-local
    image: langgenius/dify-plugin-daemon:0.5.2-local
    restart: always
    environment:
      # Use the shared environment variables.
@@ -1,7 +1,6 @@
{
  "recommendations": [
    "bradlc.vscode-tailwindcss",
    "firsttris.vscode-jest-runner",
    "kisstkondoros.vscode-codemetrics"
  ]
}
@@ -99,14 +99,14 @@ If your IDE is VSCode, rename `web/.vscode/settings.example.json` to `web/.vscod

## Test

We use [Jest](https://jestjs.io/) and [React Testing Library](https://testing-library.com/docs/react-testing-library/intro/) for Unit Testing.
We use [Vitest](https://vitest.dev/) and [React Testing Library](https://testing-library.com/docs/react-testing-library/intro/) for Unit Testing.

**📖 Complete Testing Guide**: See [web/testing/testing.md](./testing/testing.md) for detailed testing specifications, best practices, and examples.

Run test:

```bash
pnpm run test
pnpm test
```

### Example Code
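To make the migrated conventions concrete, here is a minimal Vitest + RTL spec in the style the guide describes. This is only a sketch: the inline `Greeting` component is hypothetical, and `vi`, `describe`, and `expect` are assumed to be available as globals via the project's Vitest config.

```tsx
import { fireEvent, render, screen } from '@testing-library/react'

// Hypothetical component, defined inline purely for illustration.
const Greeting = ({ name, onGreet }: { name: string, onGreet: () => void }) => (
  <button onClick={onGreet}>Hello, {name}</button>
)

describe('Greeting', () => {
  beforeEach(() => {
    vi.clearAllMocks() // reset mocks before each test, per project convention
  })

  it('renders the name and reports clicks', () => {
    const onGreet = vi.fn()
    render(<Greeting name="Dify" onGreet={onGreet} />)

    fireEvent.click(screen.getByRole('button', { name: /hello, dify/i }))
    expect(onGreet).toHaveBeenCalledTimes(1)
  })
})
```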
@@ -1,9 +1,41 @@
import { merge, noop } from 'lodash-es'
import { defaultPlan } from '@/app/components/billing/config'
import { baseProviderContextValue } from '@/context/provider-context'
import type { ProviderContextState } from '@/context/provider-context'
import type { Plan, UsagePlanInfo } from '@/app/components/billing/type'

// Avoid being mocked in tests
export const baseProviderContextValue: ProviderContextState = {
  modelProviders: [],
  refreshModelProviders: noop,
  textGenerationModelList: [],
  supportRetrievalMethods: [],
  isAPIKeySet: true,
  plan: defaultPlan,
  isFetchedPlan: false,
  enableBilling: false,
  onPlanInfoChanged: noop,
  enableReplaceWebAppLogo: false,
  modelLoadBalancingEnabled: false,
  datasetOperatorEnabled: false,
  enableEducationPlan: false,
  isEducationWorkspace: false,
  isEducationAccount: false,
  allowRefreshEducationVerify: false,
  educationAccountExpireAt: null,
  isLoadingEducationAccountInfo: false,
  isFetchingEducationAccountInfo: false,
  webappCopyrightEnabled: false,
  licenseLimit: {
    workspace_members: {
      size: 0,
      limit: 0,
    },
  },
  refreshLicenseLimit: noop,
  isAllowTransferWorkspace: false,
  isAllowPublishAsCustomKnowledgePipelineTemplate: false,
}

export const createMockProviderContextValue = (overrides: Partial<ProviderContextState> = {}): ProviderContextState => {
  const merged = merge({}, baseProviderContextValue, overrides)
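A factory like `createMockProviderContextValue` is meant to be consumed with partial overrides. A minimal usage sketch (the import path is an assumption, not taken from the diff):

```ts
import { createMockProviderContextValue } from '@/test-utils/provider-context' // hypothetical path

it('deep-merges overrides into the base context value', () => {
  const value = createMockProviderContextValue({ enableBilling: true })

  expect(value.enableBilling).toBe(true)
  // Fields that are not overridden keep their base defaults.
  expect(value.datasetOperatorEnabled).toBe(false)
})
```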
@@ -1,40 +0,0 @@
/**
 * Shared mock for react-i18next
 *
 * Jest automatically uses this mock when react-i18next is imported in tests.
 * The default behavior returns the translation key as-is, which is suitable
 * for most test scenarios.
 *
 * For tests that need custom translations, you can override with jest.mock():
 *
 * @example
 * jest.mock('react-i18next', () => ({
 *   useTranslation: () => ({
 *     t: (key: string) => {
 *       if (key === 'some.key') return 'Custom translation'
 *       return key
 *     },
 *   }),
 * }))
 */

export const useTranslation = () => ({
  t: (key: string, options?: Record<string, unknown>) => {
    if (options?.returnObjects)
      return [`${key}-feature-1`, `${key}-feature-2`]
    if (options)
      return `${key}:${JSON.stringify(options)}`
    return key
  },
  i18n: {
    language: 'en',
    changeLanguage: jest.fn(),
  },
})

export const Trans = ({ children }: { children?: React.ReactNode }) => children

export const initReactI18next = {
  type: '3rdParty',
  init: jest.fn(),
}
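The file above was a Jest `__mocks__` auto-mock and is deleted in this change. Under Vitest, one common replacement is an inline `vi.mock` factory in the spec file that needs it; a minimal sketch of that equivalent (not part of the diff):

```ts
vi.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string) => key, // return the key as-is, like the old shared mock
    i18n: { language: 'en', changeLanguage: vi.fn() },
  }),
  Trans: ({ children }: { children?: unknown }) => children,
  initReactI18next: { type: '3rdParty', init: vi.fn() },
}))
```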
@@ -1,3 +1,4 @@
import type { Mock } from 'vitest'
/**
 * Document Detail Navigation Fix Verification Test
 *

@@ -10,32 +11,32 @@ import { useRouter } from 'next/navigation'
import { useDocumentDetail, useDocumentMetadata } from '@/service/knowledge/use-document'

// Mock Next.js router
const mockPush = jest.fn()
jest.mock('next/navigation', () => ({
  useRouter: jest.fn(() => ({
const mockPush = vi.fn()
vi.mock('next/navigation', () => ({
  useRouter: vi.fn(() => ({
    push: mockPush,
  })),
}))

// Mock the document service hooks
jest.mock('@/service/knowledge/use-document', () => ({
  useDocumentDetail: jest.fn(),
  useDocumentMetadata: jest.fn(),
  useInvalidDocumentList: jest.fn(() => jest.fn()),
vi.mock('@/service/knowledge/use-document', () => ({
  useDocumentDetail: vi.fn(),
  useDocumentMetadata: vi.fn(),
  useInvalidDocumentList: vi.fn(() => vi.fn()),
}))

// Mock other dependencies
jest.mock('@/context/dataset-detail', () => ({
  useDatasetDetailContext: jest.fn(() => [null]),
vi.mock('@/context/dataset-detail', () => ({
  useDatasetDetailContext: vi.fn(() => [null]),
}))

jest.mock('@/service/use-base', () => ({
  useInvalid: jest.fn(() => jest.fn()),
vi.mock('@/service/use-base', () => ({
  useInvalid: vi.fn(() => vi.fn()),
}))

jest.mock('@/service/knowledge/use-segment', () => ({
  useSegmentListKey: jest.fn(),
  useChildSegmentListKey: jest.fn(),
vi.mock('@/service/knowledge/use-segment', () => ({
  useSegmentListKey: vi.fn(),
  useChildSegmentListKey: vi.fn(),
}))

// Create a minimal version of the DocumentDetail component that includes our fix

@@ -66,10 +67,10 @@ const DocumentDetailWithFix = ({ datasetId, documentId }: { datasetId: string; d

describe('Document Detail Navigation Fix Verification', () => {
  beforeEach(() => {
    jest.clearAllMocks()
    vi.clearAllMocks()

    // Mock successful API responses
    ;(useDocumentDetail as jest.Mock).mockReturnValue({
    ;(useDocumentDetail as Mock).mockReturnValue({
      data: {
        id: 'doc-123',
        name: 'Test Document',

@@ -80,7 +81,7 @@ describe('Document Detail Navigation Fix Verification', () => {
      error: null,
    })

    ;(useDocumentMetadata as jest.Mock).mockReturnValue({
    ;(useDocumentMetadata as Mock).mockReturnValue({
      data: null,
      error: null,
    })
@@ -4,16 +4,17 @@ import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import MailAndPasswordAuth from '@/app/(shareLayout)/webapp-signin/components/mail-and-password-auth'
import CheckCode from '@/app/(shareLayout)/webapp-signin/check-code/page'

const replaceMock = jest.fn()
const backMock = jest.fn()
const replaceMock = vi.fn()
const backMock = vi.fn()
const useSearchParamsMock = vi.fn(() => new URLSearchParams())

jest.mock('next/navigation', () => ({
  usePathname: jest.fn(() => '/chatbot/test-app'),
  useRouter: jest.fn(() => ({
vi.mock('next/navigation', () => ({
  usePathname: vi.fn(() => '/chatbot/test-app'),
  useRouter: vi.fn(() => ({
    replace: replaceMock,
    back: backMock,
  })),
  useSearchParams: jest.fn(),
  useSearchParams: () => useSearchParamsMock(),
}))

const mockStoreState = {

@@ -21,59 +22,55 @@ const mockStoreState = {
  shareCode: 'test-app',
}

const useWebAppStoreMock = jest.fn((selector?: (state: typeof mockStoreState) => any) => {
const useWebAppStoreMock = vi.fn((selector?: (state: typeof mockStoreState) => any) => {
  return selector ? selector(mockStoreState) : mockStoreState
})

jest.mock('@/context/web-app-context', () => ({
vi.mock('@/context/web-app-context', () => ({
  useWebAppStore: (selector?: (state: typeof mockStoreState) => any) => useWebAppStoreMock(selector),
}))

const webAppLoginMock = jest.fn()
const webAppEmailLoginWithCodeMock = jest.fn()
const sendWebAppEMailLoginCodeMock = jest.fn()
const webAppLoginMock = vi.fn()
const webAppEmailLoginWithCodeMock = vi.fn()
const sendWebAppEMailLoginCodeMock = vi.fn()

jest.mock('@/service/common', () => ({
vi.mock('@/service/common', () => ({
  webAppLogin: (...args: any[]) => webAppLoginMock(...args),
  webAppEmailLoginWithCode: (...args: any[]) => webAppEmailLoginWithCodeMock(...args),
  sendWebAppEMailLoginCode: (...args: any[]) => sendWebAppEMailLoginCodeMock(...args),
}))

const fetchAccessTokenMock = jest.fn()
const fetchAccessTokenMock = vi.fn()

jest.mock('@/service/share', () => ({
vi.mock('@/service/share', () => ({
  fetchAccessToken: (...args: any[]) => fetchAccessTokenMock(...args),
}))

const setWebAppAccessTokenMock = jest.fn()
const setWebAppPassportMock = jest.fn()
const setWebAppAccessTokenMock = vi.fn()
const setWebAppPassportMock = vi.fn()

jest.mock('@/service/webapp-auth', () => ({
vi.mock('@/service/webapp-auth', () => ({
  setWebAppAccessToken: (...args: any[]) => setWebAppAccessTokenMock(...args),
  setWebAppPassport: (...args: any[]) => setWebAppPassportMock(...args),
  webAppLogout: jest.fn(),
  webAppLogout: vi.fn(),
}))

jest.mock('@/app/components/signin/countdown', () => () => <div data-testid="countdown" />)
vi.mock('@/app/components/signin/countdown', () => ({ default: () => <div data-testid="countdown" /> }))

jest.mock('@remixicon/react', () => ({
vi.mock('@remixicon/react', () => ({
  RiMailSendFill: () => <div data-testid="mail-icon" />,
  RiArrowLeftLine: () => <div data-testid="arrow-icon" />,
}))

const { useSearchParams } = jest.requireMock('next/navigation') as {
  useSearchParams: jest.Mock
}

beforeEach(() => {
  jest.clearAllMocks()
  vi.clearAllMocks()
})

describe('embedded user id propagation in authentication flows', () => {
  it('passes embedded user id when logging in with email and password', async () => {
    const params = new URLSearchParams()
    params.set('redirect_url', encodeURIComponent('/chatbot/test-app'))
    useSearchParams.mockReturnValue(params)
    useSearchParamsMock.mockReturnValue(params)

    webAppLoginMock.mockResolvedValue({ result: 'success', data: { access_token: 'login-token' } })
    fetchAccessTokenMock.mockResolvedValue({ access_token: 'passport-token' })

@@ -100,7 +97,7 @@ describe('embedded user id propagation in authentication flows', () => {
    params.set('redirect_url', encodeURIComponent('/chatbot/test-app'))
    params.set('email', encodeURIComponent('user@example.com'))
    params.set('token', encodeURIComponent('token-abc'))
    useSearchParams.mockReturnValue(params)
    useSearchParamsMock.mockReturnValue(params)

    webAppEmailLoginWithCodeMock.mockResolvedValue({ result: 'success', data: { access_token: 'code-token' } })
    fetchAccessTokenMock.mockResolvedValue({ access_token: 'passport-token' })
@@ -1,42 +1,42 @@
import React from 'react'
import { render, screen, waitFor } from '@testing-library/react'
import { AccessMode } from '@/models/access-control'

import WebAppStoreProvider, { useWebAppStore } from '@/context/web-app-context'

jest.mock('next/navigation', () => ({
  usePathname: jest.fn(() => '/chatbot/sample-app'),
  useSearchParams: jest.fn(() => {
vi.mock('next/navigation', () => ({
  usePathname: vi.fn(() => '/chatbot/sample-app'),
  useSearchParams: vi.fn(() => {
    const params = new URLSearchParams()
    return params
  }),
}))

jest.mock('@/service/use-share', () => {
  const { AccessMode } = jest.requireActual('@/models/access-control')
  return {
    useGetWebAppAccessModeByCode: jest.fn(() => ({
      isLoading: false,
      data: { accessMode: AccessMode.PUBLIC },
    })),
  }
})

jest.mock('@/app/components/base/chat/utils', () => ({
  getProcessedSystemVariablesFromUrlParams: jest.fn(),
vi.mock('@/service/use-share', () => ({
  useGetWebAppAccessModeByCode: vi.fn(() => ({
    isLoading: false,
    data: { accessMode: AccessMode.PUBLIC },
  })),
}))

const { getProcessedSystemVariablesFromUrlParams: mockGetProcessedSystemVariablesFromUrlParams }
  = jest.requireMock('@/app/components/base/chat/utils') as {
    getProcessedSystemVariablesFromUrlParams: jest.Mock
  }
// Store the mock implementation in a way that survives hoisting
const mockGetProcessedSystemVariablesFromUrlParams = vi.fn()

jest.mock('@/context/global-public-context', () => {
  const mockGlobalStoreState = {
vi.mock('@/app/components/base/chat/utils', () => ({
  getProcessedSystemVariablesFromUrlParams: (...args: any[]) => mockGetProcessedSystemVariablesFromUrlParams(...args),
}))

// Use vi.hoisted to define mock state before vi.mock hoisting
const { mockGlobalStoreState } = vi.hoisted(() => ({
  mockGlobalStoreState: {
    isGlobalPending: false,
    setIsGlobalPending: jest.fn(),
    setIsGlobalPending: vi.fn(),
    systemFeatures: {},
    setSystemFeatures: jest.fn(),
  }
    setSystemFeatures: vi.fn(),
  },
}))

vi.mock('@/context/global-public-context', () => {
  const useGlobalPublicStore = Object.assign(
    (selector?: (state: typeof mockGlobalStoreState) => any) =>
      selector ? selector(mockGlobalStoreState) : mockGlobalStoreState,

@@ -56,21 +56,6 @@ jest.mock('@/context/global-public-context', () => {
  }
})

const {
  useGlobalPublicStore: useGlobalPublicStoreMock,
} = jest.requireMock('@/context/global-public-context') as {
  useGlobalPublicStore: ((selector?: (state: any) => any) => any) & {
    setState: (updater: any) => void
    __mockState: {
      isGlobalPending: boolean
      setIsGlobalPending: jest.Mock
      systemFeatures: Record<string, unknown>
      setSystemFeatures: jest.Mock
    }
  }
}
const mockGlobalStoreState = useGlobalPublicStoreMock.__mockState

const TestConsumer = () => {
  const embeddedUserId = useWebAppStore(state => state.embeddedUserId)
  const embeddedConversationId = useWebAppStore(state => state.embeddedConversationId)
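The `vi.hoisted` change above addresses a Vitest-specific pitfall: `vi.mock` factory calls are hoisted above all other top-level statements, so any state a factory closes over must be created inside `vi.hoisted` to exist when the factory runs. A self-contained sketch of the pattern, assuming Vitest globals (the module path is hypothetical):

```ts
// State the factory needs is created via vi.hoisted, because the vi.mock
// call below is hoisted above this file's other top-level statements.
const { mockState } = vi.hoisted(() => ({
  mockState: { loggedIn: false },
}))

vi.mock('@/lib/session', () => ({ // hypothetical module path
  getSession: () => mockState,
}))

it('reads hoisted state through the mock', async () => {
  const { getSession } = await import('@/lib/session')
  mockState.loggedIn = true
  expect(getSession().loggedIn).toBe(true)
})
```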
@@ -1,10 +1,9 @@
import React from 'react'
import { fireEvent, render, screen } from '@testing-library/react'
import '@testing-library/jest-dom'
import CommandSelector from '../../app/components/goto-anything/command-selector'
import type { ActionItem } from '../../app/components/goto-anything/actions/types'

jest.mock('cmdk', () => ({
vi.mock('cmdk', () => ({
  Command: {
    Group: ({ children, className }: any) => <div className={className}>{children}</div>,
    Item: ({ children, onSelect, value, className }: any) => (

@@ -27,36 +26,36 @@ describe('CommandSelector', () => {
      shortcut: '@app',
      title: 'Search Applications',
      description: 'Search apps',
      search: jest.fn(),
      search: vi.fn(),
    },
    knowledge: {
      key: '@knowledge',
      shortcut: '@kb',
      title: 'Search Knowledge',
      description: 'Search knowledge bases',
      search: jest.fn(),
      search: vi.fn(),
    },
    plugin: {
      key: '@plugin',
      shortcut: '@plugin',
      title: 'Search Plugins',
      description: 'Search plugins',
      search: jest.fn(),
      search: vi.fn(),
    },
    node: {
      key: '@node',
      shortcut: '@node',
      title: 'Search Nodes',
      description: 'Search workflow nodes',
      search: jest.fn(),
      search: vi.fn(),
    },
  }

  const mockOnCommandSelect = jest.fn()
  const mockOnCommandValueChange = jest.fn()
  const mockOnCommandSelect = vi.fn()
  const mockOnCommandValueChange = vi.fn()

  beforeEach(() => {
    jest.clearAllMocks()
    vi.clearAllMocks()
  })

  describe('Basic Rendering', () => {
@@ -1,11 +1,12 @@
import type { Mock } from 'vitest'
import type { ActionItem } from '../../app/components/goto-anything/actions/types'

// Mock the entire actions module to avoid import issues
jest.mock('../../app/components/goto-anything/actions', () => ({
  matchAction: jest.fn(),
vi.mock('../../app/components/goto-anything/actions', () => ({
  matchAction: vi.fn(),
}))

jest.mock('../../app/components/goto-anything/actions/commands/registry')
vi.mock('../../app/components/goto-anything/actions/commands/registry')

// Import after mocking to get mocked version
import { matchAction } from '../../app/components/goto-anything/actions'

@@ -39,7 +40,7 @@ const actualMatchAction = (query: string, actions: Record<string, ActionItem>) =
}

// Replace mock with actual implementation
;(matchAction as jest.Mock).mockImplementation(actualMatchAction)
;(matchAction as Mock).mockImplementation(actualMatchAction)

describe('matchAction Logic', () => {
  const mockActions: Record<string, ActionItem> = {

@@ -48,27 +49,27 @@ describe('matchAction Logic', () => {
      shortcut: '@a',
      title: 'Search Applications',
      description: 'Search apps',
      search: jest.fn(),
      search: vi.fn(),
    },
    knowledge: {
      key: '@knowledge',
      shortcut: '@kb',
      title: 'Search Knowledge',
      description: 'Search knowledge bases',
      search: jest.fn(),
      search: vi.fn(),
    },
    slash: {
      key: '/',
      shortcut: '/',
      title: 'Commands',
      description: 'Execute commands',
      search: jest.fn(),
      search: vi.fn(),
    },
  }

  beforeEach(() => {
    jest.clearAllMocks()
    ;(slashCommandRegistry.getAllCommands as jest.Mock).mockReturnValue([
    vi.clearAllMocks()
    ;(slashCommandRegistry.getAllCommands as Mock).mockReturnValue([
      { name: 'docs', mode: 'direct' },
      { name: 'community', mode: 'direct' },
      { name: 'feedback', mode: 'direct' },

@@ -188,7 +189,7 @@ describe('matchAction Logic', () => {

  describe('Mode-based Filtering', () => {
    it('should filter direct mode commands from matching', () => {
      ;(slashCommandRegistry.getAllCommands as jest.Mock).mockReturnValue([
      ;(slashCommandRegistry.getAllCommands as Mock).mockReturnValue([
        { name: 'test', mode: 'direct' },
      ])

@@ -197,7 +198,7 @@ describe('matchAction Logic', () => {
    })

    it('should allow submenu mode commands to match', () => {
      ;(slashCommandRegistry.getAllCommands as jest.Mock).mockReturnValue([
      ;(slashCommandRegistry.getAllCommands as Mock).mockReturnValue([
        { name: 'test', mode: 'submenu' },
      ])

@@ -206,7 +207,7 @@ describe('matchAction Logic', () => {
    })

    it('should treat undefined mode as submenu', () => {
      ;(slashCommandRegistry.getAllCommands as jest.Mock).mockReturnValue([
      ;(slashCommandRegistry.getAllCommands as Mock).mockReturnValue([
        { name: 'test' }, // No mode specified
      ])

@@ -227,7 +228,7 @@ describe('matchAction Logic', () => {
    })

    it('should handle empty command list', () => {
      ;(slashCommandRegistry.getAllCommands as jest.Mock).mockReturnValue([])
      ;(slashCommandRegistry.getAllCommands as Mock).mockReturnValue([])
      const result = matchAction('/anything', mockActions)
      expect(result).toBeUndefined()
    })
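The `as Mock` casts above work but discard the function's own signature. Vitest also ships a `vi.mocked()` helper that keeps the original typing; a small sketch using the module mocked in this file:

```ts
import { vi } from 'vitest'
import { matchAction } from '../../app/components/goto-anything/actions'

vi.mock('../../app/components/goto-anything/actions', () => ({
  matchAction: vi.fn(),
}))

// vi.mocked() narrows matchAction to its mocked type, no `as Mock` cast needed.
const mockMatchAction = vi.mocked(matchAction)
mockMatchAction.mockReturnValue(undefined)
```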
@@ -1,6 +1,5 @@
import React from 'react'
import { render, screen } from '@testing-library/react'
import '@testing-library/jest-dom'

// Type alias for search mode
type SearchMode = 'scopes' | 'commands' | null
@@ -1,3 +1,4 @@
import type { MockedFunction } from 'vitest'
/**
 * Test GotoAnything search error handling mechanisms
 *

@@ -14,33 +15,33 @@ import { fetchAppList } from '@/service/apps'
import { fetchDatasets } from '@/service/datasets'

// Mock API functions
jest.mock('@/service/base', () => ({
  postMarketplace: jest.fn(),
vi.mock('@/service/base', () => ({
  postMarketplace: vi.fn(),
}))

jest.mock('@/service/apps', () => ({
  fetchAppList: jest.fn(),
vi.mock('@/service/apps', () => ({
  fetchAppList: vi.fn(),
}))

jest.mock('@/service/datasets', () => ({
  fetchDatasets: jest.fn(),
vi.mock('@/service/datasets', () => ({
  fetchDatasets: vi.fn(),
}))

const mockPostMarketplace = postMarketplace as jest.MockedFunction<typeof postMarketplace>
const mockFetchAppList = fetchAppList as jest.MockedFunction<typeof fetchAppList>
const mockFetchDatasets = fetchDatasets as jest.MockedFunction<typeof fetchDatasets>
const mockPostMarketplace = postMarketplace as MockedFunction<typeof postMarketplace>
const mockFetchAppList = fetchAppList as MockedFunction<typeof fetchAppList>
const mockFetchDatasets = fetchDatasets as MockedFunction<typeof fetchDatasets>

describe('GotoAnything Search Error Handling', () => {
  beforeEach(() => {
    jest.clearAllMocks()
    vi.clearAllMocks()
    // Suppress console.warn for clean test output
    jest.spyOn(console, 'warn').mockImplementation(() => {
    vi.spyOn(console, 'warn').mockImplementation(() => {
      // Suppress console.warn for clean test output
    })
  })

  afterEach(() => {
    jest.restoreAllMocks()
    vi.restoreAllMocks()
  })

  describe('@plugin search error handling', () => {
@@ -1,17 +1,16 @@
import '@testing-library/jest-dom'
import { slashCommandRegistry } from '../../app/components/goto-anything/actions/commands/registry'
import type { SlashCommandHandler } from '../../app/components/goto-anything/actions/commands/types'

// Mock the registry
jest.mock('../../app/components/goto-anything/actions/commands/registry')
vi.mock('../../app/components/goto-anything/actions/commands/registry')

describe('Slash Command Dual-Mode System', () => {
  const mockDirectCommand: SlashCommandHandler = {
    name: 'docs',
    description: 'Open documentation',
    mode: 'direct',
    execute: jest.fn(),
    search: jest.fn().mockResolvedValue([
    execute: vi.fn(),
    search: vi.fn().mockResolvedValue([
      {
        id: 'docs',
        title: 'Documentation',

@@ -20,15 +19,15 @@ describe('Slash Command Dual-Mode System', () => {
        data: { command: 'navigation.docs', args: {} },
      },
    ]),
    register: jest.fn(),
    unregister: jest.fn(),
    register: vi.fn(),
    unregister: vi.fn(),
  }

  const mockSubmenuCommand: SlashCommandHandler = {
    name: 'theme',
    description: 'Change theme',
    mode: 'submenu',
    search: jest.fn().mockResolvedValue([
    search: vi.fn().mockResolvedValue([
      {
        id: 'theme-light',
        title: 'Light Theme',

@@ -44,18 +43,18 @@ describe('Slash Command Dual-Mode System', () => {
        data: { command: 'theme.set', args: { theme: 'dark' } },
      },
    ]),
    register: jest.fn(),
    unregister: jest.fn(),
    register: vi.fn(),
    unregister: vi.fn(),
  }

  beforeEach(() => {
    jest.clearAllMocks()
    ;(slashCommandRegistry as any).findCommand = jest.fn((name: string) => {
    vi.clearAllMocks()
    ;(slashCommandRegistry as any).findCommand = vi.fn((name: string) => {
      if (name === 'docs') return mockDirectCommand
      if (name === 'theme') return mockSubmenuCommand
      return null
    })
    ;(slashCommandRegistry as any).getAllCommands = jest.fn(() => [
    ;(slashCommandRegistry as any).getAllCommands = vi.fn(() => [
      mockDirectCommand,
      mockSubmenuCommand,
    ])

@@ -63,8 +62,8 @@ describe('Slash Command Dual-Mode System', () => {

  describe('Direct Mode Commands', () => {
    it('should execute immediately when selected', () => {
      const mockSetShow = jest.fn()
      const mockSetSearchQuery = jest.fn()
      const mockSetShow = vi.fn()
      const mockSetSearchQuery = vi.fn()

      // Simulate command selection
      const handler = slashCommandRegistry.findCommand('docs')

@@ -88,7 +87,7 @@ describe('Slash Command Dual-Mode System', () => {
    })

    it('should close modal after execution', () => {
      const mockModalClose = jest.fn()
      const mockModalClose = vi.fn()

      const handler = slashCommandRegistry.findCommand('docs')
      if (handler?.mode === 'direct' && handler.execute) {

@@ -118,7 +117,7 @@ describe('Slash Command Dual-Mode System', () => {
    })

    it('should keep modal open for selection', () => {
      const mockModalClose = jest.fn()
      const mockModalClose = vi.fn()

      const handler = slashCommandRegistry.findCommand('theme')
      // For submenu mode, modal should not close immediately

@@ -141,12 +140,12 @@ describe('Slash Command Dual-Mode System', () => {
      const commandWithoutMode: SlashCommandHandler = {
        name: 'test',
        description: 'Test command',
        search: jest.fn(),
        register: jest.fn(),
        unregister: jest.fn(),
        search: vi.fn(),
        register: vi.fn(),
        unregister: vi.fn(),
      }

      ;(slashCommandRegistry as any).findCommand = jest.fn(() => commandWithoutMode)
      ;(slashCommandRegistry as any).findCommand = vi.fn(() => commandWithoutMode)

      const handler = slashCommandRegistry.findCommand('test')
      // Default behavior should be submenu when mode is not specified

@@ -189,7 +188,7 @@ describe('Slash Command Dual-Mode System', () => {
  describe('Command Registration', () => {
    it('should register both direct and submenu commands', () => {
      mockDirectCommand.register?.({})
      mockSubmenuCommand.register?.({ setTheme: jest.fn() })
      mockSubmenuCommand.register?.({ setTheme: vi.fn() })

      expect(mockDirectCommand.register).toHaveBeenCalled()
      expect(mockSubmenuCommand.register).toHaveBeenCalled()
@@ -15,12 +15,12 @@ import {
} from '@/utils/navigation'

// Mock router for testing
const mockPush = jest.fn()
const mockPush = vi.fn()
const mockRouter = { push: mockPush }

describe('Navigation Utilities', () => {
  beforeEach(() => {
    jest.clearAllMocks()
    vi.clearAllMocks()
  })

  describe('createNavigationPath', () => {

@@ -63,7 +63,7 @@ describe('Navigation Utilities', () => {
      configurable: true,
    })

    const consoleSpy = jest.spyOn(console, 'warn').mockImplementation()
    const consoleSpy = vi.spyOn(console, 'warn').mockImplementation(() => { /* noop */ })
    const path = createNavigationPath('/datasets/123/documents')

    expect(path).toBe('/datasets/123/documents')

@@ -134,7 +134,7 @@ describe('Navigation Utilities', () => {
      configurable: true,
    })

    const consoleSpy = jest.spyOn(console, 'warn').mockImplementation()
    const consoleSpy = vi.spyOn(console, 'warn').mockImplementation(() => { /* noop */ })
    const params = extractQueryParams(['page', 'limit'])

    expect(params).toEqual({})

@@ -169,11 +169,11 @@ describe('Navigation Utilities', () => {
  test('handles errors gracefully', () => {
    // Mock URLSearchParams to throw an error
    const originalURLSearchParams = globalThis.URLSearchParams
    globalThis.URLSearchParams = jest.fn(() => {
    globalThis.URLSearchParams = vi.fn(() => {
      throw new Error('URLSearchParams error')
    }) as any

    const consoleSpy = jest.spyOn(console, 'warn').mockImplementation()
    const consoleSpy = vi.spyOn(console, 'warn').mockImplementation(() => { /* noop */ })
    const path = createNavigationPathWithParams('/datasets/123/documents', { page: 1 })

    expect(path).toBe('/datasets/123/documents')
@@ -76,7 +76,7 @@ const setupMockEnvironment = (storedTheme: string | null, systemPrefersDark = fa
    return mediaQueryList
  }

  jest.spyOn(window, 'matchMedia').mockImplementation(mockMatchMedia)
  vi.spyOn(window, 'matchMedia').mockImplementation(mockMatchMedia)
}

// Helper function to create timing page component

@@ -240,8 +240,8 @@ const TestThemeProvider = ({ children }: { children: React.ReactNode }) => (

describe('Real Browser Environment Dark Mode Flicker Test', () => {
  beforeEach(() => {
    jest.restoreAllMocks()
    jest.clearAllMocks()
    vi.restoreAllMocks()
    vi.clearAllMocks()
    if (typeof window !== 'undefined') {
      try {
        window.localStorage.clear()

@@ -424,12 +424,12 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
    setupMockEnvironment(null)

    const mockStorage = {
      getItem: jest.fn(() => {
      getItem: vi.fn(() => {
        throw new Error('LocalStorage access denied')
      }),
      setItem: jest.fn(),
      removeItem: jest.fn(),
      clear: jest.fn(),
      setItem: vi.fn(),
      removeItem: vi.fn(),
      clear: vi.fn(),
    }

    Object.defineProperty(window, 'localStorage', {
@@ -1,15 +1,16 @@
import type { Mock } from 'vitest'
import { BlockEnum } from '@/app/components/workflow/types'
import { useWorkflowStore } from '@/app/components/workflow/store'

// Type for mocked store
type MockWorkflowStore = {
  showOnboarding: boolean
  setShowOnboarding: jest.Mock
  setShowOnboarding: Mock
  hasShownOnboarding: boolean
  setHasShownOnboarding: jest.Mock
  setHasShownOnboarding: Mock
  hasSelectedStartNode: boolean
  setHasSelectedStartNode: jest.Mock
  setShouldAutoOpenStartNodeSelector: jest.Mock
  setHasSelectedStartNode: Mock
  setShouldAutoOpenStartNodeSelector: Mock
  notInitialWorkflow: boolean
}

@@ -20,11 +21,11 @@ type MockNode = {
}

// Mock zustand store
jest.mock('@/app/components/workflow/store')
vi.mock('@/app/components/workflow/store')

// Mock ReactFlow store
const mockGetNodes = jest.fn()
jest.mock('reactflow', () => ({
const mockGetNodes = vi.fn()
vi.mock('reactflow', () => ({
  useStoreApi: () => ({
    getState: () => ({
      getNodes: mockGetNodes,

@@ -33,16 +34,16 @@ jest.mock('reactflow', () => ({
}))

describe('Workflow Onboarding Integration Logic', () => {
  const mockSetShowOnboarding = jest.fn()
  const mockSetHasSelectedStartNode = jest.fn()
  const mockSetHasShownOnboarding = jest.fn()
  const mockSetShouldAutoOpenStartNodeSelector = jest.fn()
  const mockSetShowOnboarding = vi.fn()
  const mockSetHasSelectedStartNode = vi.fn()
  const mockSetHasShownOnboarding = vi.fn()
  const mockSetShouldAutoOpenStartNodeSelector = vi.fn()

  beforeEach(() => {
    jest.clearAllMocks()
    vi.clearAllMocks()

    // Mock store implementation
    ;(useWorkflowStore as jest.Mock).mockReturnValue({
    ;(useWorkflowStore as Mock).mockReturnValue({
      showOnboarding: false,
      setShowOnboarding: mockSetShowOnboarding,
      hasSelectedStartNode: false,

@@ -373,12 +374,12 @@ describe('Workflow Onboarding Integration Logic', () => {
  it('should trigger onboarding for new workflow when draft does not exist', () => {
    // Simulate the error handling logic from use-workflow-init.ts
    const error = {
      json: jest.fn().mockResolvedValue({ code: 'draft_workflow_not_exist' }),
      json: vi.fn().mockResolvedValue({ code: 'draft_workflow_not_exist' }),
      bodyUsed: false,
    }

    const mockWorkflowStore = {
      setState: jest.fn(),
      setState: vi.fn(),
    }

    // Simulate error handling

@@ -404,7 +405,7 @@ describe('Workflow Onboarding Integration Logic', () => {
  it('should not trigger onboarding for existing workflows', () => {
    // Simulate successful draft fetch
    const mockWorkflowStore = {
      setState: jest.fn(),
      setState: vi.fn(),
    }

    // Normal initialization path should not set showOnboarding: true

@@ -419,7 +420,7 @@ describe('Workflow Onboarding Integration Logic', () => {
  })

  it('should create empty draft with proper structure', () => {
    const mockSyncWorkflowDraft = jest.fn()
    const mockSyncWorkflowDraft = vi.fn()
    const appId = 'test-app-id'

    // Simulate the syncWorkflowDraft call from use-workflow-init.ts

@@ -467,7 +468,7 @@ describe('Workflow Onboarding Integration Logic', () => {
    mockGetNodes.mockReturnValue([])

    // Mock store with proper state for auto-detection
    ;(useWorkflowStore as jest.Mock).mockReturnValue({
    ;(useWorkflowStore as Mock).mockReturnValue({
      showOnboarding: false,
      hasShownOnboarding: false,
      notInitialWorkflow: false,

@@ -550,7 +551,7 @@ describe('Workflow Onboarding Integration Logic', () => {
    mockGetNodes.mockReturnValue([])

    // Mock store with hasShownOnboarding = true
    ;(useWorkflowStore as jest.Mock).mockReturnValue({
    ;(useWorkflowStore as Mock).mockReturnValue({
      showOnboarding: false,
      hasShownOnboarding: true, // Already shown in this session
      notInitialWorkflow: false,

@@ -584,7 +585,7 @@ describe('Workflow Onboarding Integration Logic', () => {
    mockGetNodes.mockReturnValue([])

    // Mock store with notInitialWorkflow = true (initial creation)
    ;(useWorkflowStore as jest.Mock).mockReturnValue({
    ;(useWorkflowStore as Mock).mockReturnValue({
      showOnboarding: false,
      hasShownOnboarding: false,
      notInitialWorkflow: true, // Initial workflow creation
@@ -19,7 +19,7 @@ function setupEnvironment(value?: string) {
    delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT

  // Clear module cache to force re-evaluation
  jest.resetModules()
  vi.resetModules()
}

function restoreEnvironment() {

@@ -28,11 +28,11 @@ function restoreEnvironment() {
  else
    delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT

  jest.resetModules()
  vi.resetModules()
}

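The `vi.resetModules()` calls above clear the module registry, which only affects modules imported afterwards. That is why the tests later in this file also switch from `require()` to `await import()`: the dynamic import re-executes the module's top-level env parsing after the reset. A minimal sketch of the pattern, mirroring the tests below:

```ts
it('re-evaluates config under a new env value', async () => {
  process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = '25'
  vi.resetModules() // drop the module cache so the next import re-runs @/config

  // The import happens after the reset, so module-level parsing of
  // process.env runs again and picks up the new value.
  const { MAX_PARALLEL_LIMIT } = await import('@/config')
  expect(MAX_PARALLEL_LIMIT).toBe(25)
})
```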
// Mock i18next with proper implementation
|
||||
jest.mock('react-i18next', () => ({
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => {
|
||||
if (key.includes('MaxParallelismTitle')) return 'Max Parallelism'
|
||||
|
|
@ -45,20 +45,20 @@ jest.mock('react-i18next', () => ({
|
|||
}),
|
||||
initReactI18next: {
|
||||
type: '3rdParty',
|
||||
init: jest.fn(),
|
||||
init: vi.fn(),
|
||||
},
|
||||
}))
|
||||
|
||||
// Mock i18next module completely to prevent initialization issues
|
||||
jest.mock('i18next', () => ({
|
||||
use: jest.fn().mockReturnThis(),
|
||||
init: jest.fn().mockReturnThis(),
|
||||
t: jest.fn(key => key),
|
||||
vi.mock('i18next', () => ({
|
||||
use: vi.fn().mockReturnThis(),
|
||||
init: vi.fn().mockReturnThis(),
|
||||
t: vi.fn(key => key),
|
||||
isInitialized: true,
|
||||
}))
|
||||
|
||||
// Mock the useConfig hook
|
||||
jest.mock('@/app/components/workflow/nodes/iteration/use-config', () => ({
|
||||
vi.mock('@/app/components/workflow/nodes/iteration/use-config', () => ({
|
||||
__esModule: true,
|
||||
default: () => ({
|
||||
inputs: {
|
||||
|
|
@@ -66,82 +66,39 @@ jest.mock('@/app/components/workflow/nodes/iteration/use-config', () => ({
      parallel_nums: 5,
      error_handle_mode: 'terminated',
    },
    changeParallel: jest.fn(),
    changeParallelNums: jest.fn(),
    changeErrorHandleMode: jest.fn(),
    changeParallel: vi.fn(),
    changeParallelNums: vi.fn(),
    changeErrorHandleMode: vi.fn(),
  }),
}))

// Mock other components
jest.mock('@/app/components/workflow/nodes/_base/components/variable/var-reference-picker', () => {
  return function MockVarReferencePicker() {
vi.mock('@/app/components/workflow/nodes/_base/components/variable/var-reference-picker', () => ({
  default: function MockVarReferencePicker() {
    return <div data-testid="var-reference-picker">VarReferencePicker</div>
  }
})
  },
}))

jest.mock('@/app/components/workflow/nodes/_base/components/split', () => {
  return function MockSplit() {
vi.mock('@/app/components/workflow/nodes/_base/components/split', () => ({
  default: function MockSplit() {
    return <div data-testid="split">Split</div>
  }
})
  },
}))

jest.mock('@/app/components/workflow/nodes/_base/components/field', () => {
  return function MockField({ title, children }: { title: string, children: React.ReactNode }) {
vi.mock('@/app/components/workflow/nodes/_base/components/field', () => ({
  default: function MockField({ title, children }: { title: string, children: React.ReactNode }) {
    return (
      <div data-testid="field">
        <label>{title}</label>
        {children}
      </div>
    )
  }
})
  },
}))

jest.mock('@/app/components/base/switch', () => {
  return function MockSwitch({ defaultValue }: { defaultValue: boolean }) {
    return <input type="checkbox" defaultChecked={defaultValue} data-testid="switch" />
  }
})

jest.mock('@/app/components/base/select', () => {
  return function MockSelect() {
    return <select data-testid="select">Select</select>
  }
})

// Use defaultValue to avoid controlled input warnings
jest.mock('@/app/components/base/slider', () => {
  return function MockSlider({ value, max, min }: { value: number, max: number, min: number }) {
    return (
      <input
        type="range"
        defaultValue={value}
        max={max}
        min={min}
        data-testid="slider"
        data-max={max}
        data-min={min}
        readOnly
      />
    )
  }
})

// Use defaultValue to avoid controlled input warnings
jest.mock('@/app/components/base/input', () => {
  return function MockInput({ type, max, min, value }: { type: string, max: number, min: number, value: number }) {
    return (
      <input
        type={type}
        defaultValue={value}
        max={max}
        min={min}
        data-testid="number-input"
        data-max={max}
        data-min={min}
        readOnly
      />
    )
  }
})
const getParallelControls = () => ({
  numberInput: screen.getByRole('spinbutton'),
  slider: screen.getByRole('slider'),
})

describe('MAX_PARALLEL_LIMIT Configuration Bug', () => {
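The bulk of this hunk is the factory-shape change: a Jest module factory could return a component function directly, but a Vitest factory must return the module record, so a default export needs an explicit `default` key. A minimal sketch with a hypothetical component path (`./some-component` is a stand-in, not a module in the repo):

```tsx
vi.mock('./some-component', () => ({
  default: function MockSomeComponent() {
    return <div data-testid="some-component" />
  },
}))
```

The deleted switch/select/slider/input mocks follow from the same rewrite: the spec now renders the real base components and queries them by ARIA role through `getParallelControls`, which is why the assertions further down move from `data-max` test hooks to the real `max` and `aria-valuemax` attributes.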
@@ -160,7 +117,7 @@ describe('MAX_PARALLEL_LIMIT Configuration Bug', () => {
  }

  beforeEach(() => {
    jest.clearAllMocks()
    vi.clearAllMocks()
  })

  afterEach(() => {
@@ -172,115 +129,114 @@ describe('MAX_PARALLEL_LIMIT Configuration Bug', () => {
  })

  describe('Environment Variable Parsing', () => {
    it('should parse MAX_PARALLEL_LIMIT from NEXT_PUBLIC_MAX_PARALLEL_LIMIT environment variable', () => {
    it('should parse MAX_PARALLEL_LIMIT from NEXT_PUBLIC_MAX_PARALLEL_LIMIT environment variable', async () => {
      setupEnvironment('25')
      const { MAX_PARALLEL_LIMIT } = require('@/config')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      expect(MAX_PARALLEL_LIMIT).toBe(25)
    })

    it('should fallback to default when environment variable is not set', () => {
    it('should fallback to default when environment variable is not set', async () => {
      setupEnvironment() // No environment variable
      const { MAX_PARALLEL_LIMIT } = require('@/config')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      expect(MAX_PARALLEL_LIMIT).toBe(10)
    })

    it('should handle invalid environment variable values', () => {
    it('should handle invalid environment variable values', async () => {
      setupEnvironment('invalid')
      const { MAX_PARALLEL_LIMIT } = require('@/config')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      // Should fall back to default when parsing fails
      expect(MAX_PARALLEL_LIMIT).toBe(10)
    })

    it('should handle empty environment variable', () => {
    it('should handle empty environment variable', async () => {
      setupEnvironment('')
      const { MAX_PARALLEL_LIMIT } = require('@/config')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      // Should fall back to default when empty
      expect(MAX_PARALLEL_LIMIT).toBe(10)
    })

    // Edge cases for boundary values
    it('should clamp MAX_PARALLEL_LIMIT to MIN when env is 0 or negative', () => {
    it('should clamp MAX_PARALLEL_LIMIT to MIN when env is 0 or negative', async () => {
      setupEnvironment('0')
      let { MAX_PARALLEL_LIMIT } = require('@/config')
      let { MAX_PARALLEL_LIMIT } = await import('@/config')
      expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default

      setupEnvironment('-5')
      ;({ MAX_PARALLEL_LIMIT } = require('@/config'))
      ;({ MAX_PARALLEL_LIMIT } = await import('@/config'))
      expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default
    })

    it('should handle float numbers by parseInt behavior', () => {
    it('should handle float numbers by parseInt behavior', async () => {
      setupEnvironment('12.7')
      const { MAX_PARALLEL_LIMIT } = require('@/config')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      // parseInt truncates to integer
      expect(MAX_PARALLEL_LIMIT).toBe(12)
    })
  })

  describe('UI Component Integration (Main Fix Verification)', () => {
    it('should render iteration panel with environment-configured max value', () => {
    it('should render iteration panel with environment-configured max value', async () => {
      // Set environment variable to a different value
      setupEnvironment('30')

      // Import Panel after setting environment
      const Panel = require('@/app/components/workflow/nodes/iteration/panel').default
      const { MAX_PARALLEL_LIMIT } = require('@/config')
      const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      render(
        <Panel
          id="test-node"
          // @ts-expect-error key type mismatch
          data={mockNodeData.data}
        />,
      )

      // Behavior-focused assertion: UI max should equal MAX_PARALLEL_LIMIT
      const numberInput = screen.getByTestId('number-input')
      expect(numberInput).toHaveAttribute('data-max', String(MAX_PARALLEL_LIMIT))

      const slider = screen.getByTestId('slider')
      expect(slider).toHaveAttribute('data-max', String(MAX_PARALLEL_LIMIT))
      const { numberInput, slider } = getParallelControls()
      expect(numberInput).toHaveAttribute('max', String(MAX_PARALLEL_LIMIT))
      expect(slider).toHaveAttribute('aria-valuemax', String(MAX_PARALLEL_LIMIT))

      // Verify the actual values
      expect(MAX_PARALLEL_LIMIT).toBe(30)
      expect(numberInput.getAttribute('data-max')).toBe('30')
      expect(slider.getAttribute('data-max')).toBe('30')
      expect(numberInput.getAttribute('max')).toBe('30')
      expect(slider.getAttribute('aria-valuemax')).toBe('30')
    })

    it('should maintain UI consistency with different environment values', () => {
    it('should maintain UI consistency with different environment values', async () => {
      setupEnvironment('15')
      const Panel = require('@/app/components/workflow/nodes/iteration/panel').default
      const { MAX_PARALLEL_LIMIT } = require('@/config')
      const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      render(
        <Panel
          id="test-node"
          // @ts-expect-error key type mismatch
          data={mockNodeData.data}
        />,
      )

      // Both input and slider should use the same max value from MAX_PARALLEL_LIMIT
      const numberInput = screen.getByTestId('number-input')
      const slider = screen.getByTestId('slider')
      const { numberInput, slider } = getParallelControls()

      expect(numberInput.getAttribute('data-max')).toBe(slider.getAttribute('data-max'))
      expect(numberInput.getAttribute('data-max')).toBe(String(MAX_PARALLEL_LIMIT))
      expect(numberInput.getAttribute('max')).toBe(slider.getAttribute('aria-valuemax'))
      expect(numberInput.getAttribute('max')).toBe(String(MAX_PARALLEL_LIMIT))
    })
  })

  describe('Legacy Constant Verification (For Transition Period)', () => {
    // Marked as transition/deprecation tests
    it('should maintain MAX_ITERATION_PARALLEL_NUM for backward compatibility', () => {
      const { MAX_ITERATION_PARALLEL_NUM } = require('@/app/components/workflow/constants')
    it('should maintain MAX_ITERATION_PARALLEL_NUM for backward compatibility', async () => {
      const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
      expect(typeof MAX_ITERATION_PARALLEL_NUM).toBe('number')
      expect(MAX_ITERATION_PARALLEL_NUM).toBe(10) // Hardcoded legacy value
    })

    it('should demonstrate MAX_PARALLEL_LIMIT vs legacy constant difference', () => {
    it('should demonstrate MAX_PARALLEL_LIMIT vs legacy constant difference', async () => {
      setupEnvironment('50')
      const { MAX_PARALLEL_LIMIT } = require('@/config')
      const { MAX_ITERATION_PARALLEL_NUM } = require('@/app/components/workflow/constants')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')

      // MAX_PARALLEL_LIMIT is configurable, MAX_ITERATION_PARALLEL_NUM is not
      expect(MAX_PARALLEL_LIMIT).toBe(50)
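Every test body in this hunk makes the same move: synchronous `require` calls become `await import(...)` and the callbacks turn `async`, since Vitest runs specs as ES modules where `require` is not available. In miniature, mirroring the first test above:

```ts
// Before (CommonJS, Jest):
// const { MAX_PARALLEL_LIMIT } = require('@/config')

// After (ESM, Vitest): the callback becomes async so it can await the
// dynamic import that re-evaluates the freshly reset module.
it('re-reads config after the env changes', async () => {
  setupEnvironment('25')
  const { MAX_PARALLEL_LIMIT } = await import('@/config')
  expect(MAX_PARALLEL_LIMIT).toBe(25)
})
```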
@@ -290,9 +246,9 @@ describe('MAX_PARALLEL_LIMIT Configuration Bug', () => {
  })

  describe('Constants Validation', () => {
    it('should validate that required constants exist and have correct types', () => {
      const { MAX_PARALLEL_LIMIT } = require('@/config')
      const { MIN_ITERATION_PARALLEL_NUM } = require('@/app/components/workflow/constants')
    it('should validate that required constants exist and have correct types', async () => {
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      const { MIN_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
      expect(typeof MAX_PARALLEL_LIMIT).toBe('number')
      expect(typeof MIN_ITERATION_PARALLEL_NUM).toBe('number')
      expect(MAX_PARALLEL_LIMIT).toBeGreaterThanOrEqual(MIN_ITERATION_PARALLEL_NUM)
@@ -7,13 +7,14 @@

import React from 'react'
import { cleanup, render } from '@testing-library/react'
import '@testing-library/jest-dom'
import BlockInput from '../app/components/base/block-input'
import SupportVarInput from '../app/components/workflow/nodes/_base/components/support-var-input'

// Mock styles
jest.mock('../app/components/app/configuration/base/var-highlight/style.module.css', () => ({
  item: 'mock-item-class',
vi.mock('../app/components/app/configuration/base/var-highlight/style.module.css', () => ({
  default: {
    item: 'mock-item-class',
  },
}))

describe('XSS Prevention - Block Input and Support Var Input Security', () => {
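Same module-shape rule as with the component mocks: the stylesheet is consumed through a default import, so the Vitest factory nests the class map under `default`. Sketched with a hypothetical relative path:

```ts
// In the component under test: `import s from './style.module.css'`,
// a default import, so the mocked module record carries the class map
// on its `default` key.
vi.mock('./style.module.css', () => ({
  default: { item: 'mock-item-class' },
}))
```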
@@ -16,7 +16,7 @@ import {
import { useTranslation } from 'react-i18next'
import { useShallow } from 'zustand/react/shallow'
import s from './style.module.css'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { useStore } from '@/app/components/app/store'
import AppSideBar from '@/app/components/app-sidebar'
import type { NavIcon } from '@/app/components/app-sidebar/navLink'
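This one-line import change repeats across most of the files below: `@/utils/classnames` evidently now exposes `cn` as a named export, so default imports become named imports while every call site stays untouched. A sketch of the unchanged usage (the class strings are illustrative):

```ts
import { cn } from '@/utils/classnames'

const isExpanded = true // illustrative flag
const className = cn('flex items-center', isExpanded && 'gap-2')
```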
@@ -3,7 +3,7 @@ import { RiCalendarLine } from '@remixicon/react'
import type { Dayjs } from 'dayjs'
import type { FC } from 'react'
import React, { useCallback } from 'react'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { formatToLocalTime } from '@/utils/format'
import { useI18N } from '@/context/i18n'
import Picker from '@/app/components/base/date-and-time-picker/date-picker'

@@ -6,7 +6,7 @@ import { SimpleSelect } from '@/app/components/base/select'
import type { Item } from '@/app/components/base/select'
import dayjs from 'dayjs'
import { RiArrowDownSLine, RiCheckLine } from '@remixicon/react'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { useTranslation } from 'react-i18next'

const today = dayjs()
@@ -1,7 +1,8 @@
import React from 'react'
import { render } from '@testing-library/react'
import '@testing-library/jest-dom'
import { OpikIconBig } from '@/app/components/base/icons/src/public/tracing'
import { normalizeAttrs } from '@/app/components/base/icons/utils'
import iconData from '@/app/components/base/icons/src/public/tracing/OpikIconBig.json'

describe('SVG Attribute Error Reproduction', () => {
  // Capture console errors

@@ -10,7 +11,7 @@ describe('SVG Attribute Error Reproduction', () => {

  beforeEach(() => {
    errorMessages = []
    console.error = jest.fn((message) => {
    console.error = vi.fn((message) => {
      errorMessages.push(message)
      originalError(message)
    })

@@ -54,9 +55,6 @@ describe('SVG Attribute Error Reproduction', () => {
  it('should analyze the SVG structure causing the errors', () => {
    console.log('\n=== ANALYZING SVG STRUCTURE ===')

    // Import the JSON data directly
    const iconData = require('@/app/components/base/icons/src/public/tracing/OpikIconBig.json')

    console.log('Icon structure analysis:')
    console.log('- Root element:', iconData.icon.name)
    console.log('- Children count:', iconData.icon.children?.length || 0)

@@ -113,8 +111,6 @@ describe('SVG Attribute Error Reproduction', () => {
  it('should test the normalizeAttrs function behavior', () => {
    console.log('\n=== TESTING normalizeAttrs FUNCTION ===')

    const { normalizeAttrs } = require('@/app/components/base/icons/utils')

    const testAttributes = {
      'inkscape:showpageshadow': '2',
      'inkscape:pageopacity': '0.0',
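The SVG spec gets the same ESM treatment: the inline `require` calls removed in the two hunks above are replaced by the static imports added at the top of the file, which also lets TypeScript type `iconData` and `normalizeAttrs` once for the whole spec:

```ts
// Moved to module scope (paths as in the diff); test bodies now use these
// bindings instead of calling require() mid-test.
import { normalizeAttrs } from '@/app/components/base/icons/utils'
import iconData from '@/app/components/base/icons/src/public/tracing/OpikIconBig.json'
```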
@@ -4,7 +4,7 @@ import React, { useCallback, useRef, useState } from 'react'

import type { PopupProps } from './config-popup'
import ConfigPopup from './config-popup'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import {
  PortalToFollowElem,
  PortalToFollowElemContent,

@@ -12,7 +12,7 @@ import Indicator from '@/app/components/header/indicator'
import Switch from '@/app/components/base/switch'
import Tooltip from '@/app/components/base/tooltip'
import Divider from '@/app/components/base/divider'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'

const I18N_PREFIX = 'app.tracing'

@@ -1,7 +1,7 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import Input from '@/app/components/base/input'

type Props = {

@@ -12,7 +12,7 @@ import type { AliyunConfig, ArizeConfig, DatabricksConfig, LangFuseConfig, LangS
import { TracingProvider } from './type'
import TracingIcon from './tracing-icon'
import ConfigButton from './config-button'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { AliyunIcon, ArizeIcon, DatabricksIcon, LangfuseIcon, LangsmithIcon, MlflowIcon, OpikIcon, PhoenixIcon, TencentIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing'
import Indicator from '@/app/components/header/indicator'
import { fetchTracingConfig as doFetchTracingConfig, fetchTracingStatus, updateTracingStatus } from '@/service/apps'

@@ -6,7 +6,7 @@ import {
} from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import { TracingProvider } from './type'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { AliyunIconBig, ArizeIconBig, DatabricksIconBig, LangfuseIconBig, LangsmithIconBig, MlflowIconBig, OpikIconBig, PhoenixIconBig, TencentIconBig, WeaveIconBig } from '@/app/components/base/icons/src/public/tracing'
import { Eye as View } from '@/app/components/base/icons/src/vender/solid/general'

@@ -1,7 +1,7 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { TracingIcon as Icon } from '@/app/components/base/icons/src/public/tracing'

type Props = {

@@ -23,7 +23,7 @@ import { useDatasetDetail, useDatasetRelatedApps } from '@/service/knowledge/use
import useDocumentTitle from '@/hooks/use-document-title'
import ExtraInfo from '@/app/components/datasets/extra-info'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'

export type IAppDetailLayoutProps = {
  children: React.ReactNode
@@ -1,7 +1,7 @@
'use client'
import Header from '@/app/signin/_header'

import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { useGlobalPublicStore } from '@/context/global-public-context'

export default function SignInLayout({ children }: any) {

@@ -2,7 +2,7 @@
import { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useRouter, useSearchParams } from 'next/navigation'
import cn from 'classnames'
import { cn } from '@/utils/classnames'
import { RiCheckboxCircleFill } from '@remixicon/react'
import { useCountDown } from 'ahooks'
import Button from '@/app/components/base/button'

@@ -1,6 +1,6 @@
'use client'

import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { useGlobalPublicStore } from '@/context/global-public-context'
import useDocumentTitle from '@/hooks/use-document-title'
import type { PropsWithChildren } from 'react'

@@ -7,7 +7,7 @@ import Loading from '@/app/components/base/loading'
import MailAndCodeAuth from './components/mail-and-code-auth'
import MailAndPasswordAuth from './components/mail-and-password-auth'
import SSOAuth from './components/sso-auth'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { LicenseStatus } from '@/types/feature'
import { IS_CE_EDITION } from '@/config'
import { useGlobalPublicStore } from '@/context/global-public-context'

@@ -1,7 +1,7 @@
'use client'
import Header from '@/app/signin/_header'

import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { useGlobalPublicStore } from '@/context/global-public-context'
import useDocumentTitle from '@/hooks/use-document-title'
import { AppContextProvider } from '@/context/app-context'
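One hunk above differs from the rest: the check-code page was importing the third-party `classnames` package directly, and the migration points it at the shared wrapper instead, so the whole app now resolves `cn` from a single place:

```ts
// Before: import cn from 'classnames'             (external package, default export)
// After:  import { cn } from '@/utils/classnames' (shared internal wrapper)
import { cn } from '@/utils/classnames'
```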
@@ -1,13 +1,13 @@
'use client'
import { useEffect } from 'react'
import { useTranslation } from 'react-i18next'
import useSWR from 'swr'
import { useRouter, useSearchParams } from 'next/navigation'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import Button from '@/app/components/base/button'

import { invitationCheck } from '@/service/common'
import Loading from '@/app/components/base/loading'
import useDocumentTitle from '@/hooks/use-document-title'
import { useInvitationCheck } from '@/service/use-common'

const ActivateForm = () => {
  useDocumentTitle('')
@@ -26,19 +26,21 @@ const ActivateForm = () => {
      token,
    },
  }
  const { data: checkRes } = useSWR(checkParams, invitationCheck, {
    revalidateOnFocus: false,
    onSuccess(data) {
      if (data.is_valid) {
        const params = new URLSearchParams(searchParams)
        const { email, workspace_id } = data.data
        params.set('email', encodeURIComponent(email))
        params.set('workspace_id', encodeURIComponent(workspace_id))
        params.set('invite_token', encodeURIComponent(token as string))
        router.replace(`/signin?${params.toString()}`)
      }
    },
  })
  const { data: checkRes } = useInvitationCheck({
    ...checkParams.params,
    token: token || undefined,
  }, true)

  useEffect(() => {
    if (checkRes?.is_valid) {
      const params = new URLSearchParams(searchParams)
      const { email, workspace_id } = checkRes.data
      params.set('email', encodeURIComponent(email))
      params.set('workspace_id', encodeURIComponent(workspace_id))
      params.set('invite_token', encodeURIComponent(token as string))
      router.replace(`/signin?${params.toString()}`)
    }
  }, [checkRes, router, searchParams, token])

  return (
    <div className={
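The rewrite above moves the redirect out of a fetch callback and into React's data flow: `useSWR`'s `onSuccess` performed navigation imperatively, while `useInvitationCheck` just returns data and a `useEffect` reacts to it. Reduced to its skeleton, with a hypothetical hook name standing in for the real one:

```tsx
import { useEffect } from 'react'
import { useRouter } from 'next/navigation'

// Hypothetical query hook standing in for useInvitationCheck.
declare function useSomeCheck(token?: string): { data?: { is_valid: boolean } }

function ActivateGate({ token }: { token?: string }) {
  const router = useRouter()
  const { data } = useSomeCheck(token)

  // The redirect that used to live in SWR's onSuccess now reacts to data.
  useEffect(() => {
    if (data?.is_valid)
      router.replace('/signin')
  }, [data, router])

  return null
}
```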
@@ -2,7 +2,7 @@
import React from 'react'
import Header from '../signin/_header'
import ActivateForm from './activateForm'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { useGlobalPublicStore } from '@/context/global-public-context'

const Activate = () => {

@@ -29,7 +29,7 @@ import CardView from '@/app/(commonLayout)/app/(appDetailLayout)/[appId]/overvie
import type { Operation } from './app-operations'
import AppOperations from './app-operations'
import dynamic from 'next/dynamic'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { AppModeEnum } from '@/types/app'

const SwitchAppModal = dynamic(() => import('@/app/components/app/switch-app-modal'), {

@@ -16,7 +16,7 @@ import AppInfo from './app-info'
import NavLink from './navLink'
import { useStore as useAppStore } from '@/app/components/app/store'
import type { NavIcon } from './navLink'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import { AppModeEnum } from '@/types/app'

type Props = {

@@ -2,7 +2,7 @@ import React, { useCallback, useState } from 'react'
import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '../../base/portal-to-follow-elem'
import ActionButton from '../../base/action-button'
import { RiMoreFill } from '@remixicon/react'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import Menu from './menu'
import { useSelector as useAppContextWithSelector } from '@/context/app-context'
import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
@@ -0,0 +1,379 @@
import React from 'react'
import { render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import DatasetInfo from './index'
import Dropdown from './dropdown'
import Menu from './menu'
import MenuItem from './menu-item'
import type { DataSet } from '@/models/datasets'
import {
  ChunkingMode,
  DataSourceType,
  DatasetPermission,
} from '@/models/datasets'
import { RETRIEVE_METHOD } from '@/types/app'
import { RiEditLine } from '@remixicon/react'

let mockDataset: DataSet
let mockIsDatasetOperator = false
const mockReplace = vi.fn()
const mockInvalidDatasetList = vi.fn()
const mockInvalidDatasetDetail = vi.fn()
const mockExportPipeline = vi.fn()
const mockCheckIsUsedInApp = vi.fn()
const mockDeleteDataset = vi.fn()

const createDataset = (overrides: Partial<DataSet> = {}): DataSet => ({
  id: 'dataset-1',
  name: 'Dataset Name',
  indexing_status: 'completed',
  icon_info: {
    icon: '📙',
    icon_background: '#FFF4ED',
    icon_type: 'emoji',
    icon_url: '',
  },
  description: 'Dataset description',
  permission: DatasetPermission.onlyMe,
  data_source_type: DataSourceType.FILE,
  indexing_technique: 'high_quality' as DataSet['indexing_technique'],
  created_by: 'user-1',
  updated_by: 'user-1',
  updated_at: 1690000000,
  app_count: 0,
  doc_form: ChunkingMode.text,
  document_count: 1,
  total_document_count: 1,
  word_count: 1000,
  provider: 'internal',
  embedding_model: 'text-embedding-3',
  embedding_model_provider: 'openai',
  embedding_available: true,
  retrieval_model_dict: {
    search_method: RETRIEVE_METHOD.semantic,
    reranking_enable: false,
    reranking_model: {
      reranking_provider_name: '',
      reranking_model_name: '',
    },
    top_k: 5,
    score_threshold_enabled: false,
    score_threshold: 0,
  },
  retrieval_model: {
    search_method: RETRIEVE_METHOD.semantic,
    reranking_enable: false,
    reranking_model: {
      reranking_provider_name: '',
      reranking_model_name: '',
    },
    top_k: 5,
    score_threshold_enabled: false,
    score_threshold: 0,
  },
  tags: [],
  external_knowledge_info: {
    external_knowledge_id: '',
    external_knowledge_api_id: '',
    external_knowledge_api_name: '',
    external_knowledge_api_endpoint: '',
  },
  external_retrieval_model: {
    top_k: 0,
    score_threshold: 0,
    score_threshold_enabled: false,
  },
  built_in_field_enabled: false,
  runtime_mode: 'rag_pipeline',
  enable_api: false,
  is_multimodal: false,
  ...overrides,
})

vi.mock('next/navigation', () => ({
  useRouter: () => ({
    replace: mockReplace,
  }),
}))

vi.mock('@/context/dataset-detail', () => ({
  useDatasetDetailContextWithSelector: (selector: (state: { dataset?: DataSet }) => unknown) => selector({ dataset: mockDataset }),
}))

vi.mock('@/context/app-context', () => ({
  useSelector: (selector: (state: { isCurrentWorkspaceDatasetOperator: boolean }) => unknown) =>
    selector({ isCurrentWorkspaceDatasetOperator: mockIsDatasetOperator }),
}))

vi.mock('@/service/knowledge/use-dataset', () => ({
  datasetDetailQueryKeyPrefix: ['dataset', 'detail'],
  useInvalidDatasetList: () => mockInvalidDatasetList,
}))

vi.mock('@/service/use-base', () => ({
  useInvalid: () => mockInvalidDatasetDetail,
}))

vi.mock('@/service/use-pipeline', () => ({
  useExportPipelineDSL: () => ({
    mutateAsync: mockExportPipeline,
  }),
}))

vi.mock('@/service/datasets', () => ({
  checkIsUsedInApp: (...args: unknown[]) => mockCheckIsUsedInApp(...args),
  deleteDataset: (...args: unknown[]) => mockDeleteDataset(...args),
}))

vi.mock('@/hooks/use-knowledge', () => ({
  useKnowledge: () => ({
    formatIndexingTechniqueAndMethod: () => 'indexing-technique',
  }),
}))

vi.mock('@/app/components/datasets/rename-modal', () => ({
  __esModule: true,
  default: ({
    show,
    onClose,
    onSuccess,
  }: {
    show: boolean
    onClose: () => void
    onSuccess?: () => void
  }) => {
    if (!show)
      return null
    return (
      <div data-testid="rename-modal">
        <button type="button" onClick={onSuccess}>Success</button>
        <button type="button" onClick={onClose}>Close</button>
      </div>
    )
  },
}))

const openMenu = async (user: ReturnType<typeof userEvent.setup>) => {
  const trigger = screen.getByRole('button')
  await user.click(trigger)
}

describe('DatasetInfo', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    mockDataset = createDataset()
    mockIsDatasetOperator = false
  })

  // Rendering of dataset summary details based on expand and dataset state.
  describe('Rendering', () => {
    it('should show dataset details when expanded', () => {
      // Arrange
      mockDataset = createDataset({ is_published: true })
      render(<DatasetInfo expand />)

      // Assert
      expect(screen.getByText('Dataset Name')).toBeInTheDocument()
      expect(screen.getByText('Dataset description')).toBeInTheDocument()
      expect(screen.getByText('dataset.chunkingMode.general')).toBeInTheDocument()
      expect(screen.getByText('indexing-technique')).toBeInTheDocument()
    })

    it('should show external tag when provider is external', () => {
      // Arrange
      mockDataset = createDataset({ provider: 'external', is_published: false })
      render(<DatasetInfo expand />)

      // Assert
      expect(screen.getByText('dataset.externalTag')).toBeInTheDocument()
      expect(screen.queryByText('dataset.chunkingMode.general')).not.toBeInTheDocument()
    })

    it('should hide detailed fields when collapsed', () => {
      // Arrange
      render(<DatasetInfo expand={false} />)

      // Assert
      expect(screen.queryByText('Dataset Name')).not.toBeInTheDocument()
      expect(screen.queryByText('Dataset description')).not.toBeInTheDocument()
    })
  })
})

describe('MenuItem', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })

  // Event handling for menu item interactions.
  describe('Interactions', () => {
    it('should call handler when clicked', async () => {
      const user = userEvent.setup()
      const handleClick = vi.fn()
      // Arrange
      render(<MenuItem name="Edit" Icon={RiEditLine} handleClick={handleClick} />)

      // Act
      await user.click(screen.getByText('Edit'))

      // Assert
      expect(handleClick).toHaveBeenCalledTimes(1)
    })
  })
})

describe('Menu', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    mockDataset = createDataset()
  })

  // Rendering of menu options based on runtime mode and delete visibility.
  describe('Rendering', () => {
    it('should show edit, export, and delete options when rag pipeline and deletable', () => {
      // Arrange
      mockDataset = createDataset({ runtime_mode: 'rag_pipeline' })
      render(
        <Menu
          showDelete
          openRenameModal={vi.fn()}
          handleExportPipeline={vi.fn()}
          detectIsUsedByApp={vi.fn()}
        />,
      )

      // Assert
      expect(screen.getByText('common.operation.edit')).toBeInTheDocument()
      expect(screen.getByText('datasetPipeline.operations.exportPipeline')).toBeInTheDocument()
      expect(screen.getByText('common.operation.delete')).toBeInTheDocument()
    })

    it('should hide export and delete options when not rag pipeline and not deletable', () => {
      // Arrange
      mockDataset = createDataset({ runtime_mode: 'general' })
      render(
        <Menu
          showDelete={false}
          openRenameModal={vi.fn()}
          handleExportPipeline={vi.fn()}
          detectIsUsedByApp={vi.fn()}
        />,
      )

      // Assert
      expect(screen.getByText('common.operation.edit')).toBeInTheDocument()
      expect(screen.queryByText('datasetPipeline.operations.exportPipeline')).not.toBeInTheDocument()
      expect(screen.queryByText('common.operation.delete')).not.toBeInTheDocument()
    })
  })
})

describe('Dropdown', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    mockDataset = createDataset({ pipeline_id: 'pipeline-1', runtime_mode: 'rag_pipeline' })
    mockIsDatasetOperator = false
    mockExportPipeline.mockResolvedValue({ data: 'pipeline-content' })
    mockCheckIsUsedInApp.mockResolvedValue({ is_using: false })
    mockDeleteDataset.mockResolvedValue({})
    if (!('createObjectURL' in URL)) {
      Object.defineProperty(URL, 'createObjectURL', {
        value: vi.fn(),
        writable: true,
      })
    }
    if (!('revokeObjectURL' in URL)) {
      Object.defineProperty(URL, 'revokeObjectURL', {
        value: vi.fn(),
        writable: true,
      })
    }
  })

  // Rendering behavior based on workspace role.
  describe('Rendering', () => {
    it('should hide delete option when user is dataset operator', async () => {
      const user = userEvent.setup()
      // Arrange
      mockIsDatasetOperator = true
      render(<Dropdown expand />)

      // Act
      await openMenu(user)

      // Assert
      expect(screen.queryByText('common.operation.delete')).not.toBeInTheDocument()
    })
  })

  // User interactions that trigger modals and exports.
  describe('Interactions', () => {
    it('should open rename modal when edit is clicked', async () => {
      const user = userEvent.setup()
      // Arrange
      render(<Dropdown expand />)

      // Act
      await openMenu(user)
      await user.click(screen.getByText('common.operation.edit'))

      // Assert
      expect(screen.getByTestId('rename-modal')).toBeInTheDocument()
    })

    it('should export pipeline when export is clicked', async () => {
      const user = userEvent.setup()
      const anchorClickSpy = vi.spyOn(HTMLAnchorElement.prototype, 'click')
      const createObjectURLSpy = vi.spyOn(URL, 'createObjectURL')
      // Arrange
      render(<Dropdown expand />)

      // Act
      await openMenu(user)
      await user.click(screen.getByText('datasetPipeline.operations.exportPipeline'))

      // Assert
      await waitFor(() => {
        expect(mockExportPipeline).toHaveBeenCalledWith({
          pipelineId: 'pipeline-1',
          include: false,
        })
      })
      expect(createObjectURLSpy).toHaveBeenCalledTimes(1)
      expect(anchorClickSpy).toHaveBeenCalledTimes(1)
    })

    it('should show delete confirmation when delete is clicked', async () => {
      const user = userEvent.setup()
      // Arrange
      render(<Dropdown expand />)

      // Act
      await openMenu(user)
      await user.click(screen.getByText('common.operation.delete'))

      // Assert
      await waitFor(() => {
        expect(screen.getByText('dataset.deleteDatasetConfirmContent')).toBeInTheDocument()
      })
    })

    it('should delete dataset and redirect when confirm is clicked', async () => {
      const user = userEvent.setup()
      // Arrange
      render(<Dropdown expand />)

      // Act
      await openMenu(user)
      await user.click(screen.getByText('common.operation.delete'))
      await user.click(await screen.findByRole('button', { name: 'common.operation.confirm' }))

      // Assert
      await waitFor(() => {
        expect(mockDeleteDataset).toHaveBeenCalledWith('dataset-1')
      })
      expect(mockInvalidDatasetList).toHaveBeenCalledTimes(1)
      expect(mockReplace).toHaveBeenCalledWith('/datasets')
    })
  })
})
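The new spec's mocking setup is worth noting as a pattern: every `vi.mock` factory reads module-level state (`mockDataset`, `mockIsDatasetOperator`, the `vi.fn()` handles) lazily, inside returned functions, so `beforeEach` can swap that state per test without re-registering the hoisted factories. The same idea in miniature, with a hypothetical module path:

```ts
let mockFlag = false // reassigned per test in beforeEach

// './feature-flag' is illustrative. The factory runs once (vi.mock is
// hoisted), but the returned function reads mockFlag at call time, so
// later reassignments are observed by the component under test.
vi.mock('./feature-flag', () => ({
  useFeatureFlag: () => mockFlag,
}))

beforeEach(() => {
  mockFlag = false
})
```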
@@ -8,7 +8,7 @@ import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
import type { DataSet } from '@/models/datasets'
import { DOC_FORM_TEXT } from '@/models/datasets'
import { useKnowledge } from '@/hooks/use-knowledge'
import cn from '@/utils/classnames'
import { cn } from '@/utils/classnames'
import Dropdown from './dropdown'

type DatasetInfoProps = {
Some files were not shown because too many files have changed in this diff.