diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 0000000000..72dcb5ec73 --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,22 @@ +{ + "enabledPlugins": { + "feature-dev@claude-plugins-official": true, + "context7@claude-plugins-official": true, + "typescript-lsp@claude-plugins-official": true, + "pyright-lsp@claude-plugins-official": true, + "ralph-loop@claude-plugins-official": true + }, + "hooks": { + "PreToolUse": [ + { + "matcher": "Bash", + "hooks": [ + { + "type": "command", + "command": "npx -y block-no-verify@1.1.1" + } + ] + } + ] + } +} diff --git a/.claude/settings.json.example b/.claude/settings.json.example deleted file mode 100644 index 1149895340..0000000000 --- a/.claude/settings.json.example +++ /dev/null @@ -1,19 +0,0 @@ -{ - "permissions": { - "allow": [], - "deny": [] - }, - "env": { - "__comment": "Environment variables for MCP servers. Override in .claude/settings.local.json with actual values.", - "GITHUB_PERSONAL_ACCESS_TOKEN": "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" - }, - "enabledMcpjsonServers": [ - "context7", - "sequential-thinking", - "github", - "fetch", - "playwright", - "ide" - ], - "enableAllProjectMcpServers": true - } \ No newline at end of file diff --git a/.claude/skills/component-refactoring/SKILL.md b/.claude/skills/component-refactoring/SKILL.md new file mode 100644 index 0000000000..7006c382c8 --- /dev/null +++ b/.claude/skills/component-refactoring/SKILL.md @@ -0,0 +1,483 @@ +--- +name: component-refactoring +description: Refactor high-complexity React components in Dify frontend. Use when `pnpm analyze-component --json` shows complexity > 50 or lineCount > 300, when the user asks for code splitting, hook extraction, or complexity reduction, or when `pnpm analyze-component` warns to refactor before testing; avoid for simple/well-structured components, third-party wrappers, or when the user explicitly wants testing without refactoring. +--- + +# Dify Component Refactoring Skill + +Refactor high-complexity React components in the Dify frontend codebase with the patterns and workflow below. + +> **Complexity Threshold**: Components with complexity > 50 (measured by `pnpm analyze-component`) should be refactored before testing. + +## Quick Reference + +### Commands (run from `web/`) + +Use paths relative to `web/` (e.g., `app/components/...`). +Use `refactor-component` for refactoring prompts and `analyze-component` for testing prompts and metrics. + +```bash +cd web + +# Generate refactoring prompt +pnpm refactor-component + +# Output refactoring analysis as JSON +pnpm refactor-component --json + +# Generate testing prompt (after refactoring) +pnpm analyze-component + +# Output testing analysis as JSON +pnpm analyze-component --json +``` + +### Complexity Analysis + +```bash +# Analyze component complexity +pnpm analyze-component --json + +# Key metrics to check: +# - complexity: normalized score 0-100 (target < 50) +# - maxComplexity: highest single function complexity +# - lineCount: total lines (target < 300) +``` + +### Complexity Score Interpretation + +| Score | Level | Action | +|-------|-------|--------| +| 0-25 | 🟢 Simple | Ready for testing | +| 26-50 | 🟡 Medium | Consider minor refactoring | +| 51-75 | 🟠 Complex | **Refactor before testing** | +| 76-100 | 🔴 Very Complex | **Must refactor** | + +## Core Refactoring Patterns + +### Pattern 1: Extract Custom Hooks + +**When**: Component has complex state management, multiple `useState`/`useEffect`, or business logic mixed with UI. 
**Dify Convention**: Place hooks in a `hooks/` subdirectory or alongside the component as `use-<feature>.ts`.

```typescript
// ❌ Before: Complex state logic in component
const Configuration: FC = () => {
  const [modelConfig, setModelConfig] = useState(...)
  const [datasetConfigs, setDatasetConfigs] = useState(...)
  const [completionParams, setCompletionParams] = useState({})

  // 50+ lines of state management logic...

  return <div>...</div>
}

// ✅ After: Extract to custom hook
// hooks/use-model-config.ts
export const useModelConfig = (appId: string) => {
  const [modelConfig, setModelConfig] = useState(...)
  const [completionParams, setCompletionParams] = useState({})

  // Related state management logic here

  return { modelConfig, setModelConfig, completionParams, setCompletionParams }
}

// Component becomes cleaner
const Configuration: FC = () => {
  const { modelConfig, setModelConfig } = useModelConfig(appId)
  return <div>...</div>
+} +``` + +**Dify Examples**: +- `web/app/components/app/configuration/hooks/use-advanced-prompt-config.ts` +- `web/app/components/app/configuration/debug/hooks.tsx` +- `web/app/components/workflow/hooks/use-workflow.ts` + +### Pattern 2: Extract Sub-Components + +**When**: Single component has multiple UI sections, conditional rendering blocks, or repeated patterns. + +**Dify Convention**: Place sub-components in subdirectories or as separate files in the same directory. + +```typescript +// ❌ Before: Monolithic JSX with multiple sections +const AppInfo = () => { + return ( +
    <div>
      {/* 100 lines of header UI */}
      {/* 100 lines of operations UI */}
      {/* 100 lines of modals */}
    </div>
  )
}

// ✅ After: Split into focused components
// app-info/
// ├── index.tsx (orchestration only)
// ├── app-header.tsx (header UI)
// ├── app-operations.tsx (operations UI)
// └── app-modals.tsx (modal management)

const AppInfo = () => {
  const { showModal, setShowModal } = useAppInfoModals()

  return (
    <div>
      <AppHeader />
      <AppOperations />
      <AppModals show={showModal} onClose={() => setShowModal(null)} />
    </div>
+ ) +} +``` + +**Dify Examples**: +- `web/app/components/app/configuration/` directory structure +- `web/app/components/workflow/nodes/` per-node organization + +### Pattern 3: Simplify Conditional Logic + +**When**: Deep nesting (> 3 levels), complex ternaries, or multiple `if/else` chains. + +```typescript +// ❌ Before: Deeply nested conditionals +const Template = useMemo(() => { + if (appDetail?.mode === AppModeEnum.CHAT) { + switch (locale) { + case LanguagesSupported[1]: + return + case LanguagesSupported[7]: + return + default: + return + } + } + if (appDetail?.mode === AppModeEnum.ADVANCED_CHAT) { + // Another 15 lines... + } + // More conditions... +}, [appDetail, locale]) + +// ✅ After: Use lookup tables + early returns +const TEMPLATE_MAP = { + [AppModeEnum.CHAT]: { + [LanguagesSupported[1]]: TemplateChatZh, + [LanguagesSupported[7]]: TemplateChatJa, + default: TemplateChatEn, + }, + [AppModeEnum.ADVANCED_CHAT]: { + [LanguagesSupported[1]]: TemplateAdvancedChatZh, + // ... + }, +} + +const Template = useMemo(() => { + const modeTemplates = TEMPLATE_MAP[appDetail?.mode] + if (!modeTemplates) return null + + const TemplateComponent = modeTemplates[locale] || modeTemplates.default + return +}, [appDetail, locale]) +``` + +### Pattern 4: Extract API/Data Logic + +**When**: Component directly handles API calls, data transformation, or complex async operations. + +**Dify Convention**: Use `@tanstack/react-query` hooks from `web/service/use-*.ts` or create custom data hooks. + +```typescript +// ❌ Before: API logic in component +const MCPServiceCard = () => { + const [basicAppConfig, setBasicAppConfig] = useState({}) + + useEffect(() => { + if (isBasicApp && appId) { + (async () => { + const res = await fetchAppDetail({ url: '/apps', id: appId }) + setBasicAppConfig(res?.model_config || {}) + })() + } + }, [appId, isBasicApp]) + + // More API-related logic... +} + +// ✅ After: Extract to data hook using React Query +// use-app-config.ts +import { useQuery } from '@tanstack/react-query' +import { get } from '@/service/base' + +const NAME_SPACE = 'appConfig' + +export const useAppConfig = (appId: string, isBasicApp: boolean) => { + return useQuery({ + enabled: isBasicApp && !!appId, + queryKey: [NAME_SPACE, 'detail', appId], + queryFn: () => get(`/apps/${appId}`), + select: data => data?.model_config || {}, + }) +} + +// Component becomes cleaner +const MCPServiceCard = () => { + const { data: config, isLoading } = useAppConfig(appId, isBasicApp) + // UI only +} +``` + +**React Query Best Practices in Dify**: +- Define `NAME_SPACE` for query key organization +- Use `enabled` option for conditional fetching +- Use `select` for data transformation +- Export invalidation hooks: `useInvalidXxx` + +**Dify Examples**: +- `web/service/use-workflow.ts` +- `web/service/use-common.ts` +- `web/service/knowledge/use-dataset.ts` +- `web/service/knowledge/use-document.ts` + +### Pattern 5: Extract Modal/Dialog Management + +**When**: Component manages multiple modals with complex open/close states. + +**Dify Convention**: Modals should be extracted with their state management. 
+ +```typescript +// ❌ Before: Multiple modal states in component +const AppInfo = () => { + const [showEditModal, setShowEditModal] = useState(false) + const [showDuplicateModal, setShowDuplicateModal] = useState(false) + const [showConfirmDelete, setShowConfirmDelete] = useState(false) + const [showSwitchModal, setShowSwitchModal] = useState(false) + const [showImportDSLModal, setShowImportDSLModal] = useState(false) + // 5+ more modal states... +} + +// ✅ After: Extract to modal management hook +type ModalType = 'edit' | 'duplicate' | 'delete' | 'switch' | 'import' | null + +const useAppInfoModals = () => { + const [activeModal, setActiveModal] = useState(null) + + const openModal = useCallback((type: ModalType) => setActiveModal(type), []) + const closeModal = useCallback(() => setActiveModal(null), []) + + return { + activeModal, + openModal, + closeModal, + isOpen: (type: ModalType) => activeModal === type, + } +} +``` + +### Pattern 6: Extract Form Logic + +**When**: Complex form validation, submission handling, or field transformation. + +**Dify Convention**: Use `@tanstack/react-form` patterns from `web/app/components/base/form/`. + +```typescript +// ✅ Use existing form infrastructure +import { useAppForm } from '@/app/components/base/form' + +const ConfigForm = () => { + const form = useAppForm({ + defaultValues: { name: '', description: '' }, + onSubmit: handleSubmit, + }) + + return ... +} +``` + +## Dify-Specific Refactoring Guidelines + +### 1. Context Provider Extraction + +**When**: Component provides complex context values with multiple states. + +```typescript +// ❌ Before: Large context value object +const value = { + appId, isAPIKeySet, isTrailFinished, mode, modelModeType, + promptMode, isAdvancedMode, isAgent, isOpenAI, isFunctionCall, + // 50+ more properties... +} +return ... + +// ✅ After: Split into domain-specific contexts + + + + {children} + + + +``` + +**Dify Reference**: `web/context/` directory structure + +### 2. Workflow Node Components + +**When**: Refactoring workflow node components (`web/app/components/workflow/nodes/`). + +**Conventions**: +- Keep node logic in `use-interactions.ts` +- Extract panel UI to separate files +- Use `_base` components for common patterns + +``` +nodes// + ├── index.tsx # Node registration + ├── node.tsx # Node visual component + ├── panel.tsx # Configuration panel + ├── use-interactions.ts # Node-specific hooks + └── types.ts # Type definitions +``` + +### 3. Configuration Components + +**When**: Refactoring app configuration components. + +**Conventions**: +- Separate config sections into subdirectories +- Use existing patterns from `web/app/components/app/configuration/` +- Keep feature toggles in dedicated components + +### 4. Tool/Plugin Components + +**When**: Refactoring tool-related components (`web/app/components/tools/`). 
+ +**Conventions**: +- Follow existing modal patterns +- Use service hooks from `web/service/use-tools.ts` +- Keep provider-specific logic isolated + +## Refactoring Workflow + +### Step 1: Generate Refactoring Prompt + +```bash +pnpm refactor-component +``` + +This command will: +- Analyze component complexity and features +- Identify specific refactoring actions needed +- Generate a prompt for AI assistant (auto-copied to clipboard on macOS) +- Provide detailed requirements based on detected patterns + +### Step 2: Analyze Details + +```bash +pnpm analyze-component --json +``` + +Identify: +- Total complexity score +- Max function complexity +- Line count +- Features detected (state, effects, API, etc.) + +### Step 3: Plan + +Create a refactoring plan based on detected features: + +| Detected Feature | Refactoring Action | +|------------------|-------------------| +| `hasState: true` + `hasEffects: true` | Extract custom hook | +| `hasAPI: true` | Extract data/service hook | +| `hasEvents: true` (many) | Extract event handlers | +| `lineCount > 300` | Split into sub-components | +| `maxComplexity > 50` | Simplify conditional logic | + +### Step 4: Execute Incrementally + +1. **Extract one piece at a time** +2. **Run lint, type-check, and tests after each extraction** +3. **Verify functionality before next step** + +``` +For each extraction: + ┌────────────────────────────────────────┐ + │ 1. Extract code │ + │ 2. Run: pnpm lint:fix │ + │ 3. Run: pnpm type-check:tsgo │ + │ 4. Run: pnpm test │ + │ 5. Test functionality manually │ + │ 6. PASS? → Next extraction │ + │ FAIL? → Fix before continuing │ + └────────────────────────────────────────┘ +``` + +### Step 5: Verify + +After refactoring: + +```bash +# Re-run refactor command to verify improvements +pnpm refactor-component + +# If complexity < 25 and lines < 200, you'll see: +# ✅ COMPONENT IS WELL-STRUCTURED + +# For detailed metrics: +pnpm analyze-component --json + +# Target metrics: +# - complexity < 50 +# - lineCount < 300 +# - maxComplexity < 30 +``` + +## Common Mistakes to Avoid + +### ❌ Over-Engineering + +```typescript +// ❌ Too many tiny hooks +const useButtonText = () => useState('Click') +const useButtonDisabled = () => useState(false) +const useButtonLoading = () => useState(false) + +// ✅ Cohesive hook with related state +const useButtonState = () => { + const [text, setText] = useState('Click') + const [disabled, setDisabled] = useState(false) + const [loading, setLoading] = useState(false) + return { text, setText, disabled, setDisabled, loading, setLoading } +} +``` + +### ❌ Breaking Existing Patterns + +- Follow existing directory structures +- Maintain naming conventions +- Preserve export patterns for compatibility + +### ❌ Premature Abstraction + +- Only extract when there's clear complexity benefit +- Don't create abstractions for single-use code +- Keep refactored code in the same domain area + +## References + +### Dify Codebase Examples + +- **Hook extraction**: `web/app/components/app/configuration/hooks/` +- **Component splitting**: `web/app/components/app/configuration/` +- **Service hooks**: `web/service/use-*.ts` +- **Workflow patterns**: `web/app/components/workflow/hooks/` +- **Form patterns**: `web/app/components/base/form/` + +### Related Skills + +- `frontend-testing` - For testing refactored components +- `web/testing/testing.md` - Testing specification diff --git a/.claude/skills/component-refactoring/references/complexity-patterns.md 
b/.claude/skills/component-refactoring/references/complexity-patterns.md new file mode 100644 index 0000000000..5a0a268f38 --- /dev/null +++ b/.claude/skills/component-refactoring/references/complexity-patterns.md @@ -0,0 +1,493 @@

# Complexity Reduction Patterns

This document provides patterns for reducing cognitive complexity in Dify React components.

## Understanding Complexity

### SonarJS Cognitive Complexity

The `pnpm analyze-component` tool uses SonarJS cognitive complexity metrics:

- **Total Complexity**: Sum of all functions' complexity in the file
- **Max Complexity**: Highest single function complexity

### What Increases Complexity

| Pattern | Complexity Impact |
|---------|-------------------|
| `if/else` | +1 per branch |
| Nested conditions | +1 per nesting level |
| `switch/case` | +1 per case |
| `for/while/do` | +1 per loop |
| `&&`/`||` chains | +1 per operator |
| Nested callbacks | +1 per nesting level |
| `try/catch` | +1 per catch |
| Ternary expressions | +1 per nesting |

## Pattern 1: Replace Conditionals with Lookup Tables

**Before** (complexity: ~15):

```typescript
const Template = useMemo(() => {
  if (appDetail?.mode === AppModeEnum.CHAT) {
    switch (locale) {
      case LanguagesSupported[1]:
        return <TemplateChatZh />
      case LanguagesSupported[7]:
        return <TemplateChatJa />
      default:
        return <TemplateChatEn />
    }
  }
  if (appDetail?.mode === AppModeEnum.ADVANCED_CHAT) {
    switch (locale) {
      case LanguagesSupported[1]:
        return <TemplateAdvancedChatZh />
      case LanguagesSupported[7]:
        return <TemplateAdvancedChatJa />
      default:
        return <TemplateAdvancedChatEn />
    }
  }
  if (appDetail?.mode === AppModeEnum.WORKFLOW) {
    // Similar pattern...
  }
  return null
}, [appDetail, locale])
```

**After** (complexity: ~3):

```typescript
// Define lookup table outside component
const TEMPLATE_MAP: Record<AppModeEnum, Record<string, ComponentType>> = {
  [AppModeEnum.CHAT]: {
    [LanguagesSupported[1]]: TemplateChatZh,
    [LanguagesSupported[7]]: TemplateChatJa,
    default: TemplateChatEn,
  },
  [AppModeEnum.ADVANCED_CHAT]: {
    [LanguagesSupported[1]]: TemplateAdvancedChatZh,
    [LanguagesSupported[7]]: TemplateAdvancedChatJa,
    default: TemplateAdvancedChatEn,
  },
  [AppModeEnum.WORKFLOW]: {
    [LanguagesSupported[1]]: TemplateWorkflowZh,
    [LanguagesSupported[7]]: TemplateWorkflowJa,
    default: TemplateWorkflowEn,
  },
  // ...
}

// Clean component logic
const Template = useMemo(() => {
  if (!appDetail?.mode) return null

  const templates = TEMPLATE_MAP[appDetail.mode]
  if (!templates) return null

  const TemplateComponent = templates[locale] ??
templates.default + return +}, [appDetail, locale]) +``` + +## Pattern 2: Use Early Returns + +**Before** (complexity: ~10): + +```typescript +const handleSubmit = () => { + if (isValid) { + if (hasChanges) { + if (isConnected) { + submitData() + } else { + showConnectionError() + } + } else { + showNoChangesMessage() + } + } else { + showValidationError() + } +} +``` + +**After** (complexity: ~4): + +```typescript +const handleSubmit = () => { + if (!isValid) { + showValidationError() + return + } + + if (!hasChanges) { + showNoChangesMessage() + return + } + + if (!isConnected) { + showConnectionError() + return + } + + submitData() +} +``` + +## Pattern 3: Extract Complex Conditions + +**Before** (complexity: high): + +```typescript +const canPublish = (() => { + if (mode !== AppModeEnum.COMPLETION) { + if (!isAdvancedMode) + return true + + if (modelModeType === ModelModeType.completion) { + if (!hasSetBlockStatus.history || !hasSetBlockStatus.query) + return false + return true + } + return true + } + return !promptEmpty +})() +``` + +**After** (complexity: lower): + +```typescript +// Extract to named functions +const canPublishInCompletionMode = () => !promptEmpty + +const canPublishInChatMode = () => { + if (!isAdvancedMode) return true + if (modelModeType !== ModelModeType.completion) return true + return hasSetBlockStatus.history && hasSetBlockStatus.query +} + +// Clean main logic +const canPublish = mode === AppModeEnum.COMPLETION + ? canPublishInCompletionMode() + : canPublishInChatMode() +``` + +## Pattern 4: Replace Chained Ternaries + +**Before** (complexity: ~5): + +```typescript +const statusText = serverActivated + ? t('status.running') + : serverPublished + ? t('status.inactive') + : appUnpublished + ? t('status.unpublished') + : t('status.notConfigured') +``` + +**After** (complexity: ~2): + +```typescript +const getStatusText = () => { + if (serverActivated) return t('status.running') + if (serverPublished) return t('status.inactive') + if (appUnpublished) return t('status.unpublished') + return t('status.notConfigured') +} + +const statusText = getStatusText() +``` + +Or use lookup: + +```typescript +const STATUS_TEXT_MAP = { + running: 'status.running', + inactive: 'status.inactive', + unpublished: 'status.unpublished', + notConfigured: 'status.notConfigured', +} as const + +const getStatusKey = (): keyof typeof STATUS_TEXT_MAP => { + if (serverActivated) return 'running' + if (serverPublished) return 'inactive' + if (appUnpublished) return 'unpublished' + return 'notConfigured' +} + +const statusText = t(STATUS_TEXT_MAP[getStatusKey()]) +``` + +## Pattern 5: Flatten Nested Loops + +**Before** (complexity: high): + +```typescript +const processData = (items: Item[]) => { + const results: ProcessedItem[] = [] + + for (const item of items) { + if (item.isValid) { + for (const child of item.children) { + if (child.isActive) { + for (const prop of child.properties) { + if (prop.value !== null) { + results.push({ + itemId: item.id, + childId: child.id, + propValue: prop.value, + }) + } + } + } + } + } + } + + return results +} +``` + +**After** (complexity: lower): + +```typescript +// Use functional approach +const processData = (items: Item[]) => { + return items + .filter(item => item.isValid) + .flatMap(item => + item.children + .filter(child => child.isActive) + .flatMap(child => + child.properties + .filter(prop => prop.value !== null) + .map(prop => ({ + itemId: item.id, + childId: child.id, + propValue: prop.value, + })) + ) + ) +} +``` + +## Pattern 6: Extract 
Event Handler Logic

**Before** (complexity: high in component):

```typescript
const Component = () => {
  const handleSelect = (data: DataSet[]) => {
    if (isEqual(data.map(item => item.id), dataSets.map(item => item.id))) {
      hideSelectDataSet()
      return
    }

    formattingChangedDispatcher()
    let newDatasets = data
    if (data.find(item => !item.name)) {
      const newSelected = produce(data, (draft) => {
        data.forEach((item, index) => {
          if (!item.name) {
            const newItem = dataSets.find(i => i.id === item.id)
            if (newItem)
              draft[index] = newItem
          }
        })
      })
      setDataSets(newSelected)
      newDatasets = newSelected
    }
    else {
      setDataSets(data)
    }
    hideSelectDataSet()

    // 40 more lines of logic...
  }

  return <div>...</div>
}
```

**After** (complexity: lower):

```typescript
// Extract to hook or utility
const useDatasetSelection = (dataSets: DataSet[], setDataSets: SetState<DataSet[]>) => {
  const normalizeSelection = (data: DataSet[]) => {
    const hasUnloadedItem = data.some(item => !item.name)
    if (!hasUnloadedItem) return data

    return produce(data, (draft) => {
      data.forEach((item, index) => {
        if (!item.name) {
          const existing = dataSets.find(i => i.id === item.id)
          if (existing) draft[index] = existing
        }
      })
    })
  }

  const hasSelectionChanged = (newData: DataSet[]) => {
    return !isEqual(
      newData.map(item => item.id),
      dataSets.map(item => item.id)
    )
  }

  return { normalizeSelection, hasSelectionChanged }
}

// Component becomes cleaner
const Component = () => {
  const { normalizeSelection, hasSelectionChanged } = useDatasetSelection(dataSets, setDataSets)

  const handleSelect = (data: DataSet[]) => {
    if (!hasSelectionChanged(data)) {
      hideSelectDataSet()
      return
    }

    formattingChangedDispatcher()
    const normalized = normalizeSelection(data)
    setDataSets(normalized)
    hideSelectDataSet()
  }

  return <div>...</div>
+} +``` + +## Pattern 7: Reduce Boolean Logic Complexity + +**Before** (complexity: ~8): + +```typescript +const toggleDisabled = hasInsufficientPermissions + || appUnpublished + || missingStartNode + || triggerModeDisabled + || (isAdvancedApp && !currentWorkflow?.graph) + || (isBasicApp && !basicAppConfig.updated_at) +``` + +**After** (complexity: ~3): + +```typescript +// Extract meaningful boolean functions +const isAppReady = () => { + if (isAdvancedApp) return !!currentWorkflow?.graph + return !!basicAppConfig.updated_at +} + +const hasRequiredPermissions = () => { + return isCurrentWorkspaceEditor && !hasInsufficientPermissions +} + +const canToggle = () => { + if (!hasRequiredPermissions()) return false + if (!isAppReady()) return false + if (missingStartNode) return false + if (triggerModeDisabled) return false + return true +} + +const toggleDisabled = !canToggle() +``` + +## Pattern 8: Simplify useMemo/useCallback Dependencies + +**Before** (complexity: multiple recalculations): + +```typescript +const payload = useMemo(() => { + let parameters: Parameter[] = [] + let outputParameters: OutputParameter[] = [] + + if (!published) { + parameters = (inputs || []).map((item) => ({ + name: item.variable, + description: '', + form: 'llm', + required: item.required, + type: item.type, + })) + outputParameters = (outputs || []).map((item) => ({ + name: item.variable, + description: '', + type: item.value_type, + })) + } + else if (detail && detail.tool) { + parameters = (inputs || []).map((item) => ({ + // Complex transformation... + })) + outputParameters = (outputs || []).map((item) => ({ + // Complex transformation... + })) + } + + return { + icon: detail?.icon || icon, + label: detail?.label || name, + // ...more fields + } +}, [detail, published, workflowAppId, icon, name, description, inputs, outputs]) +``` + +**After** (complexity: separated concerns): + +```typescript +// Separate transformations +const useParameterTransform = (inputs: InputVar[], detail?: ToolDetail, published?: boolean) => { + return useMemo(() => { + if (!published) { + return inputs.map(item => ({ + name: item.variable, + description: '', + form: 'llm', + required: item.required, + type: item.type, + })) + } + + if (!detail?.tool) return [] + + return inputs.map(item => ({ + name: item.variable, + required: item.required, + type: item.type === 'paragraph' ? 'string' : item.type, + description: detail.tool.parameters.find(p => p.name === item.variable)?.llm_description || '', + form: detail.tool.parameters.find(p => p.name === item.variable)?.form || 'llm', + })) + }, [inputs, detail, published]) +} + +// Component uses hook +const parameters = useParameterTransform(inputs, detail, published) +const outputParameters = useOutputTransform(outputs, detail, published) + +const payload = useMemo(() => ({ + icon: detail?.icon || icon, + label: detail?.label || name, + parameters, + outputParameters, + // ... 
+}), [detail, icon, name, parameters, outputParameters]) +``` + +## Target Metrics After Refactoring + +| Metric | Target | +|--------|--------| +| Total Complexity | < 50 | +| Max Function Complexity | < 30 | +| Function Length | < 30 lines | +| Nesting Depth | ≤ 3 levels | +| Conditional Chains | ≤ 3 conditions | diff --git a/.claude/skills/component-refactoring/references/component-splitting.md b/.claude/skills/component-refactoring/references/component-splitting.md new file mode 100644 index 0000000000..78a3389100 --- /dev/null +++ b/.claude/skills/component-refactoring/references/component-splitting.md @@ -0,0 +1,477 @@ +# Component Splitting Patterns + +This document provides detailed guidance on splitting large components into smaller, focused components in Dify. + +## When to Split Components + +Split a component when you identify: + +1. **Multiple UI sections** - Distinct visual areas with minimal coupling that can be composed independently +1. **Conditional rendering blocks** - Large `{condition && }` blocks +1. **Repeated patterns** - Similar UI structures used multiple times +1. **300+ lines** - Component exceeds manageable size +1. **Modal clusters** - Multiple modals rendered in one component + +## Splitting Strategies + +### Strategy 1: Section-Based Splitting + +Identify visual sections and extract each as a component. + +```typescript +// ❌ Before: Monolithic component (500+ lines) +const ConfigurationPage = () => { + return ( +
    <div>
      {/* Header Section - 50 lines */}
      <div className='header'>
        <div className='title'>
          <div>{t('configuration.title')}</div>
        </div>
        <div className='actions'>
          {isAdvancedMode && <Tag>Advanced</Tag>}
          <Button onClick={onPublish} />
          <Button onClick={onRestore} />
        </div>
      </div>

      {/* Config Section - 200 lines */}
      <div className='config'>
        <Config />
      </div>

      {/* Debug Section - 150 lines */}
      <div className='debug'>
        <Debug />
      </div>

      {/* Modals Section - 100 lines */}
      {showSelectDataSet && <SelectDataSet />}
      {showHistoryModal && <EditHistoryModal />}
      {showUseGPT4Confirm && <Confirm />}
    </div>
  )
}

// ✅ After: Split into focused components
// configuration/
// ├── index.tsx (orchestration)
// ├── configuration-header.tsx
// ├── configuration-content.tsx
// ├── configuration-debug.tsx
// └── configuration-modals.tsx

// configuration-header.tsx
interface ConfigurationHeaderProps {
  isAdvancedMode: boolean
  onPublish: () => void
}

const ConfigurationHeader: FC<ConfigurationHeaderProps> = ({
  isAdvancedMode,
  onPublish,
}) => {
  const { t } = useTranslation()

  return (
    <div className='header'>
      <div className='title'>
        <div>{t('configuration.title')}</div>
      </div>
      <div className='actions'>
        {isAdvancedMode && <Tag>Advanced</Tag>}
        <Button onClick={onPublish} />
      </div>
    </div>
+ ) +} + +// index.tsx (orchestration only) +const ConfigurationPage = () => { + const { modelConfig, setModelConfig } = useModelConfig() + const { activeModal, openModal, closeModal } = useModalState() + + return ( +
      <ConfigurationHeader isAdvancedMode={isAdvancedMode} onPublish={handlePublish} />
      <ConfigurationContent modelConfig={modelConfig} setModelConfig={setModelConfig} />
      {!isMobile && (
        <ConfigurationDebug />
      )}
      <ConfigurationModals activeModal={activeModal} onClose={closeModal} />
+ ) +} +``` + +### Strategy 2: Conditional Block Extraction + +Extract large conditional rendering blocks. + +```typescript +// ❌ Before: Large conditional blocks +const AppInfo = () => { + return ( +
    <div>
      {expand ? (
        <div>
          {/* 100 lines of expanded view */}
        </div>
      ) : (
        <div>
          {/* 50 lines of collapsed view */}
        </div>
      )}
    </div>
  )
}

// ✅ After: Separate view components
const AppInfoExpanded: FC<AppInfoViewProps> = ({ appDetail, onAction }) => {
  return (
    <div>
      {/* Clean, focused expanded view */}
    </div>
  )
}

const AppInfoCollapsed: FC<AppInfoViewProps> = ({ appDetail, onAction }) => {
  return (
    <div>
      {/* Clean, focused collapsed view */}
    </div>
  )
}

const AppInfo = () => {
  return (
    <div>
      {expand
        ? <AppInfoExpanded appDetail={appDetail} onAction={handleAction} />
        : <AppInfoCollapsed appDetail={appDetail} onAction={handleAction} />
      }
    </div>
+ ) +} +``` + +### Strategy 3: Modal Extraction + +Extract modals with their trigger logic. + +```typescript +// ❌ Before: Multiple modals in one component +const AppInfo = () => { + const [showEdit, setShowEdit] = useState(false) + const [showDuplicate, setShowDuplicate] = useState(false) + const [showDelete, setShowDelete] = useState(false) + const [showSwitch, setShowSwitch] = useState(false) + + const onEdit = async (data) => { /* 20 lines */ } + const onDuplicate = async (data) => { /* 20 lines */ } + const onDelete = async () => { /* 15 lines */ } + + return ( +
    <div>
      {/* Main content */}

      {showEdit && <EditModal onSave={onEdit} onClose={() => setShowEdit(false)} />}
      {showDuplicate && <DuplicateModal onSave={onDuplicate} onClose={() => setShowDuplicate(false)} />}
      {showDelete && <DeleteConfirm onConfirm={onDelete} onClose={() => setShowDelete(false)} />}
      {showSwitch && <SwitchModal />}
    </div>
  )
}

// ✅ After: Modal manager component
// app-info-modals.tsx
type ModalType = 'edit' | 'duplicate' | 'delete' | 'switch' | null

interface AppInfoModalsProps {
  appDetail: AppDetail
  activeModal: ModalType
  onClose: () => void
  onSuccess: () => void
}

const AppInfoModals: FC<AppInfoModalsProps> = ({
  appDetail,
  activeModal,
  onClose,
  onSuccess,
}) => {
  const handleEdit = async (data) => { /* logic */ }
  const handleDuplicate = async (data) => { /* logic */ }
  const handleDelete = async () => { /* logic */ }

  return (
    <>
      {activeModal === 'edit' && (
        <EditModal appDetail={appDetail} onSave={handleEdit} onClose={onClose} />
      )}
      {activeModal === 'duplicate' && (
        <DuplicateModal appDetail={appDetail} onSave={handleDuplicate} onClose={onClose} />
      )}
      {activeModal === 'delete' && (
        <DeleteConfirm onConfirm={handleDelete} onClose={onClose} />
      )}
      {activeModal === 'switch' && (
        <SwitchModal appDetail={appDetail} onSuccess={onSuccess} onClose={onClose} />
      )}
    </>
  )
}

// Parent component
const AppInfo = () => {
  const { activeModal, openModal, closeModal } = useModalState()

  return (
    <div>
      {/* Main content with openModal triggers */}

      <AppInfoModals
        appDetail={appDetail}
        activeModal={activeModal}
        onClose={closeModal}
        onSuccess={handleSuccess}
      />
    </div>
+ ) +} +``` + +### Strategy 4: List Item Extraction + +Extract repeated item rendering. + +```typescript +// ❌ Before: Inline item rendering +const OperationsList = () => { + return ( +
    <div>
      {operations.map(op => (
        <div key={op.id} className='operation-item'>
          <span>{op.icon}</span>
          <span>{op.title}</span>
          <span>{op.description}</span>
          <Button onClick={() => op.onClick()} />
          {op.badge && <Badge>{op.badge}</Badge>}
          {/* More complex rendering... */}
        </div>
      ))}
    </div>
  )
}

// ✅ After: Extracted item component
interface OperationItemProps {
  operation: Operation
  onAction: (id: string) => void
}

const OperationItem: FC<OperationItemProps> = ({ operation, onAction }) => {
  return (
    <div className='operation-item'>
      <span>{operation.icon}</span>
      <span>{operation.title}</span>
      <span>{operation.description}</span>
      <Button onClick={() => onAction(operation.id)} />
      {operation.badge && <Badge>{operation.badge}</Badge>}
    </div>
  )
}

const OperationsList = () => {
  const handleAction = useCallback((id: string) => {
    const op = operations.find(o => o.id === id)
    op?.onClick()
  }, [operations])

  return (
    <div>
      {operations.map(op => (
        <OperationItem key={op.id} operation={op} onAction={handleAction} />
      ))}
    </div>
  )
}
```

## Directory Structure Patterns

### Pattern A: Flat Structure (Simple Components)

For components with 2-3 sub-components:

```
component-name/
  ├── index.tsx            # Main component
  ├── sub-component-a.tsx
  ├── sub-component-b.tsx
  └── types.ts             # Shared types
```

### Pattern B: Nested Structure (Complex Components)

For components with many sub-components:

```
component-name/
  ├── index.tsx            # Main orchestration
  ├── types.ts             # Shared types
  ├── hooks/
  │   ├── use-feature-a.ts
  │   └── use-feature-b.ts
  ├── components/
  │   ├── header/
  │   │   └── index.tsx
  │   ├── content/
  │   │   └── index.tsx
  │   └── modals/
  │       └── index.tsx
  └── utils/
      └── helpers.ts
```

### Pattern C: Feature-Based Structure (Dify Standard)

Following Dify's existing patterns:

```
configuration/
  ├── index.tsx                  # Main page component
  ├── base/                      # Base/shared components
  │   ├── feature-panel/
  │   ├── group-name/
  │   └── operation-btn/
  ├── config/                    # Config section
  │   ├── index.tsx
  │   ├── agent/
  │   └── automatic/
  ├── dataset-config/            # Dataset section
  │   ├── index.tsx
  │   ├── card-item/
  │   └── params-config/
  ├── debug/                     # Debug section
  │   ├── index.tsx
  │   └── hooks.tsx
  └── hooks/                     # Shared hooks
      └── use-advanced-prompt-config.ts
```

## Props Design

### Minimal Props Principle

Pass only what's needed:

```typescript
// ❌ Bad: Passing entire objects when only some fields needed
<UserCard user={user} appDetail={appDetail} config={config} />

// ✅ Good: Destructure to minimum required
<UserCard name={user.name} avatar={user.avatar} />
```

### Callback Props Pattern

Use callbacks for child-to-parent communication:

```typescript
// Parent
const Parent = () => {
  const [value, setValue] = useState('')

  return (
    <Child
      value={value}
      onChange={setValue}
      onSubmit={handleSubmit}
    />
  )
}

// Child
interface ChildProps {
  value: string
  onChange: (value: string) => void
  onSubmit: () => void
}

const Child: FC<ChildProps> = ({ value, onChange, onSubmit }) => {
  return (
    <div>
      <input value={value} onChange={e => onChange(e.target.value)} />
      <Button onClick={onSubmit} />
    </div>
  )
}
```

### Render Props for Flexibility

When sub-components need parent context:

```typescript
interface ListProps<T> {
  items: T[]
  renderItem: (item: T, index: number) => React.ReactNode
  renderEmpty?: () => React.ReactNode
}

function List<T>({ items, renderItem, renderEmpty }: ListProps<T>) {
  if (items.length === 0 && renderEmpty) {
    return <>{renderEmpty()}</>
  }

  return (
    <div>
      {items.map((item, index) => renderItem(item, index))}
    </div>
  )
}

// Usage
<List
  items={operations}
  renderItem={op => <OperationItem operation={op} />}
  renderEmpty={() => <EmptyPlaceholder />}
/>
```
diff --git a/.claude/skills/component-refactoring/references/hook-extraction.md b/.claude/skills/component-refactoring/references/hook-extraction.md new file mode 100644 index 0000000000..a8d75deffd --- /dev/null +++ b/.claude/skills/component-refactoring/references/hook-extraction.md @@ -0,0 +1,317 @@

# Hook Extraction Patterns

This document provides detailed guidance on extracting custom hooks from complex components in Dify.

## When to Extract Hooks

Extract a custom hook when you identify:

1. **Coupled state groups** - Multiple `useState` hooks that are always used together
1. **Complex effects** - `useEffect` with multiple dependencies or cleanup logic
1. **Business logic** - Data transformations, validations, or calculations
1. **Reusable patterns** - Logic that appears in multiple components

## Extraction Process

### Step 1: Identify State Groups

Look for state variables that are logically related:

```typescript
// ❌ These belong together - extract to hook
const [modelConfig, setModelConfig] = useState(...)
const [completionParams, setCompletionParams] = useState({})
const [modelModeType, setModelModeType] = useState(...)

// These are model-related state that should be in useModelConfig()
```

### Step 2: Identify Related Effects

Find effects that modify the grouped state:

```typescript
// ❌ These effects belong with the state above
useEffect(() => {
  if (hasFetchedDetail && !modelModeType) {
    const mode = currModel?.model_properties.mode
    if (mode) {
      const newModelConfig = produce(modelConfig, (draft) => {
        draft.mode = mode
      })
      setModelConfig(newModelConfig)
    }
  }
}, [textGenerationModelList, hasFetchedDetail, modelModeType, currModel])
```

### Step 3: Create the Hook

```typescript
// hooks/use-model-config.ts
import type { FormValue } from '@/app/components/header/account-setting/model-provider-page/declarations'
import type { ModelConfig } from '@/models/debug'
import { produce } from 'immer'
import { useEffect, useState } from 'react'
import { ModelModeType } from '@/types/app'

interface UseModelConfigParams {
  initialConfig?: Partial<ModelConfig>
  currModel?: { model_properties?: { mode?: ModelModeType } }
  hasFetchedDetail: boolean
}

interface UseModelConfigReturn {
  modelConfig: ModelConfig
  setModelConfig: (config: ModelConfig) => void
  completionParams: FormValue
  setCompletionParams: (params: FormValue) => void
  modelModeType: ModelModeType
}

export const useModelConfig = ({
  initialConfig,
  currModel,
  hasFetchedDetail,
}: UseModelConfigParams): UseModelConfigReturn => {
  const [modelConfig, setModelConfig] = useState<ModelConfig>({
    provider: 'langgenius/openai/openai',
    model_id: 'gpt-3.5-turbo',
    mode: ModelModeType.unset,
    // ...
default values + ...initialConfig, + }) + + const [completionParams, setCompletionParams] = useState({}) + + const modelModeType = modelConfig.mode + + // Fill old app data missing model mode + useEffect(() => { + if (hasFetchedDetail && !modelModeType) { + const mode = currModel?.model_properties?.mode + if (mode) { + setModelConfig(produce(modelConfig, (draft) => { + draft.mode = mode + })) + } + } + }, [hasFetchedDetail, modelModeType, currModel]) + + return { + modelConfig, + setModelConfig, + completionParams, + setCompletionParams, + modelModeType, + } +} +``` + +### Step 4: Update Component + +```typescript +// Before: 50+ lines of state management +const Configuration: FC = () => { + const [modelConfig, setModelConfig] = useState(...) + // ... lots of related state and effects +} + +// After: Clean component +const Configuration: FC = () => { + const { + modelConfig, + setModelConfig, + completionParams, + setCompletionParams, + modelModeType, + } = useModelConfig({ + currModel, + hasFetchedDetail, + }) + + // Component now focuses on UI +} +``` + +## Naming Conventions + +### Hook Names + +- Use `use` prefix: `useModelConfig`, `useDatasetConfig` +- Be specific: `useAdvancedPromptConfig` not `usePrompt` +- Include domain: `useWorkflowVariables`, `useMCPServer` + +### File Names + +- Kebab-case: `use-model-config.ts` +- Place in `hooks/` subdirectory when multiple hooks exist +- Place alongside component for single-use hooks + +### Return Type Names + +- Suffix with `Return`: `UseModelConfigReturn` +- Suffix params with `Params`: `UseModelConfigParams` + +## Common Hook Patterns in Dify + +### 1. Data Fetching Hook (React Query) + +```typescript +// Pattern: Use @tanstack/react-query for data fetching +import { useQuery, useQueryClient } from '@tanstack/react-query' +import { get } from '@/service/base' +import { useInvalid } from '@/service/use-base' + +const NAME_SPACE = 'appConfig' + +// Query keys for cache management +export const appConfigQueryKeys = { + detail: (appId: string) => [NAME_SPACE, 'detail', appId] as const, +} + +// Main data hook +export const useAppConfig = (appId: string) => { + return useQuery({ + enabled: !!appId, + queryKey: appConfigQueryKeys.detail(appId), + queryFn: () => get(`/apps/${appId}`), + select: data => data?.model_config || null, + }) +} + +// Invalidation hook for refreshing data +export const useInvalidAppConfig = () => { + return useInvalid([NAME_SPACE]) +} + +// Usage in component +const Component = () => { + const { data: config, isLoading, error, refetch } = useAppConfig(appId) + const invalidAppConfig = useInvalidAppConfig() + + const handleRefresh = () => { + invalidAppConfig() // Invalidates cache and triggers refetch + } + + return
<div>...</div>
+} +``` + +### 2. Form State Hook + +```typescript +// Pattern: Form state + validation + submission +export const useConfigForm = (initialValues: ConfigFormValues) => { + const [values, setValues] = useState(initialValues) + const [errors, setErrors] = useState>({}) + const [isSubmitting, setIsSubmitting] = useState(false) + + const validate = useCallback(() => { + const newErrors: Record = {} + if (!values.name) newErrors.name = 'Name is required' + setErrors(newErrors) + return Object.keys(newErrors).length === 0 + }, [values]) + + const handleChange = useCallback((field: string, value: any) => { + setValues(prev => ({ ...prev, [field]: value })) + }, []) + + const handleSubmit = useCallback(async (onSubmit: (values: ConfigFormValues) => Promise) => { + if (!validate()) return + setIsSubmitting(true) + try { + await onSubmit(values) + } finally { + setIsSubmitting(false) + } + }, [values, validate]) + + return { values, errors, isSubmitting, handleChange, handleSubmit } +} +``` + +### 3. Modal State Hook + +```typescript +// Pattern: Multiple modal management +type ModalType = 'edit' | 'delete' | 'duplicate' | null + +export const useModalState = () => { + const [activeModal, setActiveModal] = useState(null) + const [modalData, setModalData] = useState(null) + + const openModal = useCallback((type: ModalType, data?: any) => { + setActiveModal(type) + setModalData(data) + }, []) + + const closeModal = useCallback(() => { + setActiveModal(null) + setModalData(null) + }, []) + + return { + activeModal, + modalData, + openModal, + closeModal, + isOpen: useCallback((type: ModalType) => activeModal === type, [activeModal]), + } +} +``` + +### 4. Toggle/Boolean Hook + +```typescript +// Pattern: Boolean state with convenience methods +export const useToggle = (initialValue = false) => { + const [value, setValue] = useState(initialValue) + + const toggle = useCallback(() => setValue(v => !v), []) + const setTrue = useCallback(() => setValue(true), []) + const setFalse = useCallback(() => setValue(false), []) + + return [value, { toggle, setTrue, setFalse, set: setValue }] as const +} + +// Usage +const [isExpanded, { toggle, setTrue: expand, setFalse: collapse }] = useToggle() +``` + +## Testing Extracted Hooks + +After extraction, test hooks in isolation: + +```typescript +// use-model-config.spec.ts +import { renderHook, act } from '@testing-library/react' +import { useModelConfig } from './use-model-config' + +describe('useModelConfig', () => { + it('should initialize with default values', () => { + const { result } = renderHook(() => useModelConfig({ + hasFetchedDetail: false, + })) + + expect(result.current.modelConfig.provider).toBe('langgenius/openai/openai') + expect(result.current.modelModeType).toBe(ModelModeType.unset) + }) + + it('should update model config', () => { + const { result } = renderHook(() => useModelConfig({ + hasFetchedDetail: true, + })) + + act(() => { + result.current.setModelConfig({ + ...result.current.modelConfig, + model_id: 'gpt-4', + }) + }) + + expect(result.current.modelConfig.model_id).toBe('gpt-4') + }) +}) +``` diff --git a/.claude/skills/frontend-code-review/SKILL.md b/.claude/skills/frontend-code-review/SKILL.md new file mode 100644 index 0000000000..6cc23ca171 --- /dev/null +++ b/.claude/skills/frontend-code-review/SKILL.md @@ -0,0 +1,73 @@ +--- +name: frontend-code-review +description: "Trigger when the user requests a review of frontend files (e.g., `.tsx`, `.ts`, `.js`). 
Support both pending-change reviews and focused file reviews while applying the checklist rules."
---

# Frontend Code Review

## Intent
Use this skill whenever the user asks to review frontend code (especially `.tsx`, `.ts`, or `.js` files). Support two review modes:

1. **Pending-change review** – inspect staged/working-tree files slated for commit and flag checklist violations before submission.
2. **File-targeted review** – review the specific file(s) the user names and report the relevant checklist findings.

Stick to the checklist below for every applicable file and mode.

## Checklist
See [references/code-quality.md](references/code-quality.md), [references/performance.md](references/performance.md), and [references/business-logic.md](references/business-logic.md) for the living checklist split by category—treat it as the canonical set of rules to follow.

Flag each rule violation with urgency metadata so future reviewers can prioritize fixes.

## Review Process
1. Open the relevant component/module. Gather lines that relate to class names, React Flow hooks, prop memoization, and styling.
2. For each checklist rule, note where the code deviates and capture a representative snippet.
3. Compose the review per the template below. Group violations first by **Urgent** flag, then by category order (Code Quality, Performance, Business Logic).

## Required output
When invoked, the response must exactly follow one of the two templates:

### Template A (any findings)
```
# Code review
Found urgent issues that need to be fixed:

## 1
FilePath: <file path> line <line number>

<issue description>

### Suggested fix

<suggested fix>

---
... (repeat for each urgent issue) ...

Found suggestions for improvement:

## 1
FilePath: <file path> line <line number>

<issue description>

### Suggested fix

<suggested fix>

---

... (repeat for each suggestion) ...
```

If there are no urgent issues, omit that section. If there are no suggestions, omit that section.

If there are more than 10 issues, summarize the count as "10+ urgent issues" or "10+ suggestions" and output only the first 10.

Don't compress the blank lines between sections; keep them as-is for readability.

If you use Template A (i.e., there are issues to fix) and at least one issue requires code changes, append a brief follow-up question after the structured output asking whether the user wants you to apply the suggested fix(es). For example: "Would you like me to use the Suggested fix section to address these issues?"

### Template B (no issues)
```
# Code review
No issues found.
```
diff --git a/.claude/skills/frontend-code-review/references/business-logic.md b/.claude/skills/frontend-code-review/references/business-logic.md new file mode 100644 index 0000000000..4584f99dfc --- /dev/null +++ b/.claude/skills/frontend-code-review/references/business-logic.md @@ -0,0 +1,15 @@

# Rule Catalog — Business Logic

## Can't use workflowStore in Node components

IsUrgent: True

### Description

File path pattern of node components: `web/app/components/workflow/nodes/[nodeName]/node.tsx`

Node components are also used when creating a RAG Pipeline from a template, but in that context there is no workflowStore Provider, so the screen renders blank. [This issue](https://github.com/langgenius/dify/issues/29168) had exactly this root cause.

### Suggested Fix

Use `import { useNodes } from 'reactflow'` instead of `import useNodes from '@/app/components/workflow/store/workflow/use-nodes'`.
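A minimal sketch of the fix (the node name and rendered markup below are illustrative, not taken from a specific node):

```tsx
// web/app/components/workflow/nodes/[nodeName]/node.tsx

// ❌ Store-backed hook: assumes a workflowStore Provider exists, which is
// not the case when the node renders from a RAG Pipeline template
// import useNodes from '@/app/components/workflow/store/workflow/use-nodes'

// ✅ React Flow's own hook only needs the surrounding React Flow context
import { useNodes } from 'reactflow'

const Node = () => {
  const nodes = useNodes()
  return <div>{nodes.length} nodes in the graph</div>
}

export default Node
```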
diff --git a/.claude/skills/frontend-code-review/references/code-quality.md b/.claude/skills/frontend-code-review/references/code-quality.md new file mode 100644 index 0000000000..afdd40deb3 --- /dev/null +++ b/.claude/skills/frontend-code-review/references/code-quality.md @@ -0,0 +1,44 @@

# Rule Catalog — Code Quality

## Conditional class names use utility function

IsUrgent: True
Category: Code Quality

### Description

Ensure conditional CSS is handled via the shared `cn` utility from `@/utils/classnames` instead of custom ternaries, string concatenation, or template strings. Centralizing class logic keeps components consistent and easier to maintain.

### Suggested Fix

```ts
import { cn } from '@/utils/classnames'
const classNames = cn(isActive ? 'text-primary-600' : 'text-gray-500')
```

## Tailwind-first styling

IsUrgent: True
Category: Code Quality

### Description

Favor Tailwind CSS utility classes instead of adding new `.module.css` files unless a Tailwind combination cannot achieve the required styling. Keeping styles in Tailwind improves consistency and reduces maintenance overhead.

Update this file when adding, editing, or removing Code Quality rules so the catalog remains accurate.

## Classname ordering for easy overrides

### Description

When writing components, always place the incoming `className` prop after the component’s own class values so that downstream consumers can override or extend the styling. This keeps your component’s defaults but still lets external callers change or remove specific styles.

Example:

```tsx
import { cn } from '@/utils/classnames'

const Button = ({ className }) => {
  return <button className={cn('default-button-classes', className)} />
}
```
diff --git a/.claude/skills/frontend-code-review/references/performance.md b/.claude/skills/frontend-code-review/references/performance.md new file mode 100644 index 0000000000..2d60072f5c --- /dev/null +++ b/.claude/skills/frontend-code-review/references/performance.md @@ -0,0 +1,45 @@

# Rule Catalog — Performance

## React Flow data usage

IsUrgent: True
Category: Performance

### Description

When rendering React Flow, prefer `useNodes`/`useEdges` for UI consumption and rely on `useStoreApi` inside callbacks that mutate or read node/edge state. Avoid manually pulling Flow data outside of these hooks.

## Complex prop memoization

IsUrgent: True
Category: Performance

### Description

Wrap complex prop values (objects, arrays, maps) in `useMemo` prior to passing them into child components to guarantee stable references and prevent unnecessary renders.

Update this file when adding, editing, or removing Performance rules so the catalog remains accurate.

Wrong:

```tsx
<Child
  config={{
    provider: ...,
    detail: ...,
  }}
/>
```

Right:

```tsx
const config = useMemo(() => ({
  provider: ...,
  detail: ...,
}), [provider, detail]);

<Child config={config} />
```
diff --git a/.claude/skills/frontend-testing/SKILL.md b/.claude/skills/frontend-testing/SKILL.md index dac604ac4b..dd9677a78e 100644 --- a/.claude/skills/frontend-testing/SKILL.md +++ b/.claude/skills/frontend-testing/SKILL.md @@ -1,13 +1,13 @@
---
-name: Dify Frontend Testing
-description: Generate Jest + React Testing Library tests for Dify frontend components, hooks, and utilities. Triggers on testing, spec files, coverage, Jest, RTL, unit tests, integration tests, or write/review test requests.
+name: frontend-testing
+description: Generate Vitest + React Testing Library tests for Dify frontend components, hooks, and utilities. Triggers on testing, spec files, coverage, Vitest, RTL, unit tests, integration tests, or write/review test requests.
---

# Dify Frontend Testing Skill

This skill enables Claude to generate high-quality, comprehensive frontend tests for the Dify project following established conventions and best practices.

-> **⚠️ Authoritative Source**: This skill is derived from `web/testing/testing.md`. When in doubt, always refer to that document as the canonical specification.
+> **⚠️ Authoritative Source**: This skill is derived from `web/testing/testing.md`; when in doubt, refer to that document as the canonical specification. Use Vitest mock/timer APIs (`vi.*`) throughout.
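For orientation when migrating older specs, the Jest helpers used throughout this document map one-to-one onto `vi.*` equivalents. A small illustrative sample (not an exhaustive list):

```typescript
import { vi } from 'vitest'

const handler = vi.fn()            // was: jest.fn()
vi.mock('@/service/api')           // was: jest.mock('@/service/api')
const mocked = vi.mocked(handler)  // was: handler as jest.Mocked<typeof handler>
vi.clearAllMocks()                 // was: jest.clearAllMocks()
vi.useFakeTimers()                 // was: jest.useFakeTimers()
vi.advanceTimersByTime(300)        // was: jest.advanceTimersByTime(300)
vi.useRealTimers()                 // was: jest.useRealTimers()
```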
## When to Apply This Skill @@ -15,7 +15,7 @@ Apply this skill when the user: - Asks to **write tests** for a component, hook, or utility - Asks to **review existing tests** for completeness -- Mentions **Jest**, **React Testing Library**, **RTL**, or **spec files** +- Mentions **Vitest**, **React Testing Library**, **RTL**, or **spec files** - Requests **test coverage** improvement - Uses `pnpm analyze-component` output as context - Mentions **testing**, **unit tests**, or **integration tests** for frontend code @@ -33,9 +33,9 @@ Apply this skill when the user: | Tool | Version | Purpose | |------|---------|---------| -| Jest | 29.7 | Test runner | +| Vitest | 4.0.16 | Test runner | | React Testing Library | 16.0 | Component testing | -| happy-dom | - | Test environment | +| jsdom | - | Test environment | | nock | 14.0 | HTTP mocking | | TypeScript | 5.x | Type safety | @@ -46,13 +46,13 @@ Apply this skill when the user: pnpm test # Watch mode -pnpm test -- --watch +pnpm test:watch # Run specific file -pnpm test -- path/to/file.spec.tsx +pnpm test path/to/file.spec.tsx # Generate coverage report -pnpm test -- --coverage +pnpm test:coverage # Analyze component complexity pnpm analyze-component @@ -77,9 +77,9 @@ import Component from './index' // import { ChildComponent } from './child-component' // ✅ Mock external dependencies only -jest.mock('@/service/api') -jest.mock('next/navigation', () => ({ - useRouter: () => ({ push: jest.fn() }), +vi.mock('@/service/api') +vi.mock('next/navigation', () => ({ + useRouter: () => ({ push: vi.fn() }), usePathname: () => '/test', })) @@ -88,7 +88,7 @@ let mockSharedState = false describe('ComponentName', () => { beforeEach(() => { - jest.clearAllMocks() // ✅ Reset mocks BEFORE each test + vi.clearAllMocks() // ✅ Reset mocks BEFORE each test mockSharedState = false // ✅ Reset shared state }) @@ -117,7 +117,7 @@ describe('ComponentName', () => { // User Interactions describe('User Interactions', () => { it('should handle click events', () => { - const handleClick = jest.fn() + const handleClick = vi.fn() render() fireEvent.click(screen.getByRole('button')) @@ -155,7 +155,7 @@ describe('ComponentName', () => { For each file: ┌────────────────────────────────────────┐ │ 1. Write test │ - │ 2. Run: pnpm test -- .spec.tsx │ + │ 2. Run: pnpm test .spec.tsx │ │ 3. PASS? → Mark complete, next file │ │ FAIL? → Fix first, then continue │ └────────────────────────────────────────┘ @@ -178,7 +178,7 @@ Process in this order for multi-file testing: - **500+ lines**: Consider splitting before testing - **Many dependencies**: Extract logic into hooks first -> 📖 See `guides/workflow.md` for complete workflow details and todo list format. +> 📖 See `references/workflow.md` for complete workflow details and todo list format. ## Testing Strategy @@ -289,17 +289,18 @@ For each test file generated, aim for: - ✅ **>95%** branch coverage - ✅ **>95%** line coverage -> **Note**: For multi-file directories, process one file at a time with full coverage each. See `guides/workflow.md`. +> **Note**: For multi-file directories, process one file at a time with full coverage each. See `references/workflow.md`. 
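To have the runner enforce these targets rather than only report them, Vitest supports coverage thresholds in its config. The sketch below is illustrative only; the project's real settings live in `web/vitest.config.ts` and take precedence:

```typescript
// Illustrative sketch; verify against web/vitest.config.ts.
import { defineConfig } from 'vitest/config'

export default defineConfig({
  test: {
    coverage: {
      thresholds: {
        statements: 90,
        branches: 95,
        functions: 90,
        lines: 95,
      },
    },
  },
})
```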
## Detailed Guides For more detailed information, refer to: -- `guides/workflow.md` - **Incremental testing workflow** (MUST READ for multi-file testing) -- `guides/mocking.md` - Mock patterns and best practices -- `guides/async-testing.md` - Async operations and API calls -- `guides/domain-components.md` - Workflow, Dataset, Configuration testing -- `guides/common-patterns.md` - Frequently used testing patterns +- `references/workflow.md` - **Incremental testing workflow** (MUST READ for multi-file testing) +- `references/mocking.md` - Mock patterns and best practices +- `references/async-testing.md` - Async operations and API calls +- `references/domain-components.md` - Workflow, Dataset, Configuration testing +- `references/common-patterns.md` - Frequently used testing patterns +- `references/checklist.md` - Test generation checklist and validation steps ## Authoritative References @@ -315,6 +316,7 @@ For more detailed information, refer to: ### Project Configuration -- `web/jest.config.ts` - Jest configuration -- `web/jest.setup.ts` - Test environment setup -- `web/testing/analyze-component.js` - Component analysis tool +- `web/vitest.config.ts` - Vitest configuration +- `web/vitest.setup.ts` - Test environment setup +- `web/scripts/analyze-component.js` - Component analysis tool +- Modules are not mocked automatically. Global mocks live in `web/vitest.setup.ts` (for example `react-i18next`, `next/image`); mock other modules like `ky` or `mime` locally in test files. diff --git a/.claude/skills/frontend-testing/templates/component-test.template.tsx b/.claude/skills/frontend-testing/assets/component-test.template.tsx similarity index 92% rename from .claude/skills/frontend-testing/templates/component-test.template.tsx rename to .claude/skills/frontend-testing/assets/component-test.template.tsx index 9b1542b676..6b7803bd4b 100644 --- a/.claude/skills/frontend-testing/templates/component-test.template.tsx +++ b/.claude/skills/frontend-testing/assets/component-test.template.tsx @@ -23,30 +23,34 @@ import userEvent from '@testing-library/user-event' // ============================================================================ // Mocks // ============================================================================ -// WHY: Mocks must be hoisted to top of file (Jest requirement). +// WHY: Mocks must be hoisted to top of file (Vitest requirement). // They run BEFORE imports, so keep them before component imports. 
-// i18n (always required in Dify) -// WHY: Returns key instead of translation so tests don't depend on i18n files -jest.mock('react-i18next', () => ({ - useTranslation: () => ({ - t: (key: string) => key, - }), -})) +// i18n (automatically mocked) +// WHY: Global mock in web/vitest.setup.ts is auto-loaded by Vitest setup +// The global mock provides: useTranslation, Trans, useMixedTranslation, useGetLanguage +// No explicit mock needed for most tests +// +// Override only if custom translations are required: +// import { createReactI18nextMock } from '@/test/i18n-mock' +// vi.mock('react-i18next', () => createReactI18nextMock({ +// 'my.custom.key': 'Custom Translation', +// 'button.save': 'Save', +// })) // Router (if component uses useRouter, usePathname, useSearchParams) // WHY: Isolates tests from Next.js routing, enables testing navigation behavior -// const mockPush = jest.fn() -// jest.mock('next/navigation', () => ({ +// const mockPush = vi.fn() +// vi.mock('next/navigation', () => ({ // useRouter: () => ({ push: mockPush }), // usePathname: () => '/test-path', // })) // API services (if component fetches data) // WHY: Prevents real network calls, enables testing all states (loading/success/error) -// jest.mock('@/service/api') +// vi.mock('@/service/api') // import * as api from '@/service/api' -// const mockedApi = api as jest.Mocked +// const mockedApi = vi.mocked(api) // Shared mock state (for portal/dropdown components) // WHY: Portal components like PortalToFollowElem need shared state between @@ -91,7 +95,7 @@ describe('ComponentName', () => { // - Prevents mock call history from leaking between tests // - MUST be beforeEach (not afterEach) to reset BEFORE assertions like toHaveBeenCalledTimes beforeEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() // Reset shared mock state if used (CRITICAL for portal/dropdown tests) // mockOpenState = false }) @@ -148,7 +152,7 @@ describe('ComponentName', () => { // - userEvent simulates real user behavior (focus, hover, then click) // - fireEvent is lower-level, doesn't trigger all browser events // const user = userEvent.setup() - // const handleClick = jest.fn() + // const handleClick = vi.fn() // render() // // await user.click(screen.getByRole('button')) @@ -158,7 +162,7 @@ describe('ComponentName', () => { it('should call onChange when value changes', async () => { // const user = userEvent.setup() - // const handleChange = jest.fn() + // const handleChange = vi.fn() // render() // // await user.type(screen.getByRole('textbox'), 'new value') @@ -191,7 +195,7 @@ describe('ComponentName', () => { }) // -------------------------------------------------------------------------- - // Async Operations (if component fetches data - useSWR, useQuery, fetch) + // Async Operations (if component fetches data - useQuery, fetch) // -------------------------------------------------------------------------- // WHY: Async operations have 3 states users experience: loading, success, error describe('Async Operations', () => { diff --git a/.claude/skills/frontend-testing/templates/hook-test.template.ts b/.claude/skills/frontend-testing/assets/hook-test.template.ts similarity index 95% rename from .claude/skills/frontend-testing/templates/hook-test.template.ts rename to .claude/skills/frontend-testing/assets/hook-test.template.ts index 4fb7fd21ec..99161848a4 100644 --- a/.claude/skills/frontend-testing/templates/hook-test.template.ts +++ b/.claude/skills/frontend-testing/assets/hook-test.template.ts @@ -15,9 +15,9 @@ import { renderHook, act, 
waitFor } from '@testing-library/react' // ============================================================================ // API services (if hook fetches data) -// jest.mock('@/service/api') +// vi.mock('@/service/api') // import * as api from '@/service/api' -// const mockedApi = api as jest.Mocked +// const mockedApi = vi.mocked(api) // ============================================================================ // Test Helpers @@ -38,7 +38,7 @@ import { renderHook, act, waitFor } from '@testing-library/react' describe('useHookName', () => { beforeEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() }) // -------------------------------------------------------------------------- @@ -145,7 +145,7 @@ describe('useHookName', () => { // -------------------------------------------------------------------------- describe('Side Effects', () => { it('should call callback when value changes', () => { - // const callback = jest.fn() + // const callback = vi.fn() // const { result } = renderHook(() => useHookName({ onChange: callback })) // // act(() => { @@ -156,9 +156,9 @@ describe('useHookName', () => { }) it('should cleanup on unmount', () => { - // const cleanup = jest.fn() - // jest.spyOn(window, 'addEventListener') - // jest.spyOn(window, 'removeEventListener') + // const cleanup = vi.fn() + // vi.spyOn(window, 'addEventListener') + // vi.spyOn(window, 'removeEventListener') // // const { unmount } = renderHook(() => useHookName()) // diff --git a/.claude/skills/frontend-testing/templates/utility-test.template.ts b/.claude/skills/frontend-testing/assets/utility-test.template.ts similarity index 100% rename from .claude/skills/frontend-testing/templates/utility-test.template.ts rename to .claude/skills/frontend-testing/assets/utility-test.template.ts diff --git a/.claude/skills/frontend-testing/guides/async-testing.md b/.claude/skills/frontend-testing/references/async-testing.md similarity index 92% rename from .claude/skills/frontend-testing/guides/async-testing.md rename to .claude/skills/frontend-testing/references/async-testing.md index f9912debbf..ae775a87a9 100644 --- a/.claude/skills/frontend-testing/guides/async-testing.md +++ b/.claude/skills/frontend-testing/references/async-testing.md @@ -49,7 +49,7 @@ import userEvent from '@testing-library/user-event' it('should submit form', async () => { const user = userEvent.setup() - const onSubmit = jest.fn() + const onSubmit = vi.fn() render(
) @@ -77,15 +77,15 @@ it('should submit form', async () => { ```typescript describe('Debounced Search', () => { beforeEach(() => { - jest.useFakeTimers() + vi.useFakeTimers() }) afterEach(() => { - jest.useRealTimers() + vi.useRealTimers() }) it('should debounce search input', async () => { - const onSearch = jest.fn() + const onSearch = vi.fn() render() // Type in the input @@ -95,7 +95,7 @@ describe('Debounced Search', () => { expect(onSearch).not.toHaveBeenCalled() // Advance timers - jest.advanceTimersByTime(300) + vi.advanceTimersByTime(300) // Now search is called expect(onSearch).toHaveBeenCalledWith('query') @@ -107,8 +107,8 @@ describe('Debounced Search', () => { ```typescript it('should retry on failure', async () => { - jest.useFakeTimers() - const fetchData = jest.fn() + vi.useFakeTimers() + const fetchData = vi.fn() .mockRejectedValueOnce(new Error('Network error')) .mockResolvedValueOnce({ data: 'success' }) @@ -120,7 +120,7 @@ it('should retry on failure', async () => { }) // Advance timer for retry - jest.advanceTimersByTime(1000) + vi.advanceTimersByTime(1000) // Second call succeeds await waitFor(() => { @@ -128,7 +128,7 @@ it('should retry on failure', async () => { expect(screen.getByText('success')).toBeInTheDocument() }) - jest.useRealTimers() + vi.useRealTimers() }) ``` @@ -136,19 +136,19 @@ it('should retry on failure', async () => { ```typescript // Run all pending timers -jest.runAllTimers() +vi.runAllTimers() // Run only pending timers (not new ones created during execution) -jest.runOnlyPendingTimers() +vi.runOnlyPendingTimers() // Advance by specific time -jest.advanceTimersByTime(1000) +vi.advanceTimersByTime(1000) // Get current fake time -jest.now() +Date.now() // Clear all timers -jest.clearAllTimers() +vi.clearAllTimers() ``` ## API Testing Patterns @@ -158,7 +158,7 @@ jest.clearAllTimers() ```typescript describe('DataFetcher', () => { beforeEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() }) it('should show loading state', () => { @@ -241,7 +241,7 @@ it('should submit form and show success', async () => { ```typescript it('should fetch data on mount', async () => { - const fetchData = jest.fn().mockResolvedValue({ data: 'test' }) + const fetchData = vi.fn().mockResolvedValue({ data: 'test' }) render() @@ -255,7 +255,7 @@ it('should fetch data on mount', async () => { ```typescript it('should refetch when id changes', async () => { - const fetchData = jest.fn().mockResolvedValue({ data: 'test' }) + const fetchData = vi.fn().mockResolvedValue({ data: 'test' }) const { rerender } = render() @@ -276,8 +276,8 @@ it('should refetch when id changes', async () => { ```typescript it('should cleanup subscription on unmount', () => { - const subscribe = jest.fn() - const unsubscribe = jest.fn() + const subscribe = vi.fn() + const unsubscribe = vi.fn() subscribe.mockReturnValue(unsubscribe) const { unmount } = render() @@ -332,14 +332,14 @@ expect(description).toBeInTheDocument() ```typescript // Bad - fake timers don't work well with real Promises -jest.useFakeTimers() +vi.useFakeTimers() await waitFor(() => { expect(screen.getByText('Data')).toBeInTheDocument() }) // May timeout! 
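// (Hedged aside: Vitest also offers async-aware alternatives — awaiting
// vi.advanceTimersByTimeAsync(ms) flushes timer-scheduled promises, and
// vi.useFakeTimers({ shouldAdvanceTime: true }) lets waitFor's real polling
// proceed. Verify against the Vitest version in use.)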
// Good - use runAllTimers or advanceTimersByTime -jest.useFakeTimers() +vi.useFakeTimers() render() -jest.runAllTimers() +vi.runAllTimers() expect(screen.getByText('Data')).toBeInTheDocument() ``` diff --git a/.claude/skills/frontend-testing/CHECKLIST.md b/.claude/skills/frontend-testing/references/checklist.md similarity index 88% rename from .claude/skills/frontend-testing/CHECKLIST.md rename to .claude/skills/frontend-testing/references/checklist.md index 95e04aec3f..1ff2b27bbb 100644 --- a/.claude/skills/frontend-testing/CHECKLIST.md +++ b/.claude/skills/frontend-testing/references/checklist.md @@ -74,9 +74,9 @@ Use this checklist when generating or reviewing tests for Dify frontend componen ### Mocks - [ ] **DO NOT mock base components** (`@/app/components/base/*`) -- [ ] `jest.clearAllMocks()` in `beforeEach` (not `afterEach`) +- [ ] `vi.clearAllMocks()` in `beforeEach` (not `afterEach`) - [ ] Shared mock state reset in `beforeEach` -- [ ] i18n mock returns keys (not empty strings) +- [ ] i18n uses global mock (auto-loaded in `web/vitest.setup.ts`); only override locally for custom translations - [ ] Router mocks match actual Next.js API - [ ] Mocks reflect actual component conditional behavior - [ ] Only mock: API services, complex context providers, third-party libs @@ -114,15 +114,15 @@ For the current file being tested: **Run these checks after EACH test file, not just at the end:** -- [ ] Run `pnpm test -- path/to/file.spec.tsx` - **MUST PASS before next file** +- [ ] Run `pnpm test path/to/file.spec.tsx` - **MUST PASS before next file** - [ ] Fix any failures immediately - [ ] Mark file as complete in todo list - [ ] Only then proceed to next file ### After All Files Complete -- [ ] Run full directory test: `pnpm test -- path/to/directory/` -- [ ] Check coverage report: `pnpm test -- --coverage` +- [ ] Run full directory test: `pnpm test path/to/directory/` +- [ ] Check coverage report: `pnpm test:coverage` - [ ] Run `pnpm lint:fix` on all test files - [ ] Run `pnpm type-check:tsgo` @@ -132,10 +132,10 @@ For the current file being tested: ```typescript // ❌ Mock doesn't match actual behavior -jest.mock('./Component', () => () =>
<div>Mocked</div>
) +vi.mock('./Component', () => () =>
<div>Mocked</div>
) // ✅ Mock matches actual conditional logic -jest.mock('./Component', () => ({ isOpen }: any) => +vi.mock('./Component', () => ({ isOpen }: any) => isOpen ?
<div>Content</div>
: null ) ``` @@ -145,7 +145,7 @@ jest.mock('./Component', () => ({ isOpen }: any) => ```typescript // ❌ Shared state not reset let mockState = false -jest.mock('./useHook', () => () => mockState) +vi.mock('./useHook', () => () => mockState) // ✅ Reset in beforeEach beforeEach(() => { @@ -186,16 +186,16 @@ Always test these scenarios: ```bash # Run specific test -pnpm test -- path/to/file.spec.tsx +pnpm test path/to/file.spec.tsx # Run with coverage -pnpm test -- --coverage path/to/file.spec.tsx +pnpm test:coverage path/to/file.spec.tsx # Watch mode -pnpm test -- --watch path/to/file.spec.tsx +pnpm test:watch path/to/file.spec.tsx # Update snapshots (use sparingly) -pnpm test -- -u path/to/file.spec.tsx +pnpm test -u path/to/file.spec.tsx # Analyze component pnpm analyze-component path/to/component.tsx diff --git a/.claude/skills/frontend-testing/guides/common-patterns.md b/.claude/skills/frontend-testing/references/common-patterns.md similarity index 94% rename from .claude/skills/frontend-testing/guides/common-patterns.md rename to .claude/skills/frontend-testing/references/common-patterns.md index 84a6045b04..6eded5ceba 100644 --- a/.claude/skills/frontend-testing/guides/common-patterns.md +++ b/.claude/skills/frontend-testing/references/common-patterns.md @@ -126,7 +126,7 @@ describe('Counter', () => { describe('ControlledInput', () => { it('should call onChange with new value', async () => { const user = userEvent.setup() - const handleChange = jest.fn() + const handleChange = vi.fn() render() @@ -136,7 +136,7 @@ describe('ControlledInput', () => { }) it('should display controlled value', () => { - render() + render() expect(screen.getByRole('textbox')).toHaveValue('controlled') }) @@ -195,7 +195,7 @@ describe('ItemList', () => { it('should handle item selection', async () => { const user = userEvent.setup() - const onSelect = jest.fn() + const onSelect = vi.fn() render() @@ -217,20 +217,20 @@ describe('ItemList', () => { ```typescript describe('Modal', () => { it('should not render when closed', () => { - render() + render() expect(screen.queryByRole('dialog')).not.toBeInTheDocument() }) it('should render when open', () => { - render() + render() expect(screen.getByRole('dialog')).toBeInTheDocument() }) it('should call onClose when clicking overlay', async () => { const user = userEvent.setup() - const handleClose = jest.fn() + const handleClose = vi.fn() render() @@ -241,7 +241,7 @@ describe('Modal', () => { it('should call onClose when pressing Escape', async () => { const user = userEvent.setup() - const handleClose = jest.fn() + const handleClose = vi.fn() render() @@ -254,7 +254,7 @@ describe('Modal', () => { const user = userEvent.setup() render( - + @@ -279,7 +279,7 @@ describe('Modal', () => { describe('LoginForm', () => { it('should submit valid form', async () => { const user = userEvent.setup() - const onSubmit = jest.fn() + const onSubmit = vi.fn() render() @@ -296,7 +296,7 @@ describe('LoginForm', () => { it('should show validation errors', async () => { const user = userEvent.setup() - render() + render() // Submit empty form await user.click(screen.getByRole('button', { name: /sign in/i })) @@ -308,7 +308,7 @@ describe('LoginForm', () => { it('should validate email format', async () => { const user = userEvent.setup() - render() + render() await user.type(screen.getByLabelText(/email/i), 'invalid-email') await user.click(screen.getByRole('button', { name: /sign in/i })) @@ -318,7 +318,7 @@ describe('LoginForm', () => { it('should disable submit button while submitting', 
async () => { const user = userEvent.setup() - const onSubmit = jest.fn(() => new Promise(resolve => setTimeout(resolve, 100))) + const onSubmit = vi.fn(() => new Promise(resolve => setTimeout(resolve, 100))) render() @@ -407,7 +407,7 @@ it('test 1', () => { // Good - cleanup is automatic with RTL, but reset mocks beforeEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() }) ``` diff --git a/.claude/skills/frontend-testing/guides/domain-components.md b/.claude/skills/frontend-testing/references/domain-components.md similarity index 95% rename from .claude/skills/frontend-testing/guides/domain-components.md rename to .claude/skills/frontend-testing/references/domain-components.md index ed2cc6eb8a..5535d28f3d 100644 --- a/.claude/skills/frontend-testing/guides/domain-components.md +++ b/.claude/skills/frontend-testing/references/domain-components.md @@ -23,7 +23,7 @@ import NodeConfigPanel from './node-config-panel' import { createMockNode, createMockWorkflowContext } from '@/__mocks__/workflow' // Mock workflow context -jest.mock('@/app/components/workflow/hooks', () => ({ +vi.mock('@/app/components/workflow/hooks', () => ({ useWorkflowStore: () => mockWorkflowStore, useNodesInteractions: () => mockNodesInteractions, })) @@ -31,21 +31,21 @@ jest.mock('@/app/components/workflow/hooks', () => ({ let mockWorkflowStore = { nodes: [], edges: [], - updateNode: jest.fn(), + updateNode: vi.fn(), } let mockNodesInteractions = { - handleNodeSelect: jest.fn(), - handleNodeDelete: jest.fn(), + handleNodeSelect: vi.fn(), + handleNodeDelete: vi.fn(), } describe('NodeConfigPanel', () => { beforeEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() mockWorkflowStore = { nodes: [], edges: [], - updateNode: jest.fn(), + updateNode: vi.fn(), } }) @@ -161,23 +161,23 @@ import { render, screen, fireEvent, waitFor } from '@testing-library/react' import userEvent from '@testing-library/user-event' import DocumentUploader from './document-uploader' -jest.mock('@/service/datasets', () => ({ - uploadDocument: jest.fn(), - parseDocument: jest.fn(), +vi.mock('@/service/datasets', () => ({ + uploadDocument: vi.fn(), + parseDocument: vi.fn(), })) import * as datasetService from '@/service/datasets' -const mockedService = datasetService as jest.Mocked +const mockedService = vi.mocked(datasetService) describe('DocumentUploader', () => { beforeEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() }) describe('File Upload', () => { it('should accept valid file types', async () => { const user = userEvent.setup() - const onUpload = jest.fn() + const onUpload = vi.fn() mockedService.uploadDocument.mockResolvedValue({ id: 'doc-1' }) render() @@ -326,14 +326,14 @@ describe('DocumentList', () => { describe('Search & Filtering', () => { it('should filter by search query', async () => { const user = userEvent.setup() - jest.useFakeTimers() + vi.useFakeTimers() render() await user.type(screen.getByPlaceholderText(/search/i), 'test query') // Debounce - jest.advanceTimersByTime(300) + vi.advanceTimersByTime(300) await waitFor(() => { expect(mockedService.getDocuments).toHaveBeenCalledWith( @@ -342,7 +342,7 @@ describe('DocumentList', () => { ) }) - jest.useRealTimers() + vi.useRealTimers() }) }) }) @@ -367,13 +367,13 @@ import { render, screen, fireEvent, waitFor } from '@testing-library/react' import userEvent from '@testing-library/user-event' import AppConfigForm from './app-config-form' -jest.mock('@/service/apps', () => ({ - updateAppConfig: jest.fn(), - getAppConfig: jest.fn(), +vi.mock('@/service/apps', () => ({ + 
updateAppConfig: vi.fn(), + getAppConfig: vi.fn(), })) import * as appService from '@/service/apps' -const mockedService = appService as jest.Mocked +const mockedService = vi.mocked(appService) describe('AppConfigForm', () => { const defaultConfig = { @@ -384,7 +384,7 @@ describe('AppConfigForm', () => { } beforeEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() mockedService.getAppConfig.mockResolvedValue(defaultConfig) }) diff --git a/.claude/skills/frontend-testing/guides/mocking.md b/.claude/skills/frontend-testing/references/mocking.md similarity index 80% rename from .claude/skills/frontend-testing/guides/mocking.md rename to .claude/skills/frontend-testing/references/mocking.md index 6b2c517cb6..c70bcf0ae5 100644 --- a/.claude/skills/frontend-testing/guides/mocking.md +++ b/.claude/skills/frontend-testing/references/mocking.md @@ -19,8 +19,8 @@ ```typescript // ❌ WRONG: Don't mock base components -jest.mock('@/app/components/base/loading', () => () =>
<div>Loading</div>
) -jest.mock('@/app/components/base/button', () => ({ children }: any) => <button>{children}</button>) +vi.mock('@/app/components/base/loading', () => () =>
<div>Loading</div>
) +vi.mock('@/app/components/base/button', () => ({ children }: any) => ) // ✅ CORRECT: Import and use real base components import Loading from '@/app/components/base/loading' @@ -41,33 +41,52 @@ Only mock these categories: | Location | Purpose | |----------|---------| -| `web/__mocks__/` | Reusable mocks shared across multiple test files | -| Test file | Test-specific mocks, inline with `jest.mock()` | +| `web/vitest.setup.ts` | Global mocks shared by all tests (for example `react-i18next`, `next/image`) | +| `web/__mocks__/` | Reusable mock factories shared across multiple test files | +| Test file | Test-specific mocks, inline with `vi.mock()` | + +Modules are not mocked automatically. Use `vi.mock` in test files, or add global mocks in `web/vitest.setup.ts`. ## Essential Mocks -### 1. i18n (Always Required) +### 1. i18n (Auto-loaded via Global Mock) + +A global mock is defined in `web/vitest.setup.ts` and is auto-loaded by Vitest setup. + +The global mock provides: + +- `useTranslation` - returns translation keys with namespace prefix +- `Trans` component - renders i18nKey and components +- `useMixedTranslation` (from `@/app/components/plugins/marketplace/hooks`) +- `useGetLanguage` (from `@/context/i18n`) - returns `'en-US'` + +**Default behavior**: Most tests should use the global mock (no local override needed). + +**For custom translations**: Use the helper function from `@/test/i18n-mock`: ```typescript -jest.mock('react-i18next', () => ({ - useTranslation: () => ({ - t: (key: string) => key, - }), +import { createReactI18nextMock } from '@/test/i18n-mock' + +vi.mock('react-i18next', () => createReactI18nextMock({ + 'my.custom.key': 'Custom translation', + 'button.save': 'Save', })) ``` +**Avoid**: Manually defining `useTranslation` mocks that just return the key - the global mock already does this. + ### 2. Next.js Router ```typescript -const mockPush = jest.fn() -const mockReplace = jest.fn() +const mockPush = vi.fn() +const mockReplace = vi.fn() -jest.mock('next/navigation', () => ({ +vi.mock('next/navigation', () => ({ useRouter: () => ({ push: mockPush, replace: mockReplace, - back: jest.fn(), - prefetch: jest.fn(), + back: vi.fn(), + prefetch: vi.fn(), }), usePathname: () => '/current-path', useSearchParams: () => new URLSearchParams('?key=value'), @@ -75,7 +94,7 @@ jest.mock('next/navigation', () => ({ describe('Component', () => { beforeEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() }) it('should navigate on click', () => { @@ -92,7 +111,7 @@ describe('Component', () => { // ⚠️ Important: Use shared state for components that depend on each other let mockPortalOpenState = false -jest.mock('@/app/components/base/portal-to-follow-elem', () => ({ +vi.mock('@/app/components/base/portal-to-follow-elem', () => ({ PortalToFollowElem: ({ children, open, ...props }: any) => { mockPortalOpenState = open || false // Update shared state return
<div {...props}>{children}</div>
@@ -109,7 +128,7 @@ jest.mock('@/app/components/base/portal-to-follow-elem', () => ({ describe('Component', () => { beforeEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() mockPortalOpenState = false // ✅ Reset shared state }) }) @@ -120,13 +139,13 @@ describe('Component', () => { ```typescript import * as api from '@/service/api' -jest.mock('@/service/api') +vi.mock('@/service/api') -const mockedApi = api as jest.Mocked +const mockedApi = vi.mocked(api) describe('Component', () => { beforeEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() // Setup default mock implementation mockedApi.fetchData.mockResolvedValue({ data: [] }) @@ -229,32 +248,9 @@ describe('Component with Context', () => { }) ``` -### 7. SWR / React Query +### 7. React Query ```typescript -// SWR -jest.mock('swr', () => ({ - __esModule: true, - default: jest.fn(), -})) - -import useSWR from 'swr' -const mockedUseSWR = useSWR as jest.Mock - -describe('Component with SWR', () => { - it('should show loading state', () => { - mockedUseSWR.mockReturnValue({ - data: undefined, - error: undefined, - isLoading: true, - }) - - render() - expect(screen.getByText(/loading/i)).toBeInTheDocument() - }) -}) - -// React Query import { QueryClient, QueryClientProvider } from '@tanstack/react-query' const createTestQueryClient = () => new QueryClient({ @@ -313,7 +309,7 @@ Need to use a component in test? │ └─ YES → Mock it (next/navigation, external SDKs) │ └─ Is it i18n? - └─ YES → Mock to return keys + └─ YES → Uses shared mock (auto-loaded). Override only for custom translations ``` ## Factory Function Pattern diff --git a/.claude/skills/frontend-testing/guides/workflow.md b/.claude/skills/frontend-testing/references/workflow.md similarity index 96% rename from .claude/skills/frontend-testing/guides/workflow.md rename to .claude/skills/frontend-testing/references/workflow.md index b0f2994bde..009c3e013b 100644 --- a/.claude/skills/frontend-testing/guides/workflow.md +++ b/.claude/skills/frontend-testing/references/workflow.md @@ -35,7 +35,7 @@ When testing a **single component, hook, or utility**: 2. Run `pnpm analyze-component ` (if available) 3. Check complexity score and features detected 4. Write the test file -5. Run test: `pnpm test -- .spec.tsx` +5. Run test: `pnpm test .spec.tsx` 6. Fix any failures 7. Verify coverage meets goals (100% function, >95% branch) ``` @@ -80,7 +80,7 @@ Process files in this recommended order: ``` ┌─────────────────────────────────────────────┐ │ 1. Write test file │ -│ 2. Run: pnpm test -- .spec.tsx │ +│ 2. Run: pnpm test .spec.tsx │ │ 3. If FAIL → Fix immediately, re-run │ │ 4. If PASS → Mark complete in todo list │ │ 5. ONLY THEN proceed to next file │ @@ -95,10 +95,10 @@ After all individual tests pass: ```bash # Run all tests in the directory together -pnpm test -- path/to/directory/ +pnpm test path/to/directory/ # Check coverage -pnpm test -- --coverage path/to/directory/ +pnpm test:coverage path/to/directory/ ``` ## Component Complexity Guidelines @@ -201,9 +201,9 @@ Run pnpm test ← Multiple failures, hard to debug ``` # GOOD: Incremental with verification Write component-a.spec.tsx -Run pnpm test -- component-a.spec.tsx ✅ +Run pnpm test component-a.spec.tsx ✅ Write component-b.spec.tsx -Run pnpm test -- component-b.spec.tsx ✅ +Run pnpm test component-b.spec.tsx ✅ ...continue... 
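(then finish with the full-suite validation described above)
Run pnpm test path/to/directory/ ✅
Run pnpm test:coverage path/to/directory/ ✅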
``` diff --git a/.claude/skills/skill-creator/SKILL.md b/.claude/skills/skill-creator/SKILL.md new file mode 100644 index 0000000000..b49da5ac68 --- /dev/null +++ b/.claude/skills/skill-creator/SKILL.md @@ -0,0 +1,355 @@ +--- +name: skill-creator +description: Guide for creating effective skills. This skill should be used when users want to create a new skill (or update an existing skill) that extends Claude's capabilities with specialized knowledge, workflows, or tool integrations. +--- + +# Skill Creator + +This skill provides guidance for creating effective skills. + +## About Skills + +Skills are modular, self-contained packages that extend Claude's capabilities by providing +specialized knowledge, workflows, and tools. Think of them as "onboarding guides" for specific +domains or tasks—they transform Claude from a general-purpose agent into a specialized agent +equipped with procedural knowledge that no model can fully possess. + +### What Skills Provide + +1. Specialized workflows - Multi-step procedures for specific domains +2. Tool integrations - Instructions for working with specific file formats or APIs +3. Domain expertise - Company-specific knowledge, schemas, business logic +4. Bundled resources - Scripts, references, and assets for complex and repetitive tasks + +## Core Principles + +### Concise is Key + +The context window is a public good. Skills share the context window with everything else Claude needs: system prompt, conversation history, other Skills' metadata, and the actual user request. + +**Default assumption: Claude is already very smart.** Only add context Claude doesn't already have. Challenge each piece of information: "Does Claude really need this explanation?" and "Does this paragraph justify its token cost?" + +Prefer concise examples over verbose explanations. + +### Set Appropriate Degrees of Freedom + +Match the level of specificity to the task's fragility and variability: + +**High freedom (text-based instructions)**: Use when multiple approaches are valid, decisions depend on context, or heuristics guide the approach. + +**Medium freedom (pseudocode or scripts with parameters)**: Use when a preferred pattern exists, some variation is acceptable, or configuration affects behavior. + +**Low freedom (specific scripts, few parameters)**: Use when operations are fragile and error-prone, consistency is critical, or a specific sequence must be followed. + +Think of Claude as exploring a path: a narrow bridge with cliffs needs specific guardrails (low freedom), while an open field allows many routes (high freedom). + +### Anatomy of a Skill + +Every skill consists of a required SKILL.md file and optional bundled resources: + +``` +skill-name/ +├── SKILL.md (required) +│ ├── YAML frontmatter metadata (required) +│ │ ├── name: (required) +│ │ └── description: (required) +│ └── Markdown instructions (required) +└── Bundled Resources (optional) + ├── scripts/ - Executable code (Python/Bash/etc.) + ├── references/ - Documentation intended to be loaded into context as needed + └── assets/ - Files used in output (templates, icons, fonts, etc.) +``` + +#### SKILL.md (required) + +Every SKILL.md consists of: + +- **Frontmatter** (YAML): Contains `name` and `description` fields. These are the only fields that Claude reads to determine when the skill gets used, thus it is very important to be clear and comprehensive in describing what the skill is, and when it should be used. +- **Body** (Markdown): Instructions and guidance for using the skill. 
Only loaded AFTER the skill triggers (if at all). + +#### Bundled Resources (optional) + +##### Scripts (`scripts/`) + +Executable code (Python/Bash/etc.) for tasks that require deterministic reliability or are repeatedly rewritten. + +- **When to include**: When the same code is being rewritten repeatedly or deterministic reliability is needed +- **Example**: `scripts/rotate_pdf.py` for PDF rotation tasks +- **Benefits**: Token efficient, deterministic, may be executed without loading into context +- **Note**: Scripts may still need to be read by Claude for patching or environment-specific adjustments + +##### References (`references/`) + +Documentation and reference material intended to be loaded as needed into context to inform Claude's process and thinking. + +- **When to include**: For documentation that Claude should reference while working +- **Examples**: `references/finance.md` for financial schemas, `references/mnda.md` for company NDA template, `references/policies.md` for company policies, `references/api_docs.md` for API specifications +- **Use cases**: Database schemas, API documentation, domain knowledge, company policies, detailed workflow guides +- **Benefits**: Keeps SKILL.md lean, loaded only when Claude determines it's needed +- **Best practice**: If files are large (>10k words), include grep search patterns in SKILL.md +- **Avoid duplication**: Information should live in either SKILL.md or references files, not both. Prefer references files for detailed information unless it's truly core to the skill—this keeps SKILL.md lean while making information discoverable without hogging the context window. Keep only essential procedural instructions and workflow guidance in SKILL.md; move detailed reference material, schemas, and examples to references files. + +##### Assets (`assets/`) + +Files not intended to be loaded into context, but rather used within the output Claude produces. + +- **When to include**: When the skill needs files that will be used in the final output +- **Examples**: `assets/logo.png` for brand assets, `assets/slides.pptx` for PowerPoint templates, `assets/frontend-template/` for HTML/React boilerplate, `assets/font.ttf` for typography +- **Use cases**: Templates, images, icons, boilerplate code, fonts, sample documents that get copied or modified +- **Benefits**: Separates output resources from documentation, enables Claude to use files without loading them into context + +#### What to Not Include in a Skill + +A skill should only contain essential files that directly support its functionality. Do NOT create extraneous documentation or auxiliary files, including: + +- README.md +- INSTALLATION_GUIDE.md +- QUICK_REFERENCE.md +- CHANGELOG.md +- etc. + +The skill should only contain the information needed for an AI agent to do the job at hand. It should not contain auxilary context about the process that went into creating it, setup and testing procedures, user-facing documentation, etc. Creating additional documentation files just adds clutter and confusion. + +### Progressive Disclosure Design Principle + +Skills use a three-level loading system to manage context efficiently: + +1. **Metadata (name + description)** - Always in context (~100 words) +2. **SKILL.md body** - When skill triggers (<5k words) +3. **Bundled resources** - As needed by Claude (Unlimited because scripts can be executed without reading into context window) + +#### Progressive Disclosure Patterns + +Keep SKILL.md body to the essentials and under 500 lines to minimize context bloat. 
Split content into separate files when approaching this limit. When splitting out content into other files, it is very important to reference them from SKILL.md and describe clearly when to read them, to ensure the reader of the skill knows they exist and when to use them. + +**Key principle:** When a skill supports multiple variations, frameworks, or options, keep only the core workflow and selection guidance in SKILL.md. Move variant-specific details (patterns, examples, configuration) into separate reference files. + +**Pattern 1: High-level guide with references** + +```markdown +# PDF Processing + +## Quick start + +Extract text with pdfplumber: +[code example] + +## Advanced features + +- **Form filling**: See [FORMS.md](FORMS.md) for complete guide +- **API reference**: See [REFERENCE.md](REFERENCE.md) for all methods +- **Examples**: See [EXAMPLES.md](EXAMPLES.md) for common patterns +``` + +Claude loads FORMS.md, REFERENCE.md, or EXAMPLES.md only when needed. + +**Pattern 2: Domain-specific organization** + +For Skills with multiple domains, organize content by domain to avoid loading irrelevant context: + +``` +bigquery-skill/ +├── SKILL.md (overview and navigation) +└── reference/ + ├── finance.md (revenue, billing metrics) + ├── sales.md (opportunities, pipeline) + ├── product.md (API usage, features) + └── marketing.md (campaigns, attribution) +``` + +When a user asks about sales metrics, Claude only reads sales.md. + +Similarly, for skills supporting multiple frameworks or variants, organize by variant: + +``` +cloud-deploy/ +├── SKILL.md (workflow + provider selection) +└── references/ + ├── aws.md (AWS deployment patterns) + ├── gcp.md (GCP deployment patterns) + └── azure.md (Azure deployment patterns) +``` + +When the user chooses AWS, Claude only reads aws.md. + +**Pattern 3: Conditional details** + +Show basic content, link to advanced content: + +```markdown +# DOCX Processing + +## Creating documents + +Use docx-js for new documents. See [DOCX-JS.md](DOCX-JS.md). + +## Editing documents + +For simple edits, modify the XML directly. + +**For tracked changes**: See [REDLINING.md](REDLINING.md) +**For OOXML details**: See [OOXML.md](OOXML.md) +``` + +Claude reads REDLINING.md or OOXML.md only when the user needs those features. + +**Important guidelines:** + +- **Avoid deeply nested references** - Keep references one level deep from SKILL.md. All reference files should link directly from SKILL.md. +- **Structure longer reference files** - For files longer than 100 lines, include a table of contents at the top so Claude can see the full scope when previewing. + +## Skill Creation Process + +Skill creation involves these steps: + +1. Understand the skill with concrete examples +2. Plan reusable skill contents (scripts, references, assets) +3. Initialize the skill (run init_skill.py) +4. Edit the skill (implement resources and write SKILL.md) +5. Package the skill (run package_skill.py) +6. Iterate based on real usage + +Follow these steps in order, skipping only if there is a clear reason why they are not applicable. + +### Step 1: Understanding the Skill with Concrete Examples + +Skip this step only when the skill's usage patterns are already clearly understood. It remains valuable even when working with an existing skill. + +To create an effective skill, clearly understand concrete examples of how the skill will be used. This understanding can come from either direct user examples or generated examples that are validated with user feedback. 
+ +For example, when building an image-editor skill, relevant questions include: + +- "What functionality should the image-editor skill support? Editing, rotating, anything else?" +- "Can you give some examples of how this skill would be used?" +- "I can imagine users asking for things like 'Remove the red-eye from this image' or 'Rotate this image'. Are there other ways you imagine this skill being used?" +- "What would a user say that should trigger this skill?" + +To avoid overwhelming users, avoid asking too many questions in a single message. Start with the most important questions and follow up as needed for better effectiveness. + +Conclude this step when there is a clear sense of the functionality the skill should support. + +### Step 2: Planning the Reusable Skill Contents + +To turn concrete examples into an effective skill, analyze each example by: + +1. Considering how to execute on the example from scratch +2. Identifying what scripts, references, and assets would be helpful when executing these workflows repeatedly + +Example: When building a `pdf-editor` skill to handle queries like "Help me rotate this PDF," the analysis shows: + +1. Rotating a PDF requires re-writing the same code each time +2. A `scripts/rotate_pdf.py` script would be helpful to store in the skill + +Example: When designing a `frontend-webapp-builder` skill for queries like "Build me a todo app" or "Build me a dashboard to track my steps," the analysis shows: + +1. Writing a frontend webapp requires the same boilerplate HTML/React each time +2. An `assets/hello-world/` template containing the boilerplate HTML/React project files would be helpful to store in the skill + +Example: When building a `big-query` skill to handle queries like "How many users have logged in today?" the analysis shows: + +1. Querying BigQuery requires re-discovering the table schemas and relationships each time +2. A `references/schema.md` file documenting the table schemas would be helpful to store in the skill + +To establish the skill's contents, analyze each concrete example to create a list of the reusable resources to include: scripts, references, and assets. + +### Step 3: Initializing the Skill + +At this point, it is time to actually create the skill. + +Skip this step only if the skill being developed already exists, and iteration or packaging is needed. In this case, continue to the next step. + +When creating a new skill from scratch, always run the `init_skill.py` script. The script conveniently generates a new template skill directory that automatically includes everything a skill requires, making the skill creation process much more efficient and reliable. + +Usage: + +```bash +scripts/init_skill.py --path +``` + +The script: + +- Creates the skill directory at the specified path +- Generates a SKILL.md template with proper frontmatter and TODO placeholders +- Creates example resource directories: `scripts/`, `references/`, and `assets/` +- Adds example files in each directory that can be customized or deleted + +After initialization, customize or remove the generated SKILL.md and example files as needed. + +### Step 4: Edit the Skill + +When editing the (newly-generated or existing) skill, remember that the skill is being created for another instance of Claude to use. Include information that would be beneficial and non-obvious to Claude. Consider what procedural knowledge, domain-specific details, or reusable assets would help another Claude instance execute these tasks more effectively. 
+ +#### Learn Proven Design Patterns + +Consult these helpful guides based on your skill's needs: + +- **Multi-step processes**: See references/workflows.md for sequential workflows and conditional logic +- **Specific output formats or quality standards**: See references/output-patterns.md for template and example patterns + +These files contain established best practices for effective skill design. + +#### Start with Reusable Skill Contents + +To begin implementation, start with the reusable resources identified above: `scripts/`, `references/`, and `assets/` files. Note that this step may require user input. For example, when implementing a `brand-guidelines` skill, the user may need to provide brand assets or templates to store in `assets/`, or documentation to store in `references/`. + +Added scripts must be tested by actually running them to ensure there are no bugs and that the output matches what is expected. If there are many similar scripts, only a representative sample needs to be tested to ensure confidence that they all work while balancing time to completion. + +Any example files and directories not needed for the skill should be deleted. The initialization script creates example files in `scripts/`, `references/`, and `assets/` to demonstrate structure, but most skills won't need all of them. + +#### Update SKILL.md + +**Writing Guidelines:** Always use imperative/infinitive form. + +##### Frontmatter + +Write the YAML frontmatter with `name` and `description`: + +- `name`: The skill name +- `description`: This is the primary triggering mechanism for your skill, and helps Claude understand when to use the skill. + - Include both what the Skill does and specific triggers/contexts for when to use it. + - Include all "when to use" information here - Not in the body. The body is only loaded after triggering, so "When to Use This Skill" sections in the body are not helpful to Claude. + - Example description for a `docx` skill: "Comprehensive document creation, editing, and analysis with support for tracked changes, comments, formatting preservation, and text extraction. Use when Claude needs to work with professional documents (.docx files) for: (1) Creating new documents, (2) Modifying or editing content, (3) Working with tracked changes, (4) Adding comments, or any other document tasks" + +Do not include any other fields in YAML frontmatter. + +##### Body + +Write instructions for using the skill and its bundled resources. + +### Step 5: Packaging a Skill + +Once development of the skill is complete, it must be packaged into a distributable .skill file that gets shared with the user. The packaging process automatically validates the skill first to ensure it meets all requirements: + +```bash +scripts/package_skill.py +``` + +Optional output directory specification: + +```bash +scripts/package_skill.py ./dist +``` + +The packaging script will: + +1. **Validate** the skill automatically, checking: + + - YAML frontmatter format and required fields + - Skill naming conventions and directory structure + - Description completeness and quality + - File organization and resource references + +2. **Package** the skill if validation passes, creating a .skill file named after the skill (e.g., `my-skill.skill`) that includes all files and maintains the proper directory structure for distribution. The .skill file is a zip file with a .skill extension. + +If validation fails, the script will report the errors and exit without creating a package. 
Fix any validation errors and run the packaging command again. + +### Step 6: Iterate + +After testing the skill, users may request improvements. Often this happens right after using the skill, with fresh context of how the skill performed. + +**Iteration workflow:** + +1. Use the skill on real tasks +2. Notice struggles or inefficiencies +3. Identify how SKILL.md or bundled resources should be updated +4. Implement changes and test again diff --git a/.claude/skills/skill-creator/references/output-patterns.md b/.claude/skills/skill-creator/references/output-patterns.md new file mode 100644 index 0000000000..022e85fe5e --- /dev/null +++ b/.claude/skills/skill-creator/references/output-patterns.md @@ -0,0 +1,86 @@ +# Output Patterns + +Use these patterns when skills need to produce consistent, high-quality output. + +## Template Pattern + +Provide templates for output format. Match the level of strictness to your needs. + +**For strict requirements (like API responses or data formats):** + +```markdown +## Report structure + +ALWAYS use this exact template structure: + +# [Analysis Title] + +## Executive summary +[One-paragraph overview of key findings] + +## Key findings +- Finding 1 with supporting data +- Finding 2 with supporting data +- Finding 3 with supporting data + +## Recommendations +1. Specific actionable recommendation +2. Specific actionable recommendation +``` + +**For flexible guidance (when adaptation is useful):** + +```markdown +## Report structure + +Here is a sensible default format, but use your best judgment: + +# [Analysis Title] + +## Executive summary +[Overview] + +## Key findings +[Adapt sections based on what you discover] + +## Recommendations +[Tailor to the specific context] + +Adjust sections as needed for the specific analysis type. +``` + +## Examples Pattern + +For skills where output quality depends on seeing examples, provide input/output pairs: + +```markdown +## Commit message format + +Generate commit messages following these examples: + +**Example 1:** +Input: Added user authentication with JWT tokens +Output: +``` + +feat(auth): implement JWT-based authentication + +Add login endpoint and token validation middleware + +``` + +**Example 2:** +Input: Fixed bug where dates displayed incorrectly in reports +Output: +``` + +fix(reports): correct date formatting in timezone conversion + +Use UTC timestamps consistently across report generation + +``` + +Follow this style: type(scope): brief description, then detailed explanation. +``` + +Examples help Claude understand the desired style and level of detail more clearly than descriptions alone. diff --git a/.claude/skills/skill-creator/references/workflows.md b/.claude/skills/skill-creator/references/workflows.md new file mode 100644 index 0000000000..54b0174078 --- /dev/null +++ b/.claude/skills/skill-creator/references/workflows.md @@ -0,0 +1,28 @@ +# Workflow Patterns + +## Sequential Workflows + +For complex tasks, break operations into clear, sequential steps. It is often helpful to give Claude an overview of the process towards the beginning of SKILL.md: + +```markdown +Filling a PDF form involves these steps: + +1. Analyze the form (run analyze_form.py) +2. Create field mapping (edit fields.json) +3. Validate mapping (run validate_fields.py) +4. Fill the form (run fill_form.py) +5. Verify output (run verify_output.py) +``` + +## Conditional Workflows + +For tasks with branching logic, guide Claude through decision points: + +```markdown +1. 
Determine the modification type: + **Creating new content?** → Follow "Creation workflow" below + **Editing existing content?** → Follow "Editing workflow" below + +2. Creation workflow: [steps] +3. Editing workflow: [steps] +``` diff --git a/.claude/skills/skill-creator/scripts/init_skill.py b/.claude/skills/skill-creator/scripts/init_skill.py new file mode 100755 index 0000000000..249fffcbbd --- /dev/null +++ b/.claude/skills/skill-creator/scripts/init_skill.py @@ -0,0 +1,300 @@ +#!/usr/bin/env python3 +""" +Skill Initializer - Creates a new skill from template + +Usage: + init_skill.py --path + +Examples: + init_skill.py my-new-skill --path skills/public + init_skill.py my-api-helper --path skills/private + init_skill.py custom-skill --path /custom/location +""" + +import sys +from pathlib import Path + + +SKILL_TEMPLATE = """--- +name: {skill_name} +description: [TODO: Complete and informative explanation of what the skill does and when to use it. Include WHEN to use this skill - specific scenarios, file types, or tasks that trigger it.] +--- + +# {skill_title} + +## Overview + +[TODO: 1-2 sentences explaining what this skill enables] + +## Structuring This Skill + +[TODO: Choose the structure that best fits this skill's purpose. Common patterns: + +**1. Workflow-Based** (best for sequential processes) +- Works well when there are clear step-by-step procedures +- Example: DOCX skill with "Workflow Decision Tree" → "Reading" → "Creating" → "Editing" +- Structure: ## Overview → ## Workflow Decision Tree → ## Step 1 → ## Step 2... + +**2. Task-Based** (best for tool collections) +- Works well when the skill offers different operations/capabilities +- Example: PDF skill with "Quick Start" → "Merge PDFs" → "Split PDFs" → "Extract Text" +- Structure: ## Overview → ## Quick Start → ## Task Category 1 → ## Task Category 2... + +**3. Reference/Guidelines** (best for standards or specifications) +- Works well for brand guidelines, coding standards, or requirements +- Example: Brand styling with "Brand Guidelines" → "Colors" → "Typography" → "Features" +- Structure: ## Overview → ## Guidelines → ## Specifications → ## Usage... + +**4. Capabilities-Based** (best for integrated systems) +- Works well when the skill provides multiple interrelated features +- Example: Product Management with "Core Capabilities" → numbered capability list +- Structure: ## Overview → ## Core Capabilities → ### 1. Feature → ### 2. Feature... + +Patterns can be mixed and matched as needed. Most skills combine patterns (e.g., start with task-based, add workflow for complex operations). + +Delete this entire "Structuring This Skill" section when done - it's just guidance.] + +## [TODO: Replace with the first main section based on chosen structure] + +[TODO: Add content here. See examples in existing skills: +- Code samples for technical skills +- Decision trees for complex workflows +- Concrete examples with realistic user requests +- References to scripts/templates/references as needed] + +## Resources + +This skill includes example resource directories that demonstrate how to organize different types of bundled resources: + +### scripts/ +Executable code (Python/Bash/etc.) that can be run directly to perform specific operations. 
+ +**Examples from other skills:** +- PDF skill: `fill_fillable_fields.py`, `extract_form_field_info.py` - utilities for PDF manipulation +- DOCX skill: `document.py`, `utilities.py` - Python modules for document processing + +**Appropriate for:** Python scripts, shell scripts, or any executable code that performs automation, data processing, or specific operations. + +**Note:** Scripts may be executed without loading into context, but can still be read by Claude for patching or environment adjustments. + +### references/ +Documentation and reference material intended to be loaded into context to inform Claude's process and thinking. + +**Examples from other skills:** +- Product management: `communication.md`, `context_building.md` - detailed workflow guides +- BigQuery: API reference documentation and query examples +- Finance: Schema documentation, company policies + +**Appropriate for:** In-depth documentation, API references, database schemas, comprehensive guides, or any detailed information that Claude should reference while working. + +### assets/ +Files not intended to be loaded into context, but rather used within the output Claude produces. + +**Examples from other skills:** +- Brand styling: PowerPoint template files (.pptx), logo files +- Frontend builder: HTML/React boilerplate project directories +- Typography: Font files (.ttf, .woff2) + +**Appropriate for:** Templates, boilerplate code, document templates, images, icons, fonts, or any files meant to be copied or used in the final output. + +--- + +**Any unneeded directories can be deleted.** Not every skill requires all three types of resources. +""" + +EXAMPLE_SCRIPT = '''#!/usr/bin/env python3 +""" +Example helper script for {skill_name} + +This is a placeholder script that can be executed directly. +Replace with actual implementation or delete if not needed. + +Example real scripts from other skills: +- pdf/scripts/fill_fillable_fields.py - Fills PDF form fields +- pdf/scripts/convert_pdf_to_images.py - Converts PDF pages to images +""" + +def main(): + print("This is an example script for {skill_name}") + # TODO: Add actual script logic here + # This could be data processing, file conversion, API calls, etc. + +if __name__ == "__main__": + main() +''' + +EXAMPLE_REFERENCE = """# Reference Documentation for {skill_title} + +This is a placeholder for detailed reference documentation. +Replace with actual reference content or delete if not needed. + +Example real reference docs from other skills: +- product-management/references/communication.md - Comprehensive guide for status updates +- product-management/references/context_building.md - Deep-dive on gathering context +- bigquery/references/ - API references and query examples + +## When Reference Docs Are Useful + +Reference docs are ideal for: +- Comprehensive API documentation +- Detailed workflow guides +- Complex multi-step processes +- Information too lengthy for main SKILL.md +- Content that's only needed for specific use cases + +## Structure Suggestions + +### API Reference Example +- Overview +- Authentication +- Endpoints with examples +- Error codes +- Rate limits + +### Workflow Guide Example +- Prerequisites +- Step-by-step instructions +- Common patterns +- Troubleshooting +- Best practices +""" + +EXAMPLE_ASSET = """# Example Asset File + +This placeholder represents where asset files would be stored. +Replace with actual asset files (templates, images, fonts, etc.) or delete if not needed. 
+ +Asset files are NOT intended to be loaded into context, but rather used within +the output Claude produces. + +Example asset files from other skills: +- Brand guidelines: logo.png, slides_template.pptx +- Frontend builder: hello-world/ directory with HTML/React boilerplate +- Typography: custom-font.ttf, font-family.woff2 +- Data: sample_data.csv, test_dataset.json + +## Common Asset Types + +- Templates: .pptx, .docx, boilerplate directories +- Images: .png, .jpg, .svg, .gif +- Fonts: .ttf, .otf, .woff, .woff2 +- Boilerplate code: Project directories, starter files +- Icons: .ico, .svg +- Data files: .csv, .json, .xml, .yaml + +Note: This is a text placeholder. Actual assets can be any file type. +""" + + +def title_case_skill_name(skill_name): + """Convert hyphenated skill name to Title Case for display.""" + return " ".join(word.capitalize() for word in skill_name.split("-")) + + +def init_skill(skill_name, path): + """ + Initialize a new skill directory with template SKILL.md. + + Args: + skill_name: Name of the skill + path: Path where the skill directory should be created + + Returns: + Path to created skill directory, or None if error + """ + # Determine skill directory path + skill_dir = Path(path).resolve() / skill_name + + # Check if directory already exists + if skill_dir.exists(): + print(f"❌ Error: Skill directory already exists: {skill_dir}") + return None + + # Create skill directory + try: + skill_dir.mkdir(parents=True, exist_ok=False) + print(f"✅ Created skill directory: {skill_dir}") + except Exception as e: + print(f"❌ Error creating directory: {e}") + return None + + # Create SKILL.md from template + skill_title = title_case_skill_name(skill_name) + skill_content = SKILL_TEMPLATE.format(skill_name=skill_name, skill_title=skill_title) + + skill_md_path = skill_dir / "SKILL.md" + try: + skill_md_path.write_text(skill_content) + print("✅ Created SKILL.md") + except Exception as e: + print(f"❌ Error creating SKILL.md: {e}") + return None + + # Create resource directories with example files + try: + # Create scripts/ directory with example script + scripts_dir = skill_dir / "scripts" + scripts_dir.mkdir(exist_ok=True) + example_script = scripts_dir / "example.py" + example_script.write_text(EXAMPLE_SCRIPT.format(skill_name=skill_name)) + example_script.chmod(0o755) + print("✅ Created scripts/example.py") + + # Create references/ directory with example reference doc + references_dir = skill_dir / "references" + references_dir.mkdir(exist_ok=True) + example_reference = references_dir / "api_reference.md" + example_reference.write_text(EXAMPLE_REFERENCE.format(skill_title=skill_title)) + print("✅ Created references/api_reference.md") + + # Create assets/ directory with example asset placeholder + assets_dir = skill_dir / "assets" + assets_dir.mkdir(exist_ok=True) + example_asset = assets_dir / "example_asset.txt" + example_asset.write_text(EXAMPLE_ASSET) + print("✅ Created assets/example_asset.txt") + except Exception as e: + print(f"❌ Error creating resource directories: {e}") + return None + + # Print next steps + print(f"\n✅ Skill '{skill_name}' initialized successfully at {skill_dir}") + print("\nNext steps:") + print("1. Edit SKILL.md to complete the TODO items and update the description") + print("2. Customize or delete the example files in scripts/, references/, and assets/") + print("3. 
Run the validator when ready to check the skill structure") + + return skill_dir + + +def main(): + if len(sys.argv) < 4 or sys.argv[2] != "--path": + print("Usage: init_skill.py --path ") + print("\nSkill name requirements:") + print(" - Hyphen-case identifier (e.g., 'data-analyzer')") + print(" - Lowercase letters, digits, and hyphens only") + print(" - Max 40 characters") + print(" - Must match directory name exactly") + print("\nExamples:") + print(" init_skill.py my-new-skill --path skills/public") + print(" init_skill.py my-api-helper --path skills/private") + print(" init_skill.py custom-skill --path /custom/location") + sys.exit(1) + + skill_name = sys.argv[1] + path = sys.argv[3] + + print(f"🚀 Initializing skill: {skill_name}") + print(f" Location: {path}") + print() + + result = init_skill(skill_name, path) + + if result: + sys.exit(0) + else: + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/.claude/skills/skill-creator/scripts/package_skill.py b/.claude/skills/skill-creator/scripts/package_skill.py new file mode 100755 index 0000000000..736b928be0 --- /dev/null +++ b/.claude/skills/skill-creator/scripts/package_skill.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python3 +""" +Skill Packager - Creates a distributable .skill file of a skill folder + +Usage: + python utils/package_skill.py [output-directory] + +Example: + python utils/package_skill.py skills/public/my-skill + python utils/package_skill.py skills/public/my-skill ./dist +""" + +import sys +import zipfile +from pathlib import Path +from quick_validate import validate_skill + + +def package_skill(skill_path, output_dir=None): + """ + Package a skill folder into a .skill file. + + Args: + skill_path: Path to the skill folder + output_dir: Optional output directory for the .skill file (defaults to current directory) + + Returns: + Path to the created .skill file, or None if error + """ + skill_path = Path(skill_path).resolve() + + # Validate skill folder exists + if not skill_path.exists(): + print(f"❌ Error: Skill folder not found: {skill_path}") + return None + + if not skill_path.is_dir(): + print(f"❌ Error: Path is not a directory: {skill_path}") + return None + + # Validate SKILL.md exists + skill_md = skill_path / "SKILL.md" + if not skill_md.exists(): + print(f"❌ Error: SKILL.md not found in {skill_path}") + return None + + # Run validation before packaging + print("🔍 Validating skill...") + valid, message = validate_skill(skill_path) + if not valid: + print(f"❌ Validation failed: {message}") + print(" Please fix the validation errors before packaging.") + return None + print(f"✅ {message}\n") + + # Determine output location + skill_name = skill_path.name + if output_dir: + output_path = Path(output_dir).resolve() + output_path.mkdir(parents=True, exist_ok=True) + else: + output_path = Path.cwd() + + skill_filename = output_path / f"{skill_name}.skill" + + # Create the .skill file (zip format) + try: + with zipfile.ZipFile(skill_filename, "w", zipfile.ZIP_DEFLATED) as zipf: + # Walk through the skill directory + for file_path in skill_path.rglob("*"): + if file_path.is_file(): + # Calculate the relative path within the zip + arcname = file_path.relative_to(skill_path.parent) + zipf.write(file_path, arcname) + print(f" Added: {arcname}") + + print(f"\n✅ Successfully packaged skill to: {skill_filename}") + return skill_filename + + except Exception as e: + print(f"❌ Error creating .skill file: {e}") + return None + + +def main(): + if len(sys.argv) < 2: + print("Usage: python utils/package_skill.py 
[output-directory]") + print("\nExample:") + print(" python utils/package_skill.py skills/public/my-skill") + print(" python utils/package_skill.py skills/public/my-skill ./dist") + sys.exit(1) + + skill_path = sys.argv[1] + output_dir = sys.argv[2] if len(sys.argv) > 2 else None + + print(f"📦 Packaging skill: {skill_path}") + if output_dir: + print(f" Output directory: {output_dir}") + print() + + result = package_skill(skill_path, output_dir) + + if result: + sys.exit(0) + else: + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/.claude/skills/skill-creator/scripts/quick_validate.py b/.claude/skills/skill-creator/scripts/quick_validate.py new file mode 100755 index 0000000000..66eb0a71bf --- /dev/null +++ b/.claude/skills/skill-creator/scripts/quick_validate.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 +""" +Quick validation script for skills - minimal version +""" + +import sys +import os +import re +import yaml +from pathlib import Path + + +def validate_skill(skill_path): + """Basic validation of a skill""" + skill_path = Path(skill_path) + + # Check SKILL.md exists + skill_md = skill_path / "SKILL.md" + if not skill_md.exists(): + return False, "SKILL.md not found" + + # Read and validate frontmatter + content = skill_md.read_text() + if not content.startswith("---"): + return False, "No YAML frontmatter found" + + # Extract frontmatter + match = re.match(r"^---\n(.*?)\n---", content, re.DOTALL) + if not match: + return False, "Invalid frontmatter format" + + frontmatter_text = match.group(1) + + # Parse YAML frontmatter + try: + frontmatter = yaml.safe_load(frontmatter_text) + if not isinstance(frontmatter, dict): + return False, "Frontmatter must be a YAML dictionary" + except yaml.YAMLError as e: + return False, f"Invalid YAML in frontmatter: {e}" + + # Define allowed properties + ALLOWED_PROPERTIES = {"name", "description", "license", "allowed-tools", "metadata"} + + # Check for unexpected properties (excluding nested keys under metadata) + unexpected_keys = set(frontmatter.keys()) - ALLOWED_PROPERTIES + if unexpected_keys: + return False, ( + f"Unexpected key(s) in SKILL.md frontmatter: {', '.join(sorted(unexpected_keys))}. " + f"Allowed properties are: {', '.join(sorted(ALLOWED_PROPERTIES))}" + ) + + # Check required fields + if "name" not in frontmatter: + return False, "Missing 'name' in frontmatter" + if "description" not in frontmatter: + return False, "Missing 'description' in frontmatter" + + # Extract name for validation + name = frontmatter.get("name", "") + if not isinstance(name, str): + return False, f"Name must be a string, got {type(name).__name__}" + name = name.strip() + if name: + # Check naming convention (hyphen-case: lowercase with hyphens) + if not re.match(r"^[a-z0-9-]+$", name): + return False, f"Name '{name}' should be hyphen-case (lowercase letters, digits, and hyphens only)" + if name.startswith("-") or name.endswith("-") or "--" in name: + return False, f"Name '{name}' cannot start/end with hyphen or contain consecutive hyphens" + # Check name length (max 64 characters per spec) + if len(name) > 64: + return False, f"Name is too long ({len(name)} characters). Maximum is 64 characters." 
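+ # For example, "data-analyzer" and "pdf2md" pass every check above, + # while "Data_Analyzer" (not hyphen-case), "-analyzer" (leading hyphen), + # and "a--b" (consecutive hyphens) are all rejected.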
+ + # Extract and validate description + description = frontmatter.get("description", "") + if not isinstance(description, str): + return False, f"Description must be a string, got {type(description).__name__}" + description = description.strip() + if description: + # Check for angle brackets + if "<" in description or ">" in description: + return False, "Description cannot contain angle brackets (< or >)" + # Check description length (max 1024 characters per spec) + if len(description) > 1024: + return False, f"Description is too long ({len(description)} characters). Maximum is 1024 characters." + + return True, "Skill is valid!" + + +if __name__ == "__main__": + if len(sys.argv) != 2: + print("Usage: python quick_validate.py <skill-path>") + sys.exit(1) + + valid, message = validate_skill(sys.argv[1]) + print(message) + sys.exit(0 if valid else 1) diff --git a/.codex/skills b/.codex/skills new file mode 120000 index 0000000000..454b8427cd --- /dev/null +++ b/.codex/skills @@ -0,0 +1 @@ +../.claude/skills \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index ddec42e0ee..3998a69c36 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -6,6 +6,9 @@ "context": "..", "dockerfile": "Dockerfile" }, + "mounts": [ + "source=dify-dev-tmp,target=/tmp,type=volume" + ], "features": { "ghcr.io/devcontainers/features/node:1": { "nodeGypDependencies": true, @@ -34,19 +37,13 @@ }, "postStartCommand": "./.devcontainer/post_start_command.sh", "postCreateCommand": "./.devcontainer/post_create_command.sh" - // Features to add to the dev container. More info: https://containers.dev/features. // "features": {}, - // Use 'forwardPorts' to make a list of ports inside the container available locally. // "forwardPorts": [], - // Use 'postCreateCommand' to run commands after the container is created. // "postCreateCommand": "python --version", - // Configure tool-specific properties. // "customizations": {}, - // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
- // "remoteUser": "root" -} +} \ No newline at end of file diff --git a/.devcontainer/post_create_command.sh b/.devcontainer/post_create_command.sh index a26fd076ed..220f77e5ce 100755 --- a/.devcontainer/post_create_command.sh +++ b/.devcontainer/post_create_command.sh @@ -1,12 +1,13 @@ #!/bin/bash WORKSPACE_ROOT=$(pwd) +export COREPACK_ENABLE_DOWNLOAD_PROMPT=0 corepack enable cd web && pnpm install pipx install uv echo "alias start-api=\"cd $WORKSPACE_ROOT/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug\"" >> ~/.bashrc -echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor\"" >> ~/.bashrc +echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention\"" >> ~/.bashrc echo "alias start-web=\"cd $WORKSPACE_ROOT/web && pnpm dev\"" >> ~/.bashrc echo "alias start-web-prod=\"cd $WORKSPACE_ROOT/web && pnpm build && pnpm start\"" >> ~/.bashrc echo "alias start-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d\"" >> ~/.bashrc diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index d6f326d4dc..106c26bbed 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -6,229 +6,244 @@ * @crazywoola @laipz8200 @Yeuoly +# CODEOWNERS file +/.github/CODEOWNERS @laipz8200 @crazywoola + +# Docs +/docs/ @crazywoola + # Backend (default owner, more specific rules below will override) -api/ @QuantumGhost +/api/ @QuantumGhost # Backend - MCP -api/core/mcp/ @Nov1c444 -api/core/entities/mcp_provider.py @Nov1c444 -api/services/tools/mcp_tools_manage_service.py @Nov1c444 -api/controllers/mcp/ @Nov1c444 -api/controllers/console/app/mcp_server.py @Nov1c444 -api/tests/**/*mcp* @Nov1c444 +/api/core/mcp/ @Nov1c444 +/api/core/entities/mcp_provider.py @Nov1c444 +/api/services/tools/mcp_tools_manage_service.py @Nov1c444 +/api/controllers/mcp/ @Nov1c444 +/api/controllers/console/app/mcp_server.py @Nov1c444 +/api/tests/**/*mcp* @Nov1c444 # Backend - Workflow - Engine (Core graph execution engine) -api/core/workflow/graph_engine/ @laipz8200 @QuantumGhost -api/core/workflow/runtime/ @laipz8200 @QuantumGhost -api/core/workflow/graph/ @laipz8200 @QuantumGhost -api/core/workflow/graph_events/ @laipz8200 @QuantumGhost -api/core/workflow/node_events/ @laipz8200 @QuantumGhost -api/core/model_runtime/ @laipz8200 @QuantumGhost +/api/core/workflow/graph_engine/ @laipz8200 @QuantumGhost +/api/core/workflow/runtime/ @laipz8200 @QuantumGhost +/api/core/workflow/graph/ @laipz8200 @QuantumGhost +/api/core/workflow/graph_events/ @laipz8200 @QuantumGhost +/api/core/workflow/node_events/ @laipz8200 @QuantumGhost +/api/core/model_runtime/ @laipz8200 @QuantumGhost # Backend - Workflow - Nodes (Agent, Iteration, Loop, LLM) -api/core/workflow/nodes/agent/ @Nov1c444 -api/core/workflow/nodes/iteration/ @Nov1c444 -api/core/workflow/nodes/loop/ @Nov1c444 -api/core/workflow/nodes/llm/ @Nov1c444 +/api/core/workflow/nodes/agent/ @Nov1c444 +/api/core/workflow/nodes/iteration/ 
@Nov1c444 +/api/core/workflow/nodes/loop/ @Nov1c444 +/api/core/workflow/nodes/llm/ @Nov1c444 # Backend - RAG (Retrieval Augmented Generation) -api/core/rag/ @JohnJyong -api/services/rag_pipeline/ @JohnJyong -api/services/dataset_service.py @JohnJyong -api/services/knowledge_service.py @JohnJyong -api/services/external_knowledge_service.py @JohnJyong -api/services/hit_testing_service.py @JohnJyong -api/services/metadata_service.py @JohnJyong -api/services/vector_service.py @JohnJyong -api/services/entities/knowledge_entities/ @JohnJyong -api/services/entities/external_knowledge_entities/ @JohnJyong -api/controllers/console/datasets/ @JohnJyong -api/controllers/service_api/dataset/ @JohnJyong -api/models/dataset.py @JohnJyong -api/tasks/rag_pipeline/ @JohnJyong -api/tasks/add_document_to_index_task.py @JohnJyong -api/tasks/batch_clean_document_task.py @JohnJyong -api/tasks/clean_document_task.py @JohnJyong -api/tasks/clean_notion_document_task.py @JohnJyong -api/tasks/document_indexing_task.py @JohnJyong -api/tasks/document_indexing_sync_task.py @JohnJyong -api/tasks/document_indexing_update_task.py @JohnJyong -api/tasks/duplicate_document_indexing_task.py @JohnJyong -api/tasks/recover_document_indexing_task.py @JohnJyong -api/tasks/remove_document_from_index_task.py @JohnJyong -api/tasks/retry_document_indexing_task.py @JohnJyong -api/tasks/sync_website_document_indexing_task.py @JohnJyong -api/tasks/batch_create_segment_to_index_task.py @JohnJyong -api/tasks/create_segment_to_index_task.py @JohnJyong -api/tasks/delete_segment_from_index_task.py @JohnJyong -api/tasks/disable_segment_from_index_task.py @JohnJyong -api/tasks/disable_segments_from_index_task.py @JohnJyong -api/tasks/enable_segment_to_index_task.py @JohnJyong -api/tasks/enable_segments_to_index_task.py @JohnJyong -api/tasks/clean_dataset_task.py @JohnJyong -api/tasks/deal_dataset_index_update_task.py @JohnJyong -api/tasks/deal_dataset_vector_index_task.py @JohnJyong +/api/core/rag/ @JohnJyong +/api/services/rag_pipeline/ @JohnJyong +/api/services/dataset_service.py @JohnJyong +/api/services/knowledge_service.py @JohnJyong +/api/services/external_knowledge_service.py @JohnJyong +/api/services/hit_testing_service.py @JohnJyong +/api/services/metadata_service.py @JohnJyong +/api/services/vector_service.py @JohnJyong +/api/services/entities/knowledge_entities/ @JohnJyong +/api/services/entities/external_knowledge_entities/ @JohnJyong +/api/controllers/console/datasets/ @JohnJyong +/api/controllers/service_api/dataset/ @JohnJyong +/api/models/dataset.py @JohnJyong +/api/tasks/rag_pipeline/ @JohnJyong +/api/tasks/add_document_to_index_task.py @JohnJyong +/api/tasks/batch_clean_document_task.py @JohnJyong +/api/tasks/clean_document_task.py @JohnJyong +/api/tasks/clean_notion_document_task.py @JohnJyong +/api/tasks/document_indexing_task.py @JohnJyong +/api/tasks/document_indexing_sync_task.py @JohnJyong +/api/tasks/document_indexing_update_task.py @JohnJyong +/api/tasks/duplicate_document_indexing_task.py @JohnJyong +/api/tasks/recover_document_indexing_task.py @JohnJyong +/api/tasks/remove_document_from_index_task.py @JohnJyong +/api/tasks/retry_document_indexing_task.py @JohnJyong +/api/tasks/sync_website_document_indexing_task.py @JohnJyong +/api/tasks/batch_create_segment_to_index_task.py @JohnJyong +/api/tasks/create_segment_to_index_task.py @JohnJyong +/api/tasks/delete_segment_from_index_task.py @JohnJyong +/api/tasks/disable_segment_from_index_task.py @JohnJyong +/api/tasks/disable_segments_from_index_task.py @JohnJyong 
+/api/tasks/enable_segment_to_index_task.py @JohnJyong +/api/tasks/enable_segments_to_index_task.py @JohnJyong +/api/tasks/clean_dataset_task.py @JohnJyong +/api/tasks/deal_dataset_index_update_task.py @JohnJyong +/api/tasks/deal_dataset_vector_index_task.py @JohnJyong # Backend - Plugins -api/core/plugin/ @Mairuis @Yeuoly @Stream29 -api/services/plugin/ @Mairuis @Yeuoly @Stream29 -api/controllers/console/workspace/plugin.py @Mairuis @Yeuoly @Stream29 -api/controllers/inner_api/plugin/ @Mairuis @Yeuoly @Stream29 -api/tasks/process_tenant_plugin_autoupgrade_check_task.py @Mairuis @Yeuoly @Stream29 +/api/core/plugin/ @Mairuis @Yeuoly @Stream29 +/api/services/plugin/ @Mairuis @Yeuoly @Stream29 +/api/controllers/console/workspace/plugin.py @Mairuis @Yeuoly @Stream29 +/api/controllers/inner_api/plugin/ @Mairuis @Yeuoly @Stream29 +/api/tasks/process_tenant_plugin_autoupgrade_check_task.py @Mairuis @Yeuoly @Stream29 # Backend - Trigger/Schedule/Webhook -api/controllers/trigger/ @Mairuis @Yeuoly -api/controllers/console/app/workflow_trigger.py @Mairuis @Yeuoly -api/controllers/console/workspace/trigger_providers.py @Mairuis @Yeuoly -api/core/trigger/ @Mairuis @Yeuoly -api/core/app/layers/trigger_post_layer.py @Mairuis @Yeuoly -api/services/trigger/ @Mairuis @Yeuoly -api/models/trigger.py @Mairuis @Yeuoly -api/fields/workflow_trigger_fields.py @Mairuis @Yeuoly -api/repositories/workflow_trigger_log_repository.py @Mairuis @Yeuoly -api/repositories/sqlalchemy_workflow_trigger_log_repository.py @Mairuis @Yeuoly -api/libs/schedule_utils.py @Mairuis @Yeuoly -api/services/workflow/scheduler.py @Mairuis @Yeuoly -api/schedule/trigger_provider_refresh_task.py @Mairuis @Yeuoly -api/schedule/workflow_schedule_task.py @Mairuis @Yeuoly -api/tasks/trigger_processing_tasks.py @Mairuis @Yeuoly -api/tasks/trigger_subscription_refresh_tasks.py @Mairuis @Yeuoly -api/tasks/workflow_schedule_tasks.py @Mairuis @Yeuoly -api/tasks/workflow_cfs_scheduler/ @Mairuis @Yeuoly -api/events/event_handlers/sync_plugin_trigger_when_app_created.py @Mairuis @Yeuoly -api/events/event_handlers/update_app_triggers_when_app_published_workflow_updated.py @Mairuis @Yeuoly -api/events/event_handlers/sync_workflow_schedule_when_app_published.py @Mairuis @Yeuoly -api/events/event_handlers/sync_webhook_when_app_created.py @Mairuis @Yeuoly +/api/controllers/trigger/ @Mairuis @Yeuoly +/api/controllers/console/app/workflow_trigger.py @Mairuis @Yeuoly +/api/controllers/console/workspace/trigger_providers.py @Mairuis @Yeuoly +/api/core/trigger/ @Mairuis @Yeuoly +/api/core/app/layers/trigger_post_layer.py @Mairuis @Yeuoly +/api/services/trigger/ @Mairuis @Yeuoly +/api/models/trigger.py @Mairuis @Yeuoly +/api/fields/workflow_trigger_fields.py @Mairuis @Yeuoly +/api/repositories/workflow_trigger_log_repository.py @Mairuis @Yeuoly +/api/repositories/sqlalchemy_workflow_trigger_log_repository.py @Mairuis @Yeuoly +/api/libs/schedule_utils.py @Mairuis @Yeuoly +/api/services/workflow/scheduler.py @Mairuis @Yeuoly +/api/schedule/trigger_provider_refresh_task.py @Mairuis @Yeuoly +/api/schedule/workflow_schedule_task.py @Mairuis @Yeuoly +/api/tasks/trigger_processing_tasks.py @Mairuis @Yeuoly +/api/tasks/trigger_subscription_refresh_tasks.py @Mairuis @Yeuoly +/api/tasks/workflow_schedule_tasks.py @Mairuis @Yeuoly +/api/tasks/workflow_cfs_scheduler/ @Mairuis @Yeuoly +/api/events/event_handlers/sync_plugin_trigger_when_app_created.py @Mairuis @Yeuoly +/api/events/event_handlers/update_app_triggers_when_app_published_workflow_updated.py @Mairuis @Yeuoly 
+/api/events/event_handlers/sync_workflow_schedule_when_app_published.py @Mairuis @Yeuoly +/api/events/event_handlers/sync_webhook_when_app_created.py @Mairuis @Yeuoly # Backend - Async Workflow -api/services/async_workflow_service.py @Mairuis @Yeuoly -api/tasks/async_workflow_tasks.py @Mairuis @Yeuoly +/api/services/async_workflow_service.py @Mairuis @Yeuoly +/api/tasks/async_workflow_tasks.py @Mairuis @Yeuoly # Backend - Billing -api/services/billing_service.py @hj24 @zyssyz123 -api/controllers/console/billing/ @hj24 @zyssyz123 +/api/services/billing_service.py @hj24 @zyssyz123 +/api/controllers/console/billing/ @hj24 @zyssyz123 # Backend - Enterprise -api/configs/enterprise/ @GarfieldDai @GareArc -api/services/enterprise/ @GarfieldDai @GareArc -api/services/feature_service.py @GarfieldDai @GareArc -api/controllers/console/feature.py @GarfieldDai @GareArc -api/controllers/web/feature.py @GarfieldDai @GareArc +/api/configs/enterprise/ @GarfieldDai @GareArc +/api/services/enterprise/ @GarfieldDai @GareArc +/api/services/feature_service.py @GarfieldDai @GareArc +/api/controllers/console/feature.py @GarfieldDai @GareArc +/api/controllers/web/feature.py @GarfieldDai @GareArc # Backend - Database Migrations -api/migrations/ @snakevash @laipz8200 +/api/migrations/ @snakevash @laipz8200 @MRZHUH + +# Backend - Vector DB Middleware +/api/configs/middleware/vdb/* @JohnJyong # Frontend -web/ @iamjoel +/web/ @iamjoel + +# Frontend - Web Tests +/.github/workflows/web-tests.yml @iamjoel # Frontend - App - Orchestration -web/app/components/workflow/ @iamjoel @zxhlyh -web/app/components/workflow-app/ @iamjoel @zxhlyh -web/app/components/app/configuration/ @iamjoel @zxhlyh -web/app/components/app/app-publisher/ @iamjoel @zxhlyh +/web/app/components/workflow/ @iamjoel @zxhlyh +/web/app/components/workflow-app/ @iamjoel @zxhlyh +/web/app/components/app/configuration/ @iamjoel @zxhlyh +/web/app/components/app/app-publisher/ @iamjoel @zxhlyh # Frontend - WebApp - Chat -web/app/components/base/chat/ @iamjoel @zxhlyh +/web/app/components/base/chat/ @iamjoel @zxhlyh # Frontend - WebApp - Completion -web/app/components/share/text-generation/ @iamjoel @zxhlyh +/web/app/components/share/text-generation/ @iamjoel @zxhlyh # Frontend - App - List and Creation -web/app/components/apps/ @JzoNgKVO @iamjoel -web/app/components/app/create-app-dialog/ @JzoNgKVO @iamjoel -web/app/components/app/create-app-modal/ @JzoNgKVO @iamjoel -web/app/components/app/create-from-dsl-modal/ @JzoNgKVO @iamjoel +/web/app/components/apps/ @JzoNgKVO @iamjoel +/web/app/components/app/create-app-dialog/ @JzoNgKVO @iamjoel +/web/app/components/app/create-app-modal/ @JzoNgKVO @iamjoel +/web/app/components/app/create-from-dsl-modal/ @JzoNgKVO @iamjoel # Frontend - App - API Documentation -web/app/components/develop/ @JzoNgKVO @iamjoel +/web/app/components/develop/ @JzoNgKVO @iamjoel # Frontend - App - Logs and Annotations -web/app/components/app/workflow-log/ @JzoNgKVO @iamjoel -web/app/components/app/log/ @JzoNgKVO @iamjoel -web/app/components/app/log-annotation/ @JzoNgKVO @iamjoel -web/app/components/app/annotation/ @JzoNgKVO @iamjoel +/web/app/components/app/workflow-log/ @JzoNgKVO @iamjoel +/web/app/components/app/log/ @JzoNgKVO @iamjoel +/web/app/components/app/log-annotation/ @JzoNgKVO @iamjoel +/web/app/components/app/annotation/ @JzoNgKVO @iamjoel # Frontend - App - Monitoring -web/app/(commonLayout)/app/(appDetailLayout)/\[appId\]/overview/ @JzoNgKVO @iamjoel -web/app/components/app/overview/ @JzoNgKVO @iamjoel 
+/web/app/(commonLayout)/app/(appDetailLayout)/\[appId\]/overview/ @JzoNgKVO @iamjoel +/web/app/components/app/overview/ @JzoNgKVO @iamjoel # Frontend - App - Settings -web/app/components/app-sidebar/ @JzoNgKVO @iamjoel +/web/app/components/app-sidebar/ @JzoNgKVO @iamjoel # Frontend - RAG - Hit Testing -web/app/components/datasets/hit-testing/ @JzoNgKVO @iamjoel +/web/app/components/datasets/hit-testing/ @JzoNgKVO @iamjoel # Frontend - RAG - List and Creation -web/app/components/datasets/list/ @iamjoel @WTW0313 -web/app/components/datasets/create/ @iamjoel @WTW0313 -web/app/components/datasets/create-from-pipeline/ @iamjoel @WTW0313 -web/app/components/datasets/external-knowledge-base/ @iamjoel @WTW0313 +/web/app/components/datasets/list/ @iamjoel @WTW0313 +/web/app/components/datasets/create/ @iamjoel @WTW0313 +/web/app/components/datasets/create-from-pipeline/ @iamjoel @WTW0313 +/web/app/components/datasets/external-knowledge-base/ @iamjoel @WTW0313 # Frontend - RAG - Orchestration (general rule first, specific rules below override) -web/app/components/rag-pipeline/ @iamjoel @WTW0313 -web/app/components/rag-pipeline/components/rag-pipeline-main.tsx @iamjoel @zxhlyh -web/app/components/rag-pipeline/store/ @iamjoel @zxhlyh +/web/app/components/rag-pipeline/ @iamjoel @WTW0313 +/web/app/components/rag-pipeline/components/rag-pipeline-main.tsx @iamjoel @zxhlyh +/web/app/components/rag-pipeline/store/ @iamjoel @zxhlyh # Frontend - RAG - Documents List -web/app/components/datasets/documents/list.tsx @iamjoel @WTW0313 -web/app/components/datasets/documents/create-from-pipeline/ @iamjoel @WTW0313 +/web/app/components/datasets/documents/list.tsx @iamjoel @WTW0313 +/web/app/components/datasets/documents/create-from-pipeline/ @iamjoel @WTW0313 # Frontend - RAG - Segments List -web/app/components/datasets/documents/detail/ @iamjoel @WTW0313 +/web/app/components/datasets/documents/detail/ @iamjoel @WTW0313 # Frontend - RAG - Settings -web/app/components/datasets/settings/ @iamjoel @WTW0313 +/web/app/components/datasets/settings/ @iamjoel @WTW0313 # Frontend - Ecosystem - Plugins -web/app/components/plugins/ @iamjoel @zhsama +/web/app/components/plugins/ @iamjoel @zhsama # Frontend - Ecosystem - Tools -web/app/components/tools/ @iamjoel @Yessenia-d +/web/app/components/tools/ @iamjoel @Yessenia-d # Frontend - Ecosystem - MarketPlace -web/app/components/plugins/marketplace/ @iamjoel @Yessenia-d +/web/app/components/plugins/marketplace/ @iamjoel @Yessenia-d # Frontend - Login and Registration -web/app/signin/ @douxc @iamjoel -web/app/signup/ @douxc @iamjoel -web/app/reset-password/ @douxc @iamjoel -web/app/install/ @douxc @iamjoel -web/app/init/ @douxc @iamjoel -web/app/forgot-password/ @douxc @iamjoel -web/app/account/ @douxc @iamjoel +/web/app/signin/ @douxc @iamjoel +/web/app/signup/ @douxc @iamjoel +/web/app/reset-password/ @douxc @iamjoel +/web/app/install/ @douxc @iamjoel +/web/app/init/ @douxc @iamjoel +/web/app/forgot-password/ @douxc @iamjoel +/web/app/account/ @douxc @iamjoel # Frontend - Service Authentication -web/service/base.ts @douxc @iamjoel +/web/service/base.ts @douxc @iamjoel # Frontend - WebApp Authentication and Access Control -web/app/(shareLayout)/components/ @douxc @iamjoel -web/app/(shareLayout)/webapp-signin/ @douxc @iamjoel -web/app/(shareLayout)/webapp-reset-password/ @douxc @iamjoel -web/app/components/app/app-access-control/ @douxc @iamjoel +/web/app/(shareLayout)/components/ @douxc @iamjoel +/web/app/(shareLayout)/webapp-signin/ @douxc @iamjoel 
+/web/app/(shareLayout)/webapp-reset-password/ @douxc @iamjoel +/web/app/components/app/app-access-control/ @douxc @iamjoel # Frontend - Explore Page -web/app/components/explore/ @CodingOnStar @iamjoel +/web/app/components/explore/ @CodingOnStar @iamjoel # Frontend - Personal Settings -web/app/components/header/account-setting/ @CodingOnStar @iamjoel -web/app/components/header/account-dropdown/ @CodingOnStar @iamjoel +/web/app/components/header/account-setting/ @CodingOnStar @iamjoel +/web/app/components/header/account-dropdown/ @CodingOnStar @iamjoel # Frontend - Analytics -web/app/components/base/ga/ @CodingOnStar @iamjoel +/web/app/components/base/ga/ @CodingOnStar @iamjoel # Frontend - Base Components -web/app/components/base/ @iamjoel @zxhlyh +/web/app/components/base/ @iamjoel @zxhlyh # Frontend - Utils and Hooks -web/utils/classnames.ts @iamjoel @zxhlyh -web/utils/time.ts @iamjoel @zxhlyh -web/utils/format.ts @iamjoel @zxhlyh -web/utils/clipboard.ts @iamjoel @zxhlyh -web/hooks/use-document-title.ts @iamjoel @zxhlyh +/web/utils/classnames.ts @iamjoel @zxhlyh +/web/utils/time.ts @iamjoel @zxhlyh +/web/utils/format.ts @iamjoel @zxhlyh +/web/utils/clipboard.ts @iamjoel @zxhlyh +/web/hooks/use-document-title.ts @iamjoel @zxhlyh # Frontend - Billing and Education -web/app/components/billing/ @iamjoel @zxhlyh -web/app/education-apply/ @iamjoel @zxhlyh +/web/app/components/billing/ @iamjoel @zxhlyh +/web/app/education-apply/ @iamjoel @zxhlyh # Frontend - Workspace -web/app/components/header/account-dropdown/workplace-selector/ @iamjoel @zxhlyh +/web/app/components/header/account-dropdown/workplace-selector/ @iamjoel @zxhlyh + +# Docker +/docker/* @laipz8200 diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index aa5a50918a..50dbde2aee 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -20,4 +20,4 @@ - [x] I understand that this PR may be closed in case there was no previous discussion or issues. (This doesn't apply to typos!) - [x] I've added a test for each change that was introduced, and I tried as much as possible to make a single atomic change. - [x] I've updated the documentation accordingly. 
-- [x] I ran `dev/reformat`(backend) and `cd web && npx lint-staged`(frontend) to appease the lint gods +- [x] I ran `make lint` and `make type-check` (backend) and `cd web && npx lint-staged` (frontend) to appease the lint gods diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index ab7878dc64..190e00d9fe 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -22,12 +22,12 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: persist-credentials: false - name: Setup UV and Python - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: enable-cache: true python-version: ${{ matrix.python-version }} @@ -39,12 +39,6 @@ jobs: - name: Install dependencies run: uv sync --project api --dev - - name: Run pyrefly check - run: | - cd api - uv add --dev pyrefly - uv run pyrefly check || true - - name: Run dify config tests run: uv run --project api dev/pytest/pytest_config_tests.py @@ -57,7 +51,7 @@ jobs: run: sh .github/workflows/expose_service_ports.sh - name: Set up Sandbox - uses: hoverkraft-tech/compose-action@v2.0.2 + uses: hoverkraft-tech/compose-action@v2 with: compose-file: | docker/docker-compose.middleware.yaml @@ -93,4 +87,12 @@ jobs: # Create a detailed coverage summary echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY - uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY + { + echo "" + echo "
<details><summary>File-level coverage (click to expand)</summary>" + echo "" + echo '```' + uv run --project api coverage report -m + echo '```' + echo "</details>
" + } >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/autofix.yml b/.github/workflows/autofix.yml index d7a58ce93d..5413f83c27 100644 --- a/.github/workflows/autofix.yml +++ b/.github/workflows/autofix.yml @@ -12,12 +12,28 @@ jobs: if: github.repository == 'langgenius/dify' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 + + - name: Check Docker Compose inputs + id: docker-compose-changes + uses: tj-actions/changed-files@v46 + with: + files: | + docker/generate_docker_compose + docker/.env.example + docker/docker-compose-template.yaml + docker/docker-compose.yaml - uses: actions/setup-python@v5 with: python-version: "3.11" - - uses: astral-sh/setup-uv@v6 + - uses: astral-sh/setup-uv@v7 + + - name: Generate Docker Compose + if: steps.docker-compose-changes.outputs.any_changed == 'true' + run: | + cd docker + ./generate_docker_compose - run: | cd api @@ -66,27 +82,6 @@ jobs: # mdformat breaks YAML front matter in markdown files. Add --exclude for directories containing YAML front matter. - name: mdformat run: | - uvx --python 3.13 mdformat . --exclude ".claude/skills/**" - - - name: Install pnpm - uses: pnpm/action-setup@v4 - with: - package_json_file: web/package.json - run_install: false - - - name: Setup NodeJS - uses: actions/setup-node@v4 - with: - node-version: 22 - cache: pnpm - cache-dependency-path: ./web/package.json - - - name: Web dependencies - working-directory: ./web - run: pnpm install --frozen-lockfile - - - name: oxlint - working-directory: ./web - run: pnpm exec oxlint --config .oxlintrc.json --fix . + uvx --python 3.13 mdformat . --exclude ".claude/skills/**/SKILL.md" - uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27 diff --git a/.github/workflows/build-push.yml b/.github/workflows/build-push.yml index f7f464a601..bbf89236de 100644 --- a/.github/workflows/build-push.yml +++ b/.github/workflows/build-push.yml @@ -90,7 +90,7 @@ jobs: touch "/tmp/digests/${sanitized_digest}" - name: Upload digest - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 with: name: digests-${{ matrix.context }}-${{ env.PLATFORM_PAIR }} path: /tmp/digests/* diff --git a/.github/workflows/db-migration-test.yml b/.github/workflows/db-migration-test.yml index 101d973466..e20cf9850b 100644 --- a/.github/workflows/db-migration-test.yml +++ b/.github/workflows/db-migration-test.yml @@ -13,13 +13,13 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 0 persist-credentials: false - name: Setup UV and Python - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: enable-cache: true python-version: "3.12" @@ -63,13 +63,13 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 0 persist-credentials: false - name: Setup UV and Python - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: enable-cache: true python-version: "3.12" diff --git a/.github/workflows/deploy-trigger-dev.yml b/.github/workflows/deploy-agent-dev.yml similarity index 75% rename from .github/workflows/deploy-trigger-dev.yml rename to .github/workflows/deploy-agent-dev.yml index 2d9a904fc5..dff48b5510 100644 --- a/.github/workflows/deploy-trigger-dev.yml +++ b/.github/workflows/deploy-agent-dev.yml @@ -1,4 +1,4 @@ -name: Deploy Trigger Dev +name: Deploy Agent Dev permissions: contents: read @@ -7,7 +7,7 @@ on: workflow_run: workflows: ["Build and Push API & Web"] branches: - - "deploy/trigger-dev" + - "deploy/agent-dev" 
types: - completed @@ -16,12 +16,12 @@ jobs: runs-on: ubuntu-latest if: | github.event.workflow_run.conclusion == 'success' && - github.event.workflow_run.head_branch == 'deploy/trigger-dev' + github.event.workflow_run.head_branch == 'deploy/agent-dev' steps: - name: Deploy to server uses: appleboy/ssh-action@v0.1.8 with: - host: ${{ secrets.TRIGGER_SSH_HOST }} + host: ${{ secrets.AGENT_DEV_SSH_HOST }} username: ${{ secrets.SSH_USER }} key: ${{ secrets.SSH_PRIVATE_KEY }} script: | diff --git a/.github/workflows/main-ci.yml b/.github/workflows/main-ci.yml index 876ec23a3d..d6653de950 100644 --- a/.github/workflows/main-ci.yml +++ b/.github/workflows/main-ci.yml @@ -27,7 +27,7 @@ jobs: vdb-changed: ${{ steps.changes.outputs.vdb }} migration-changed: ${{ steps.changes.outputs.migration }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: dorny/paths-filter@v3 id: changes with: @@ -38,6 +38,7 @@ jobs: - '.github/workflows/api-tests.yml' web: - 'web/**' + - '.github/workflows/web-tests.yml' vdb: - 'api/core/rag/datasource/**' - 'docker/**' diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index 5a8a34be79..462ece303e 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -19,13 +19,13 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: persist-credentials: false - name: Check changed files id: changed-files - uses: tj-actions/changed-files@v46 + uses: tj-actions/changed-files@v47 with: files: | api/** @@ -33,7 +33,7 @@ jobs: - name: Setup UV and Python if: steps.changed-files.outputs.any_changed == 'true' - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: enable-cache: false python-version: "3.12" @@ -68,15 +68,17 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: persist-credentials: false - name: Check changed files id: changed-files - uses: tj-actions/changed-files@v46 + uses: tj-actions/changed-files@v47 with: - files: web/** + files: | + web/** + .github/workflows/style.yml - name: Install pnpm uses: pnpm/action-setup@v4 @@ -85,12 +87,12 @@ jobs: run_install: false - name: Setup NodeJS - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 if: steps.changed-files.outputs.any_changed == 'true' with: node-version: 22 cache: pnpm - cache-dependency-path: ./web/package.json + cache-dependency-path: ./web/pnpm-lock.yaml - name: Web dependencies if: steps.changed-files.outputs.any_changed == 'true' @@ -108,35 +110,15 @@ jobs: working-directory: ./web run: pnpm run type-check:tsgo - docker-compose-template: - name: Docker Compose Template - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Check changed files - id: changed-files - uses: tj-actions/changed-files@v46 - with: - files: | - docker/generate_docker_compose - docker/.env.example - docker/docker-compose-template.yaml - docker/docker-compose.yaml - - - name: Generate Docker Compose + - name: Web dead code check if: steps.changed-files.outputs.any_changed == 'true' - run: | - cd docker - ./generate_docker_compose + working-directory: ./web + run: pnpm run knip - - name: Check for changes + - name: Web build check if: steps.changed-files.outputs.any_changed == 'true' - run: git diff --exit-code + working-directory: ./web + run: pnpm run build superlinter: name: SuperLinter @@ -144,14 +126,14 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: 
actions/checkout@v6 with: fetch-depth: 0 persist-credentials: false - name: Check changed files id: changed-files - uses: tj-actions/changed-files@v46 + uses: tj-actions/changed-files@v47 with: files: | **.sh diff --git a/.github/workflows/tool-test-sdks.yaml b/.github/workflows/tool-test-sdks.yaml index b1ccd7417a..0259ef2232 100644 --- a/.github/workflows/tool-test-sdks.yaml +++ b/.github/workflows/tool-test-sdks.yaml @@ -25,12 +25,12 @@ jobs: working-directory: sdks/nodejs-client steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: persist-credentials: false - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version: ${{ matrix.node-version }} cache: '' diff --git a/.github/workflows/translate-i18n-base-on-english.yml b/.github/workflows/translate-i18n-base-on-english.yml deleted file mode 100644 index fe8e2ebc2b..0000000000 --- a/.github/workflows/translate-i18n-base-on-english.yml +++ /dev/null @@ -1,91 +0,0 @@ -name: Check i18n Files and Create PR - -on: - push: - branches: [main] - paths: - - 'web/i18n/en-US/*.ts' - -permissions: - contents: write - pull-requests: write - -jobs: - check-and-update: - if: github.repository == 'langgenius/dify' - runs-on: ubuntu-latest - defaults: - run: - working-directory: web - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - token: ${{ secrets.GITHUB_TOKEN }} - - - name: Check for file changes in i18n/en-US - id: check_files - run: | - git fetch origin "${{ github.event.before }}" || true - git fetch origin "${{ github.sha }}" || true - changed_files=$(git diff --name-only "${{ github.event.before }}" "${{ github.sha }}" -- 'i18n/en-US/*.ts') - echo "Changed files: $changed_files" - if [ -n "$changed_files" ]; then - echo "FILES_CHANGED=true" >> $GITHUB_ENV - file_args="" - for file in $changed_files; do - filename=$(basename "$file" .ts) - file_args="$file_args --file $filename" - done - echo "FILE_ARGS=$file_args" >> $GITHUB_ENV - echo "File arguments: $file_args" - else - echo "FILES_CHANGED=false" >> $GITHUB_ENV - fi - - - name: Install pnpm - uses: pnpm/action-setup@v4 - with: - package_json_file: web/package.json - run_install: false - - - name: Set up Node.js - if: env.FILES_CHANGED == 'true' - uses: actions/setup-node@v4 - with: - node-version: 'lts/*' - cache: pnpm - cache-dependency-path: ./web/package.json - - - name: Install dependencies - if: env.FILES_CHANGED == 'true' - working-directory: ./web - run: pnpm install --frozen-lockfile - - - name: Generate i18n translations - if: env.FILES_CHANGED == 'true' - working-directory: ./web - run: pnpm run auto-gen-i18n ${{ env.FILE_ARGS }} - - - name: Generate i18n type definitions - if: env.FILES_CHANGED == 'true' - working-directory: ./web - run: pnpm run gen:i18n-types - - - name: Create Pull Request - if: env.FILES_CHANGED == 'true' - uses: peter-evans/create-pull-request@v6 - with: - token: ${{ secrets.GITHUB_TOKEN }} - commit-message: 'chore(i18n): update translations based on en-US changes' - title: 'chore(i18n): translate i18n files and update type definitions' - body: | - This PR was automatically created to update i18n files and TypeScript type definitions based on changes in en-US locale. 
- - **Triggered by:** ${{ github.sha }} - - **Changes included:** - - Updated translation files for all locales - - Regenerated TypeScript type definitions for type safety - branch: chore/automated-i18n-updates-${{ github.sha }} - delete-branch: true diff --git a/.github/workflows/translate-i18n-claude.yml b/.github/workflows/translate-i18n-claude.yml new file mode 100644 index 0000000000..003e7ffc6e --- /dev/null +++ b/.github/workflows/translate-i18n-claude.yml @@ -0,0 +1,421 @@ +name: Translate i18n Files with Claude Code + +# Note: claude-code-action doesn't support push events directly. +# Push events are handled by trigger-i18n-sync.yml which sends repository_dispatch. +# See: https://github.com/langgenius/dify/issues/30743 + +on: + repository_dispatch: + types: [i18n-sync] + workflow_dispatch: + inputs: + files: + description: 'Specific files to translate (space-separated, e.g., "app common"). Leave empty for all files.' + required: false + type: string + languages: + description: 'Specific languages to translate (space-separated, e.g., "zh-Hans ja-JP"). Leave empty for all supported languages.' + required: false + type: string + mode: + description: 'Sync mode: incremental (only changes) or full (re-check all keys)' + required: false + default: 'incremental' + type: choice + options: + - incremental + - full + +permissions: + contents: write + pull-requests: write + +jobs: + translate: + if: github.repository == 'langgenius/dify' + runs-on: ubuntu-latest + timeout-minutes: 60 + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Configure Git + run: | + git config --global user.name "github-actions[bot]" + git config --global user.email "github-actions[bot]@users.noreply.github.com" + + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + package_json_file: web/package.json + run_install: false + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version: 'lts/*' + cache: pnpm + cache-dependency-path: ./web/pnpm-lock.yaml + + - name: Detect changed files and generate diff + id: detect_changes + run: | + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + # Manual trigger + if [ -n "${{ github.event.inputs.files }}" ]; then + echo "CHANGED_FILES=${{ github.event.inputs.files }}" >> $GITHUB_OUTPUT + else + # Get all JSON files in en-US directory + files=$(ls web/i18n/en-US/*.json 2>/dev/null | xargs -n1 basename | sed 's/.json$//' | tr '\n' ' ') + echo "CHANGED_FILES=$files" >> $GITHUB_OUTPUT + fi + echo "TARGET_LANGS=${{ github.event.inputs.languages }}" >> $GITHUB_OUTPUT + echo "SYNC_MODE=${{ github.event.inputs.mode || 'incremental' }}" >> $GITHUB_OUTPUT + + # For manual trigger with incremental mode, get diff from last commit + # For full mode, we'll do a complete check anyway + if [ "${{ github.event.inputs.mode }}" == "full" ]; then + echo "Full mode: will check all keys" > /tmp/i18n-diff.txt + echo "DIFF_AVAILABLE=false" >> $GITHUB_OUTPUT + else + git diff HEAD~1..HEAD -- 'web/i18n/en-US/*.json' > /tmp/i18n-diff.txt 2>/dev/null || echo "" > /tmp/i18n-diff.txt + if [ -s /tmp/i18n-diff.txt ]; then + echo "DIFF_AVAILABLE=true" >> $GITHUB_OUTPUT + else + echo "DIFF_AVAILABLE=false" >> $GITHUB_OUTPUT + fi + fi + elif [ "${{ github.event_name }}" == "repository_dispatch" ]; then + # Triggered by push via trigger-i18n-sync.yml workflow + # Validate required payload fields + if [ -z "${{ github.event.client_payload.changed_files }}" ]; then + echo "Error: 
repository_dispatch payload missing required 'changed_files' field" >&2 + exit 1 + fi + echo "CHANGED_FILES=${{ github.event.client_payload.changed_files }}" >> $GITHUB_OUTPUT + echo "TARGET_LANGS=" >> $GITHUB_OUTPUT + echo "SYNC_MODE=${{ github.event.client_payload.sync_mode || 'incremental' }}" >> $GITHUB_OUTPUT + + # Decode the base64-encoded diff from the trigger workflow + if [ -n "${{ github.event.client_payload.diff_base64 }}" ]; then + if ! echo "${{ github.event.client_payload.diff_base64 }}" | base64 -d > /tmp/i18n-diff.txt 2>&1; then + echo "Warning: Failed to decode base64 diff payload" >&2 + echo "" > /tmp/i18n-diff.txt + echo "DIFF_AVAILABLE=false" >> $GITHUB_OUTPUT + elif [ -s /tmp/i18n-diff.txt ]; then + echo "DIFF_AVAILABLE=true" >> $GITHUB_OUTPUT + else + echo "DIFF_AVAILABLE=false" >> $GITHUB_OUTPUT + fi + else + echo "" > /tmp/i18n-diff.txt + echo "DIFF_AVAILABLE=false" >> $GITHUB_OUTPUT + fi + else + echo "Unsupported event type: ${{ github.event_name }}" + exit 1 + fi + + # Truncate diff if too large (keep first 50KB) + if [ -f /tmp/i18n-diff.txt ]; then + head -c 50000 /tmp/i18n-diff.txt > /tmp/i18n-diff-truncated.txt + mv /tmp/i18n-diff-truncated.txt /tmp/i18n-diff.txt + fi + + echo "Detected files: $(cat $GITHUB_OUTPUT | grep CHANGED_FILES || echo 'none')" + + - name: Run Claude Code for Translation Sync + if: steps.detect_changes.outputs.CHANGED_FILES != '' + uses: anthropics/claude-code-action@v1 + with: + anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} + github_token: ${{ secrets.GITHUB_TOKEN }} + prompt: | + You are a professional i18n synchronization engineer for the Dify project. + Your task is to keep all language translations in sync with the English source (en-US). + + ## CRITICAL TOOL RESTRICTIONS + - Use **Read** tool to read files (NOT cat or bash) + - Use **Edit** tool to modify JSON files (NOT node, jq, or bash scripts) + - Use **Bash** ONLY for: git commands, gh commands, pnpm commands + - Run bash commands ONE BY ONE, never combine with && or || + - NEVER use `$()` command substitution - it's not supported. Split into separate commands instead. + + ## WORKING DIRECTORY & ABSOLUTE PATHS + Claude Code sandbox working directory may vary. 
Always use absolute paths: + - For pnpm: `pnpm --dir ${{ github.workspace }}/web <command>` + - For git: `git -C ${{ github.workspace }} <command>` + - For gh: `gh --repo ${{ github.repository }} <command>` + - For file paths: `${{ github.workspace }}/web/i18n/` + + ## EFFICIENCY RULES + - **ONE Edit per language file** - batch all key additions into a single Edit + - Insert new keys at the beginning of JSON (after `{`), lint:fix will sort them + - Translate ALL keys for a language mentally first, then do ONE Edit + + ## Context + - Changed/target files: ${{ steps.detect_changes.outputs.CHANGED_FILES }} + - Target languages (empty means all supported): ${{ steps.detect_changes.outputs.TARGET_LANGS }} + - Sync mode: ${{ steps.detect_changes.outputs.SYNC_MODE }} + - Translation files are located in: ${{ github.workspace }}/web/i18n/{locale}/{filename}.json + - Language configuration is in: ${{ github.workspace }}/web/i18n-config/languages.ts + - Git diff is available: ${{ steps.detect_changes.outputs.DIFF_AVAILABLE }} + + ## CRITICAL DESIGN: Verify First, Then Sync + + You MUST follow this three-phase approach: + + ═══════════════════════════════════════════════════════════════ + ║ PHASE 1: VERIFY - Analyze and Generate Change Report ║ + ═══════════════════════════════════════════════════════════════ + + ### Step 1.1: Analyze Git Diff (for incremental mode) + Use the Read tool to read `/tmp/i18n-diff.txt` to see the git diff. + + Parse the diff to categorize changes: + - Lines with `+` (not `+++`): Added or modified values + - Lines with `-` (not `---`): Removed or old values + - Identify specific keys for each category: + * ADD: Keys that appear only in `+` lines (new keys) + * UPDATE: Keys that appear in both `-` and `+` lines (value changed) + * DELETE: Keys that appear only in `-` lines (removed keys) + + ### Step 1.2: Read Language Configuration + Use the Read tool to read `${{ github.workspace }}/web/i18n-config/languages.ts`. + Extract all languages with `supported: true`. + + ### Step 1.3: Run i18n:check for Each Language + ```bash + pnpm --dir ${{ github.workspace }}/web install --frozen-lockfile + ``` + ```bash + pnpm --dir ${{ github.workspace }}/web run i18n:check + ``` + + This will report: + - Missing keys (need to ADD) + - Extra keys (need to DELETE) + + ### Step 1.4: Generate Change Report + + Create a structured report identifying: + ``` + ╔══════════════════════════════════════════════════════════════╗ + ║ I18N SYNC CHANGE REPORT ║ + ╠══════════════════════════════════════════════════════════════╣ + ║ Files to process: [list] ║ + ║ Languages to sync: [list] ║ + ╠══════════════════════════════════════════════════════════════╣ + ║ ADD (New Keys): ║ + ║ - [filename].[key]: "English value" ║ + ║ ... ║ + ╠══════════════════════════════════════════════════════════════╣ + ║ UPDATE (Modified Keys - MUST re-translate): ║ + ║ - [filename].[key]: "Old value" → "New value" ║ + ║ ... ║ + ╠══════════════════════════════════════════════════════════════╣ + ║ DELETE (Extra Keys): ║ + ║ - [language]/[filename].[key] ║ + ║ ... ║ + ╚══════════════════════════════════════════════════════════════╝ + ``` + + **IMPORTANT**: For UPDATE detection, compare git diff to find keys where + the English value changed. These MUST be re-translated even if target + language already has a translation (it's now stale!).
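To make Step 1.1 concrete, the ADD/UPDATE/DELETE split can be sketched as a short standalone script. This is a minimal illustration (not the workflow's own code), and it assumes each changed entry appears in the diff as a flat `"key": "value"` line; nested keys and multi-line values are out of scope.

```python
import re

# One flat JSON entry per diff line, e.g.:  +  "newKey": "value",
_ENTRY = re.compile(r'^([+-])\s*"([^"]+)":\s*"(.*)",?\s*$')


def categorize(diff_text: str):
    """Split changed i18n keys into ADD / UPDATE / DELETE buckets."""
    added, removed = {}, {}
    for line in diff_text.splitlines():
        if line.startswith(("+++", "---")):
            continue  # diff file headers, not content lines
        match = _ENTRY.match(line)
        if not match:
            continue
        sign, key, value = match.groups()
        (added if sign == "+" else removed)[key] = value
    changed = added.keys() & removed.keys()
    adds = {k: added[k] for k in added.keys() - changed}
    updates = {k: (removed[k], added[k]) for k in changed}  # (old, new)
    deletes = {k: removed[k] for k in removed.keys() - changed}
    return adds, updates, deletes
```

Run against `/tmp/i18n-diff.txt`, the three buckets map directly onto the change report above; the workflow itself still cross-checks the result with `i18n:check`, which stays the source of truth for missing and extra keys.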
+ + ═══════════════════════════════════════════════════════════════ + ║ PHASE 2: SYNC - Execute Changes Based on Report ║ + ═══════════════════════════════════════════════════════════════ + + ### Step 2.1: Process ADD Operations (BATCH per language file) + + **CRITICAL WORKFLOW for efficiency:** + 1. First, translate ALL new keys for ALL languages mentally + 2. Then, for EACH language file, do ONE Edit operation: + - Read the file once + - Insert ALL new keys at the beginning (right after the opening `{`) + - Don't worry about alphabetical order - lint:fix will sort them later + + Example Edit (adding 3 keys to zh-Hans/app.json): + ``` + old_string: '{\n "accessControl"' + new_string: '{\n "newKey1": "translation1",\n "newKey2": "translation2",\n "newKey3": "translation3",\n "accessControl"' + ``` + + **IMPORTANT**: + - ONE Edit per language file (not one Edit per key!) + - Always use the Edit tool. NEVER use bash scripts, node, or jq. + + ### Step 2.2: Process UPDATE Operations + + **IMPORTANT: Special handling for zh-Hans and ja-JP** + If zh-Hans or ja-JP files were ALSO modified in the same push: + - Run: `git -C ${{ github.workspace }} diff HEAD~1 --name-only` and check for zh-Hans or ja-JP files + - If found, it means someone manually translated them. Apply these rules: + + 1. **Missing keys**: Still ADD them (completeness required) + 2. **Existing translations**: Compare with the NEW English value: + - If translation is **completely wrong** or **unrelated** → Update it + - If translation is **roughly correct** (captures the meaning) → Keep it, respect manual work + - When in doubt, **keep the manual translation** + + Example: + - English changed: "Save" → "Save Changes" + - Manual translation: "保存更改" → Keep it (correct meaning) + - Manual translation: "删除" → Update it (completely wrong) + + For other languages: + Use Edit tool to replace the old value with the new translation. + You can batch multiple updates in one Edit if they are adjacent. + + ### Step 2.3: Process DELETE Operations + For extra keys reported by i18n:check: + - Run: `pnpm --dir ${{ github.workspace }}/web run i18n:check --auto-remove` + - Or manually remove from target language JSON files + + ## Translation Guidelines + + - PRESERVE all placeholders exactly as-is: + - `{{variable}}` - Mustache interpolation + - `${variable}` - Template literal + - `content` - HTML tags + - `_one`, `_other` - Pluralization suffixes (these are KEY suffixes, not values) + - Use appropriate language register (formal/informal) based on existing translations + - Match existing translation style in each language + - Technical terms: check existing conventions per language + - For CJK languages: no spaces between characters unless necessary + - For RTL languages (ar-TN, fa-IR): ensure proper text handling + + ## Output Format Requirements + - Alphabetical key ordering (if original file uses it) + - 2-space indentation + - Trailing newline at end of file + - Valid JSON (use proper escaping for special characters) + + ═══════════════════════════════════════════════════════════════ + ║ PHASE 3: RE-VERIFY - Confirm All Issues Resolved ║ + ═══════════════════════════════════════════════════════════════ + + ### Step 3.1: Run Lint Fix (IMPORTANT!) 
+ ```bash + pnpm --dir ${{ github.workspace }}/web lint:fix --quiet -- 'i18n/**/*.json' + ``` + This ensures: + - JSON keys are sorted alphabetically (jsonc/sort-keys rule) + - Valid i18n keys (dify-i18n/valid-i18n-keys rule) + - No extra keys (dify-i18n/no-extra-keys rule) + + ### Step 3.2: Run Final i18n Check + ```bash + pnpm --dir ${{ github.workspace }}/web run i18n:check + ``` + + ### Step 3.3: Fix Any Remaining Issues + If check reports issues: + - Go back to PHASE 2 for unresolved items + - Repeat until check passes + + ### Step 3.4: Generate Final Summary + ``` + ╔══════════════════════════════════════════════════════════════╗ + ║ SYNC COMPLETED SUMMARY ║ + ╠══════════════════════════════════════════════════════════════╣ + ║ Language │ Added │ Updated │ Deleted │ Status ║ + ╠══════════════════════════════════════════════════════════════╣ + ║ zh-Hans │ 5 │ 2 │ 1 │ ✓ Complete ║ + ║ ja-JP │ 5 │ 2 │ 1 │ ✓ Complete ║ + ║ ... │ ... │ ... │ ... │ ... ║ + ╠══════════════════════════════════════════════════════════════╣ + ║ i18n:check │ PASSED - All keys in sync ║ + ╚══════════════════════════════════════════════════════════════╝ + ``` + + ## Mode-Specific Behavior + + **SYNC_MODE = "incremental"** (default): + - Focus on keys identified from git diff + - Also check i18n:check output for any missing/extra keys + - Efficient for small changes + + **SYNC_MODE = "full"**: + - Compare ALL keys between en-US and each language + - Run i18n:check to identify all discrepancies + - Use for first-time sync or fixing historical issues + + ## Important Notes + + 1. Always run i18n:check BEFORE and AFTER making changes + 2. The check script is the source of truth for missing/extra keys + 3. For UPDATE scenario: git diff is the source of truth for changed values + 4. Create a single commit with all translation changes + 5. If any translation fails, continue with others and report failures + + ═══════════════════════════════════════════════════════════════ + ║ PHASE 4: COMMIT AND CREATE PR ║ + ═══════════════════════════════════════════════════════════════ + + After all translations are complete and verified: + + ### Step 4.1: Check for changes + ```bash + git -C ${{ github.workspace }} status --porcelain + ``` + + If there are changes: + + ### Step 4.2: Create a new branch and commit + Run these git commands ONE BY ONE (not combined with &&). + **IMPORTANT**: Do NOT use `$()` command substitution. Use two separate commands: + + 1. First, get the timestamp: + ```bash + date +%Y%m%d-%H%M%S + ``` + (Note the output, e.g., "20260115-143052") + + 2. Then create branch using the timestamp value: + ```bash + git -C ${{ github.workspace }} checkout -b chore/i18n-sync-20260115-143052 + ``` + (Replace "20260115-143052" with the actual timestamp from step 1) + + 3. Stage changes: + ```bash + git -C ${{ github.workspace }} add web/i18n/ + ``` + + 4. Commit: + ```bash + git -C ${{ github.workspace }} commit -m "chore(i18n): sync translations with en-US - Mode: ${{ steps.detect_changes.outputs.SYNC_MODE }}" + ``` + + 5. Push: + ```bash + git -C ${{ github.workspace }} push origin HEAD + ``` + + ### Step 4.3: Create Pull Request + ```bash + gh pr create --repo ${{ github.repository }} --title "chore(i18n): sync translations with en-US" --body "## Summary + + This PR was automatically generated to sync i18n translation files. 
+ + ### Changes + - Mode: ${{ steps.detect_changes.outputs.SYNC_MODE }} + - Files processed: ${{ steps.detect_changes.outputs.CHANGED_FILES }} + + ### Verification + - [x] \`i18n:check\` passed + - [x] \`lint:fix\` applied + + 🤖 Generated with Claude Code GitHub Action" --base main + ``` + + claude_args: | + --max-turns 150 + --allowedTools "Read,Write,Edit,Bash(git *),Bash(git:*),Bash(gh *),Bash(gh:*),Bash(pnpm *),Bash(pnpm:*),Bash(date *),Bash(date:*),Glob,Grep" diff --git a/.github/workflows/trigger-i18n-sync.yml b/.github/workflows/trigger-i18n-sync.yml new file mode 100644 index 0000000000..de093c9235 --- /dev/null +++ b/.github/workflows/trigger-i18n-sync.yml @@ -0,0 +1,66 @@ +name: Trigger i18n Sync on Push + +# This workflow bridges the push event to repository_dispatch +# because claude-code-action doesn't support push events directly. +# See: https://github.com/langgenius/dify/issues/30743 + +on: + push: + branches: [main] + paths: + - 'web/i18n/en-US/*.json' + +permissions: + contents: write + +jobs: + trigger: + if: github.repository == 'langgenius/dify' + runs-on: ubuntu-latest + timeout-minutes: 5 + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Detect changed files and generate diff + id: detect + run: | + BEFORE_SHA="${{ github.event.before }}" + # Handle edge case: force push may have null/zero SHA + if [ -z "$BEFORE_SHA" ] || [ "$BEFORE_SHA" = "0000000000000000000000000000000000000000" ]; then + BEFORE_SHA="HEAD~1" + fi + + # Detect changed i18n files + changed=$(git diff --name-only "$BEFORE_SHA" "${{ github.sha }}" -- 'web/i18n/en-US/*.json' 2>/dev/null | xargs -n1 basename 2>/dev/null | sed 's/.json$//' | tr '\n' ' ' || echo "") + echo "changed_files=$changed" >> $GITHUB_OUTPUT + + # Generate diff for context + git diff "$BEFORE_SHA" "${{ github.sha }}" -- 'web/i18n/en-US/*.json' > /tmp/i18n-diff.txt 2>/dev/null || echo "" > /tmp/i18n-diff.txt + + # Truncate if too large (keep first 50KB to match receiving workflow) + head -c 50000 /tmp/i18n-diff.txt > /tmp/i18n-diff-truncated.txt + mv /tmp/i18n-diff-truncated.txt /tmp/i18n-diff.txt + + # Base64 encode the diff for safe JSON transport (portable, single-line) + diff_base64=$(base64 < /tmp/i18n-diff.txt | tr -d '\n') + echo "diff_base64=$diff_base64" >> $GITHUB_OUTPUT + + if [ -n "$changed" ]; then + echo "has_changes=true" >> $GITHUB_OUTPUT + echo "Detected changed files: $changed" + else + echo "has_changes=false" >> $GITHUB_OUTPUT + echo "No i18n changes detected" + fi + + - name: Trigger i18n sync workflow + if: steps.detect.outputs.has_changes == 'true' + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + event-type: i18n-sync + client-payload: '{"changed_files": "${{ steps.detect.outputs.changed_files }}", "diff_base64": "${{ steps.detect.outputs.diff_base64 }}", "sync_mode": "incremental", "trigger_sha": "${{ github.sha }}"}' diff --git a/.github/workflows/vdb-tests.yml b/.github/workflows/vdb-tests.yml index 291171e5c7..7735afdaca 100644 --- a/.github/workflows/vdb-tests.yml +++ b/.github/workflows/vdb-tests.yml @@ -19,19 +19,19 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: persist-credentials: false - name: Free Disk Space - uses: endersonmenezes/free-disk-space@v2 + uses: endersonmenezes/free-disk-space@v3 with: remove_dotnet: true remove_haskell: true remove_tool_cache: true - name: Setup UV and Python - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 
with: enable-cache: true python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/web-tests.yml b/.github/workflows/web-tests.yml index 3313e58614..0fd1d5d22b 100644 --- a/.github/workflows/web-tests.yml +++ b/.github/workflows/web-tests.yml @@ -13,46 +13,356 @@ jobs: runs-on: ubuntu-latest defaults: run: + shell: bash working-directory: ./web steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: persist-credentials: false - - name: Check changed files - id: changed-files - uses: tj-actions/changed-files@v46 - with: - files: web/** - - name: Install pnpm - if: steps.changed-files.outputs.any_changed == 'true' uses: pnpm/action-setup@v4 with: package_json_file: web/package.json run_install: false - name: Setup Node.js - uses: actions/setup-node@v4 - if: steps.changed-files.outputs.any_changed == 'true' + uses: actions/setup-node@v6 with: node-version: 22 cache: pnpm - cache-dependency-path: ./web/package.json + cache-dependency-path: ./web/pnpm-lock.yaml - name: Install dependencies - if: steps.changed-files.outputs.any_changed == 'true' - working-directory: ./web run: pnpm install --frozen-lockfile - - name: Check i18n types synchronization - if: steps.changed-files.outputs.any_changed == 'true' - working-directory: ./web - run: pnpm run check:i18n-types - - name: Run tests - if: steps.changed-files.outputs.any_changed == 'true' - working-directory: ./web - run: pnpm test + run: pnpm test:coverage + + - name: Coverage Summary + if: always() + id: coverage-summary + run: | + set -eo pipefail + + COVERAGE_FILE="coverage/coverage-final.json" + COVERAGE_SUMMARY_FILE="coverage/coverage-summary.json" + + if [ ! -f "$COVERAGE_FILE" ] && [ ! -f "$COVERAGE_SUMMARY_FILE" ]; then + echo "has_coverage=false" >> "$GITHUB_OUTPUT" + echo "### 🚨 Test Coverage Report :test_tube:" >> "$GITHUB_STEP_SUMMARY" + echo "Coverage data not found. Ensure Vitest runs with coverage enabled." >> "$GITHUB_STEP_SUMMARY" + exit 0 + fi + + echo "has_coverage=true" >> "$GITHUB_OUTPUT" + + node <<'NODE' >> "$GITHUB_STEP_SUMMARY" + const fs = require('fs'); + const path = require('path'); + let libCoverage = null; + + try { + libCoverage = require('istanbul-lib-coverage'); + } catch (error) { + libCoverage = null; + } + + const summaryPath = path.join('coverage', 'coverage-summary.json'); + const finalPath = path.join('coverage', 'coverage-final.json'); + + const hasSummary = fs.existsSync(summaryPath); + const hasFinal = fs.existsSync(finalPath); + + if (!hasSummary && !hasFinal) { + console.log('### Test Coverage Summary :test_tube:'); + console.log(''); + console.log('No coverage data found.'); + process.exit(0); + } + + const summary = hasSummary + ? JSON.parse(fs.readFileSync(summaryPath, 'utf8')) + : null; + const coverage = hasFinal + ? JSON.parse(fs.readFileSync(finalPath, 'utf8')) + : null; + + const getLineCoverageFromStatements = (statementMap, statementHits) => { + const lineHits = {}; + + if (!statementMap || !statementHits) { + return lineHits; + } + + Object.entries(statementMap).forEach(([key, statement]) => { + const line = statement?.start?.line; + if (!line) { + return; + } + const hits = statementHits[key] ?? 0; + const previous = lineHits[line]; + lineHits[line] = previous === undefined ? hits : Math.max(previous, hits); + }); + + return lineHits; + }; + + const getFileCoverage = (entry) => ( + libCoverage ? libCoverage.createFileCoverage(entry) : null + ); + + const getLineHits = (entry, fileCoverage) => { + const lineHits = entry.l ?? 
{}; + if (Object.keys(lineHits).length > 0) { + return lineHits; + } + if (fileCoverage) { + return fileCoverage.getLineCoverage(); + } + return getLineCoverageFromStatements(entry.statementMap ?? {}, entry.s ?? {}); + }; + + const getUncoveredLines = (entry, fileCoverage, lineHits) => { + if (lineHits && Object.keys(lineHits).length > 0) { + return Object.entries(lineHits) + .filter(([, count]) => count === 0) + .map(([line]) => Number(line)) + .sort((a, b) => a - b); + } + if (fileCoverage) { + return fileCoverage.getUncoveredLines(); + } + return []; + }; + + const totals = { + lines: { covered: 0, total: 0 }, + statements: { covered: 0, total: 0 }, + branches: { covered: 0, total: 0 }, + functions: { covered: 0, total: 0 }, + }; + const fileSummaries = []; + + if (summary) { + const totalEntry = summary.total ?? {}; + ['lines', 'statements', 'branches', 'functions'].forEach((key) => { + if (totalEntry[key]) { + totals[key].covered = totalEntry[key].covered ?? 0; + totals[key].total = totalEntry[key].total ?? 0; + } + }); + + Object.entries(summary) + .filter(([file]) => file !== 'total') + .forEach(([file, data]) => { + fileSummaries.push({ + file, + pct: data.lines?.pct ?? data.statements?.pct ?? 0, + lines: { + covered: data.lines?.covered ?? 0, + total: data.lines?.total ?? 0, + }, + }); + }); + } else if (coverage) { + Object.entries(coverage).forEach(([file, entry]) => { + const fileCoverage = getFileCoverage(entry); + const lineHits = getLineHits(entry, fileCoverage); + const statementHits = entry.s ?? {}; + const branchHits = entry.b ?? {}; + const functionHits = entry.f ?? {}; + + const lineTotal = Object.keys(lineHits).length; + const lineCovered = Object.values(lineHits).filter((n) => n > 0).length; + + const statementTotal = Object.keys(statementHits).length; + const statementCovered = Object.values(statementHits).filter((n) => n > 0).length; + + const branchTotal = Object.values(branchHits).reduce((acc, branches) => acc + branches.length, 0); + const branchCovered = Object.values(branchHits).reduce( + (acc, branches) => acc + branches.filter((n) => n > 0).length, + 0, + ); + + const functionTotal = Object.keys(functionHits).length; + const functionCovered = Object.values(functionHits).filter((n) => n > 0).length; + + totals.lines.total += lineTotal; + totals.lines.covered += lineCovered; + totals.statements.total += statementTotal; + totals.statements.covered += statementCovered; + totals.branches.total += branchTotal; + totals.branches.covered += branchCovered; + totals.functions.total += functionTotal; + totals.functions.covered += functionCovered; + + const pct = (covered, tot) => (tot > 0 ? (covered / tot) * 100 : 0); + + fileSummaries.push({ + file, + pct: pct(lineCovered || statementCovered, lineTotal || statementTotal), + lines: { + covered: lineCovered || statementCovered, + total: lineTotal || statementTotal, + }, + }); + }); + } + + const pct = (covered, tot) => (tot > 0 ? 
((covered / tot) * 100).toFixed(2) : '0.00'); + + console.log('### Test Coverage Summary :test_tube:'); + console.log(''); + console.log('| Metric | Coverage | Covered / Total |'); + console.log('|--------|----------|-----------------|'); + console.log(`| Lines | ${pct(totals.lines.covered, totals.lines.total)}% | ${totals.lines.covered} / ${totals.lines.total} |`); + console.log(`| Statements | ${pct(totals.statements.covered, totals.statements.total)}% | ${totals.statements.covered} / ${totals.statements.total} |`); + console.log(`| Branches | ${pct(totals.branches.covered, totals.branches.total)}% | ${totals.branches.covered} / ${totals.branches.total} |`); + console.log(`| Functions | ${pct(totals.functions.covered, totals.functions.total)}% | ${totals.functions.covered} / ${totals.functions.total} |`); + + console.log(''); + console.log('
<details><summary>File coverage (lowest lines first)</summary>'); + console.log(''); + console.log('```'); + fileSummaries + .sort((a, b) => (a.pct - b.pct) || (b.lines.total - a.lines.total)) + .slice(0, 25) + .forEach(({ file, pct, lines }) => { + console.log(`${pct.toFixed(2)}%\t${lines.covered}/${lines.total}\t${file}`); + }); + console.log('```'); + console.log('</details>
'); + + if (coverage) { + const pctValue = (covered, tot) => { + if (tot === 0) { + return '0'; + } + return ((covered / tot) * 100) + .toFixed(2) + .replace(/\.?0+$/, ''); + }; + + const formatLineRanges = (lines) => { + if (lines.length === 0) { + return ''; + } + const ranges = []; + let start = lines[0]; + let end = lines[0]; + + for (let i = 1; i < lines.length; i += 1) { + const current = lines[i]; + if (current === end + 1) { + end = current; + continue; + } + ranges.push(start === end ? `${start}` : `${start}-${end}`); + start = current; + end = current; + } + ranges.push(start === end ? `${start}` : `${start}-${end}`); + return ranges.join(','); + }; + + const tableTotals = { + statements: { covered: 0, total: 0 }, + branches: { covered: 0, total: 0 }, + functions: { covered: 0, total: 0 }, + lines: { covered: 0, total: 0 }, + }; + const tableRows = Object.entries(coverage) + .map(([file, entry]) => { + const fileCoverage = getFileCoverage(entry); + const lineHits = getLineHits(entry, fileCoverage); + const statementHits = entry.s ?? {}; + const branchHits = entry.b ?? {}; + const functionHits = entry.f ?? {}; + + const lineTotal = Object.keys(lineHits).length; + const lineCovered = Object.values(lineHits).filter((n) => n > 0).length; + const statementTotal = Object.keys(statementHits).length; + const statementCovered = Object.values(statementHits).filter((n) => n > 0).length; + const branchTotal = Object.values(branchHits).reduce((acc, branches) => acc + branches.length, 0); + const branchCovered = Object.values(branchHits).reduce( + (acc, branches) => acc + branches.filter((n) => n > 0).length, + 0, + ); + const functionTotal = Object.keys(functionHits).length; + const functionCovered = Object.values(functionHits).filter((n) => n > 0).length; + + tableTotals.lines.total += lineTotal; + tableTotals.lines.covered += lineCovered; + tableTotals.statements.total += statementTotal; + tableTotals.statements.covered += statementCovered; + tableTotals.branches.total += branchTotal; + tableTotals.branches.covered += branchCovered; + tableTotals.functions.total += functionTotal; + tableTotals.functions.covered += functionCovered; + + const uncoveredLines = getUncoveredLines(entry, fileCoverage, lineHits); + + const filePath = entry.path ?? file; + const relativePath = path.isAbsolute(filePath) + ? 
path.relative(process.cwd(), filePath) + : filePath; + + return { + file: relativePath || file, + statements: pctValue(statementCovered, statementTotal), + branches: pctValue(branchCovered, branchTotal), + functions: pctValue(functionCovered, functionTotal), + lines: pctValue(lineCovered, lineTotal), + uncovered: formatLineRanges(uncoveredLines), + }; + }) + .sort((a, b) => a.file.localeCompare(b.file)); + + const columns = [ + { key: 'file', header: 'File', align: 'left' }, + { key: 'statements', header: '% Stmts', align: 'right' }, + { key: 'branches', header: '% Branch', align: 'right' }, + { key: 'functions', header: '% Funcs', align: 'right' }, + { key: 'lines', header: '% Lines', align: 'right' }, + { key: 'uncovered', header: 'Uncovered Line #s', align: 'left' }, + ]; + + const allFilesRow = { + file: 'All files', + statements: pctValue(tableTotals.statements.covered, tableTotals.statements.total), + branches: pctValue(tableTotals.branches.covered, tableTotals.branches.total), + functions: pctValue(tableTotals.functions.covered, tableTotals.functions.total), + lines: pctValue(tableTotals.lines.covered, tableTotals.lines.total), + uncovered: '', + }; + + const rowsForOutput = [allFilesRow, ...tableRows]; + const formatRow = (row) => `| ${columns + .map(({ key }) => String(row[key] ?? '')) + .join(' | ')} |`; + const headerRow = `| ${columns.map(({ header }) => header).join(' | ')} |`; + const dividerRow = `| ${columns + .map(({ align }) => (align === 'right' ? '---:' : ':---')) + .join(' | ')} |`; + + console.log(''); + console.log('
<details><summary>Vitest coverage table</summary>'); + console.log(''); + console.log(headerRow); + console.log(dividerRow); + rowsForOutput.forEach((row) => console.log(formatRow(row))); + console.log('</details>
'); + } + NODE + + - name: Upload Coverage Artifact + if: steps.coverage-summary.outputs.has_coverage == 'true' + uses: actions/upload-artifact@v6 + with: + name: web-coverage-report + path: web/coverage + retention-days: 30 + if-no-files-found: error diff --git a/.gitignore b/.gitignore index 5ad728c3da..7bd919f095 100644 --- a/.gitignore +++ b/.gitignore @@ -139,7 +139,6 @@ pyrightconfig.json .idea/' .DS_Store -web/.vscode/settings.json # Intellij IDEA Files .idea/* @@ -196,6 +195,7 @@ docker/nginx/ssl/* !docker/nginx/ssl/.gitkeep docker/middleware.env docker/docker-compose.override.yaml +docker/env-backup/* sdks/python-client/build sdks/python-client/dist @@ -205,7 +205,6 @@ sdks/python-client/dify_client.egg-info !.vscode/launch.json.template !.vscode/README.md api/.vscode -web/.vscode # vscode Code History Extension .history @@ -220,15 +219,6 @@ plugins.jsonl # mise mise.toml -# Next.js build output -.next/ - -# PWA generated files -web/public/sw.js -web/public/sw.js.map -web/public/workbox-*.js -web/public/workbox-*.js.map -web/public/fallback-*.js # AI Assistant .roo/ @@ -245,3 +235,4 @@ scripts/stress-test/reports/ # settings *.local.json +*.local.md diff --git a/.mcp.json b/.mcp.json deleted file mode 100644 index 8eceaf9ead..0000000000 --- a/.mcp.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "mcpServers": { - "context7": { - "type": "http", - "url": "https://mcp.context7.com/mcp" - }, - "sequential-thinking": { - "type": "stdio", - "command": "npx", - "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"], - "env": {} - }, - "github": { - "type": "stdio", - "command": "npx", - "args": ["-y", "@modelcontextprotocol/server-github"], - "env": { - "GITHUB_PERSONAL_ACCESS_TOKEN": "${GITHUB_PERSONAL_ACCESS_TOKEN}" - } - }, - "fetch": { - "type": "stdio", - "command": "uvx", - "args": ["mcp-server-fetch"], - "env": {} - }, - "playwright": { - "type": "stdio", - "command": "npx", - "args": ["-y", "@playwright/mcp@latest"], - "env": {} - } - } - } \ No newline at end of file diff --git a/.nvmrc b/.nvmrc deleted file mode 100644 index 7af24b7ddb..0000000000 --- a/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -22.11.0 diff --git a/.vscode/launch.json.template b/.vscode/launch.json.template index cb934d01b5..bdded1e73e 100644 --- a/.vscode/launch.json.template +++ b/.vscode/launch.json.template @@ -37,7 +37,7 @@ "-c", "1", "-Q", - "dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor", + "dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention", "--loglevel", "INFO" ], diff --git a/Makefile b/Makefile index 07afd8187e..60c32948b9 100644 --- a/Makefile +++ b/Makefile @@ -60,9 +60,10 @@ check: @echo "✅ Code check complete" lint: - @echo "🔧 Running ruff format, check with fixes, and import linter..." + @echo "🔧 Running ruff format, check with fixes, import linter, and dotenv-linter..." 
@uv run --project api --dev sh -c 'ruff format ./api && ruff check --fix ./api' @uv run --directory api --dev lint-imports + @uv run --project api --dev dotenv-linter ./api/.env.example ./web/.env.example @echo "✅ Linting complete" type-check: @@ -122,7 +123,7 @@ help: @echo "Backend Code Quality:" @echo " make format - Format code with ruff" @echo " make check - Check code with ruff" - @echo " make lint - Format and fix code with ruff" + @echo " make lint - Format, fix, and lint code (ruff, imports, dotenv)" @echo " make type-check - Run type checking with basedpyright" @echo " make test - Run backend unit tests" @echo "" diff --git a/api/.env.example b/api/.env.example index 8c4ea617d4..8099c4a42a 100644 --- a/api/.env.example +++ b/api/.env.example @@ -101,6 +101,15 @@ S3_ACCESS_KEY=your-access-key S3_SECRET_KEY=your-secret-key S3_REGION=your-region +# Workflow run and Conversation archive storage (S3-compatible) +ARCHIVE_STORAGE_ENABLED=false +ARCHIVE_STORAGE_ENDPOINT= +ARCHIVE_STORAGE_ARCHIVE_BUCKET= +ARCHIVE_STORAGE_EXPORT_BUCKET= +ARCHIVE_STORAGE_ACCESS_KEY= +ARCHIVE_STORAGE_SECRET_KEY= +ARCHIVE_STORAGE_REGION=auto + # Azure Blob Storage configuration AZURE_BLOB_ACCOUNT_NAME=your-account-name AZURE_BLOB_ACCOUNT_KEY=your-account-key @@ -116,6 +125,7 @@ ALIYUN_OSS_AUTH_VERSION=v1 ALIYUN_OSS_REGION=your-region # Don't start with '/'. OSS doesn't support leading slash in object names. ALIYUN_OSS_PATH=your-path +ALIYUN_CLOUDBOX_ID=your-cloudbox-id # Google Storage configuration GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name @@ -127,12 +137,14 @@ TENCENT_COS_SECRET_KEY=your-secret-key TENCENT_COS_SECRET_ID=your-secret-id TENCENT_COS_REGION=your-region TENCENT_COS_SCHEME=your-scheme +TENCENT_COS_CUSTOM_DOMAIN=your-custom-domain # Huawei OBS Storage Configuration HUAWEI_OBS_BUCKET_NAME=your-bucket-name HUAWEI_OBS_SECRET_KEY=your-secret-key HUAWEI_OBS_ACCESS_KEY=your-access-key HUAWEI_OBS_SERVER=your-server-url +HUAWEI_OBS_PATH_STYLE=false # Baidu OBS Storage Configuration BAIDU_OBS_BUCKET_NAME=your-bucket-name @@ -490,6 +502,8 @@ LOG_FILE_BACKUP_COUNT=5 LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S # Log Timezone LOG_TZ=UTC +# Log output format: text or json +LOG_OUTPUT_FORMAT=text # Log format LOG_FORMAT=%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s @@ -543,6 +557,29 @@ APP_MAX_EXECUTION_TIME=1200 APP_DEFAULT_ACTIVE_REQUESTS=0 APP_MAX_ACTIVE_REQUESTS=0 +# Aliyun SLS Logstore Configuration +# Aliyun Access Key ID +ALIYUN_SLS_ACCESS_KEY_ID= +# Aliyun Access Key Secret +ALIYUN_SLS_ACCESS_KEY_SECRET= +# Aliyun SLS Endpoint (e.g., cn-hangzhou.log.aliyuncs.com) +ALIYUN_SLS_ENDPOINT= +# Aliyun SLS Region (e.g., cn-hangzhou) +ALIYUN_SLS_REGION= +# Aliyun SLS Project Name +ALIYUN_SLS_PROJECT_NAME= +# Number of days to retain workflow run logs (default: 365 days, 3650 for permanent storage) +ALIYUN_SLS_LOGSTORE_TTL=365 +# Enable dual-write to both SLS LogStore and SQL database (default: false) +LOGSTORE_DUAL_WRITE_ENABLED=false +# Enable dual-read fallback to SQL database when LogStore returns no results (default: true) +# Useful for migration scenarios where historical data exists only in SQL database +LOGSTORE_DUAL_READ_ENABLED=true +# Control flag for whether to write the `graph` field to LogStore. +# If LOGSTORE_ENABLE_PUT_GRAPH_FIELD is "true", write the full `graph` field; +# otherwise write an empty {} instead. Defaults to writing the `graph` field. 
+LOGSTORE_ENABLE_PUT_GRAPH_FIELD=true + # Celery beat configuration CELERY_BEAT_SCHEDULER_TIME=1 @@ -552,6 +589,7 @@ ENABLE_CLEAN_UNUSED_DATASETS_TASK=false ENABLE_CREATE_TIDB_SERVERLESS_TASK=false ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false ENABLE_CLEAN_MESSAGES=false +ENABLE_WORKFLOW_RUN_CLEANUP_TASK=false ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false ENABLE_DATASETS_QUEUE_MONITOR=false ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true @@ -626,17 +664,7 @@ QUEUE_MONITOR_ALERT_EMAILS= QUEUE_MONITOR_INTERVAL=30 # Swagger UI configuration -# SECURITY: Swagger UI is automatically disabled in PRODUCTION environment (DEPLOY_ENV=PRODUCTION) -# to prevent API information disclosure. -# -# Behavior: -# - DEPLOY_ENV=PRODUCTION + SWAGGER_UI_ENABLED not set -> Swagger DISABLED (secure default) -# - DEPLOY_ENV=DEVELOPMENT/TESTING + SWAGGER_UI_ENABLED not set -> Swagger ENABLED -# - SWAGGER_UI_ENABLED=true -> Swagger ENABLED (overrides environment check) -# - SWAGGER_UI_ENABLED=false -> Swagger DISABLED (explicit disable) -# -# For development, you can uncomment below or set DEPLOY_ENV=DEVELOPMENT -# SWAGGER_UI_ENABLED=false +SWAGGER_UI_ENABLED=true SWAGGER_UI_PATH=/swagger-ui.html # Whether to encrypt dataset IDs when exporting DSL files (default: true) @@ -680,4 +708,8 @@ ANNOTATION_IMPORT_MIN_RECORDS=1 ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE=5 ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR=20 # Maximum number of concurrent annotation import tasks per tenant -ANNOTATION_IMPORT_MAX_CONCURRENT=5 \ No newline at end of file +ANNOTATION_IMPORT_MAX_CONCURRENT=5 +# Sandbox expired records clean configuration +SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21 +SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000 +SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30 diff --git a/api/.importlinter b/api/.importlinter index 24ece72b30..2dec958788 100644 --- a/api/.importlinter +++ b/api/.importlinter @@ -3,9 +3,11 @@ root_packages = core configs controllers + extensions models tasks services +include_external_packages = True [importlinter:contract:workflow] name = Workflow @@ -33,6 +35,28 @@ ignore_imports = core.workflow.nodes.loop.loop_node -> core.workflow.graph core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine.command_channels +[importlinter:contract:workflow-infrastructure-dependencies] +name = Workflow Infrastructure Dependencies +type = forbidden +source_modules = + core.workflow +forbidden_modules = + extensions.ext_database + extensions.ext_redis +allow_indirect_imports = True +ignore_imports = + core.workflow.nodes.agent.agent_node -> extensions.ext_database + core.workflow.nodes.datasource.datasource_node -> extensions.ext_database + core.workflow.nodes.knowledge_index.knowledge_index_node -> extensions.ext_database + core.workflow.nodes.knowledge_retrieval.knowledge_retrieval_node -> extensions.ext_database + core.workflow.nodes.llm.file_saver -> extensions.ext_database + core.workflow.nodes.llm.llm_utils -> extensions.ext_database + core.workflow.nodes.llm.node -> extensions.ext_database + core.workflow.nodes.tool.tool_node -> extensions.ext_database + core.workflow.graph_engine.command_channels.redis_channel -> extensions.ext_redis + core.workflow.graph_engine.manager -> extensions.ext_redis + core.workflow.nodes.knowledge_retrieval.knowledge_retrieval_node -> extensions.ext_redis + [importlinter:contract:rsc] name = RSC type = layers diff --git a/api/.ruff.toml b/api/.ruff.toml index 7206f7fa0f..8db0cbcb21 100644 --- a/api/.ruff.toml +++ b/api/.ruff.toml @@ -1,4 +1,8 @@ -exclude = ["migrations/*"] +exclude = [ + 
"migrations/*", + ".git", + ".git/**", +] line-length = 120 [format] diff --git a/api/Dockerfile b/api/Dockerfile index 02df91bfc1..a08d4e3aab 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -50,16 +50,33 @@ WORKDIR /app/api # Create non-root user ARG dify_uid=1001 +ARG NODE_MAJOR=22 +ARG NODE_PACKAGE_VERSION=22.21.0-1nodesource1 +ARG NODESOURCE_KEY_FPR=6F71F525282841EEDAF851B42F59B5F99B1BE0B4 RUN groupadd -r -g ${dify_uid} dify && \ useradd -r -u ${dify_uid} -g ${dify_uid} -s /bin/bash dify && \ chown -R dify:dify /app RUN \ apt-get update \ + && apt-get install -y --no-install-recommends \ + ca-certificates \ + curl \ + gnupg \ + && mkdir -p /etc/apt/keyrings \ + && curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key -o /tmp/nodesource.gpg \ + && gpg --show-keys --with-colons /tmp/nodesource.gpg \ + | awk -F: '/^fpr:/ {print $10}' \ + | grep -Fx "${NODESOURCE_KEY_FPR}" \ + && gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg /tmp/nodesource.gpg \ + && rm -f /tmp/nodesource.gpg \ + && echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_${NODE_MAJOR}.x nodistro main" \ + > /etc/apt/sources.list.d/nodesource.list \ + && apt-get update \ # Install dependencies && apt-get install -y --no-install-recommends \ # basic environment - curl nodejs \ + nodejs=${NODE_PACKAGE_VERSION} \ # for gmpy2 \ libgmp-dev libmpfr-dev libmpc-dev \ # For Security @@ -79,7 +96,8 @@ COPY --from=packages --chown=dify:dify ${VIRTUAL_ENV} ${VIRTUAL_ENV} ENV PATH="${VIRTUAL_ENV}/bin:${PATH}" # Download nltk data -RUN mkdir -p /usr/local/share/nltk_data && NLTK_DATA=/usr/local/share/nltk_data python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger'); nltk.download('stopwords')" \ +RUN mkdir -p /usr/local/share/nltk_data \ + && NLTK_DATA=/usr/local/share/nltk_data python -c "import nltk; from unstructured.nlp.tokenize import download_nltk_packages; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger'); nltk.download('stopwords'); download_nltk_packages()" \ && chmod -R 755 /usr/local/share/nltk_data ENV TIKTOKEN_CACHE_DIR=/app/api/.tiktoken_cache diff --git a/api/README.md b/api/README.md index 2dab2ec6e6..794b05d3af 100644 --- a/api/README.md +++ b/api/README.md @@ -84,7 +84,7 @@ 1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service. 
```bash -uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor +uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention ``` Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service: diff --git a/api/app_factory.py b/api/app_factory.py index 026310a8aa..f827842d68 100644 --- a/api/app_factory.py +++ b/api/app_factory.py @@ -2,9 +2,11 @@ import logging import time from opentelemetry.trace import get_current_span +from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID from configs import dify_config from contexts.wrapper import RecyclableContextVar +from core.logging.context import init_request_context from dify_app import DifyApp logger = logging.getLogger(__name__) @@ -25,28 +27,35 @@ def create_flask_app_with_configs() -> DifyApp: # add before request hook @dify_app.before_request def before_request(): - # add an unique identifier to each request + # Initialize logging context for this request + init_request_context() RecyclableContextVar.increment_thread_recycles() - # add after request hook for injecting X-Trace-Id header from OpenTelemetry span context + # add after request hook for injecting trace headers from OpenTelemetry span context + # Only adds headers when OTEL is enabled and has valid context @dify_app.after_request - def add_trace_id_header(response): + def add_trace_headers(response): try: span = get_current_span() ctx = span.get_span_context() if span else None - if ctx and ctx.is_valid: - trace_id_hex = format(ctx.trace_id, "032x") - # Avoid duplicates if some middleware added it - if "X-Trace-Id" not in response.headers: - response.headers["X-Trace-Id"] = trace_id_hex + + if not ctx or not ctx.is_valid: + return response + + # Inject trace headers from OTEL context + if ctx.trace_id != INVALID_TRACE_ID and "X-Trace-Id" not in response.headers: + response.headers["X-Trace-Id"] = format(ctx.trace_id, "032x") + if ctx.span_id != INVALID_SPAN_ID and "X-Span-Id" not in response.headers: + response.headers["X-Span-Id"] = format(ctx.span_id, "016x") + except Exception: # Never break the response due to tracing header injection - logger.warning("Failed to add trace ID to response header", exc_info=True) + logger.warning("Failed to add trace headers to response", exc_info=True) return response # Capture the decorator's return value to avoid pyright reportUnusedFunction _ = before_request - _ = add_trace_id_header + _ = add_trace_headers return dify_app @@ -75,6 +84,7 @@ def initialize_extensions(app: DifyApp): ext_import_modules, ext_logging, ext_login, + ext_logstore, ext_mail, ext_migrate, ext_orjson, @@ -105,6 +115,7 @@ def initialize_extensions(app: DifyApp): ext_migrate, ext_redis, ext_storage, + ext_logstore, # Initialize logstore after storage, before celery ext_celery, ext_login, ext_mail, diff --git a/api/commands.py b/api/commands.py index a8d89ac200..f6227ad2ba 100644 --- a/api/commands.py +++ b/api/commands.py @@ -1,8 +1,10 @@ import base64 +import datetime import json import logging import secrets from 
typing import Any +from uuid import uuid4 import click import sqlalchemy as sa @@ -22,6 +24,7 @@ from core.rag.datasource.vdb.vector_type import VectorType from core.rag.index_processor.constant.built_in_field import BuiltInField from core.rag.models.document import Document from core.tools.utils.system_oauth_encryption import encrypt_system_oauth_params +from core.workflow.entities.pause_reason import PauseReasonType from events.app_event import app_was_created from extensions.ext_database import db from extensions.ext_redis import redis_client @@ -34,13 +37,25 @@ from libs.rsa import generate_key_pair from models import Tenant from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, DatasetMetadataBinding, DocumentSegment from models.dataset import Document as DatasetDocument +from models.enums import CreatorUserRole, ExecutionOffLoadType from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation, UploadFile from models.oauth import DatasourceOauthParamConfig, DatasourceProvider from models.provider import Provider, ProviderModel from models.provider_ids import DatasourceProviderID, ToolProviderID from models.source import DataSourceApiKeyAuthBinding, DataSourceOauthBinding from models.tools import ToolOAuthSystemClient +from models.trigger import AppTriggerType, WorkflowTriggerLog, WorkflowTriggerStatus +from models.workflow import ( + WorkflowAppLog, + WorkflowNodeExecutionModel, + WorkflowNodeExecutionOffload, + WorkflowNodeExecutionTriggeredFrom, + WorkflowPause, + WorkflowPauseReason, + WorkflowRun, +) from services.account_service import AccountService, RegisterService, TenantService +from services.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs from services.plugin.data_migration import PluginDataMigration from services.plugin.plugin_migration import PluginMigration @@ -235,7 +250,7 @@ def migrate_annotation_vector_database(): if annotations: for annotation in annotations: document = Document( - page_content=annotation.question, + page_content=annotation.question_text, metadata={"annotation_id": annotation.id, "app_id": app.id, "doc_id": annotation.id}, ) documents.append(document) @@ -852,6 +867,218 @@ def clear_free_plan_tenant_expired_logs(days: int, batch: int, tenant_ids: list[ click.echo(click.style("Clear free plan tenant expired logs completed.", fg="green")) +@click.command("clean-workflow-runs", help="Clean expired workflow runs and related data for free tenants.") +@click.option("--days", default=30, show_default=True, help="Delete workflow runs created before N days ago.") +@click.option("--batch-size", default=200, show_default=True, help="Batch size for selecting workflow runs.") +@click.option( + "--start-after", + type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]), + default=None, + help="Optional lower bound (inclusive) for created_at; must be paired with --end-before.", +) +@click.option( + "--end-before", + type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]), + default=None, + help="Optional upper bound (exclusive) for created_at; must be paired with --start-after.", +) +def clean_workflow_runs( + days: int, + batch_size: int, + start_after: datetime.datetime | None, + end_before: datetime.datetime | None, +): + """ + Clean workflow runs and related workflow data for free tenants. 
+ """ + if (start_after is None) ^ (end_before is None): + raise click.UsageError("--start-after and --end-before must be provided together.") + + click.echo(click.style("Starting workflow run cleanup.", fg="white")) + + WorkflowRunCleanup( + days=days, + batch_size=batch_size, + start_after=start_after, + end_before=end_before, + ).run() + + click.echo(click.style("Workflow run cleanup completed.", fg="green")) + + +@click.command( + "seed-expired-workflow-runs", + help="Test-only: create synthetic expired workflow runs (with related records) for cleanup validation.", +) +@click.option("--days-ago", default=40, show_default=True, help="Backdate created_at to N days ago.") +@click.option("--limit", default=5, show_default=True, help="Number of workflow runs to create.") +@click.option( + "--tenant-id", + default=None, + help="Optional tenant id to use; if omitted, a random tenant id is generated.", +) +def seed_expired_workflow_runs(days_ago: int, limit: int, tenant_id: str | None) -> None: + base_time = datetime.datetime.now() - datetime.timedelta(days=days_ago) + tenant = tenant_id or str(uuid4()) + + with sessionmaker(db.engine, expire_on_commit=False).begin() as session: + for i in range(limit): + run_id = str(uuid4()) + app_id = str(uuid4()) + workflow_id = str(uuid4()) + creator_id = str(uuid4()) + created_at = base_time - datetime.timedelta(minutes=i) + + run = WorkflowRun( + id=run_id, + tenant_id=tenant, + app_id=app_id, + workflow_id=workflow_id, + type="workflow", + triggered_from="workflow-run", + version="v1", + graph="{}", + inputs="{}", + status="succeeded", + outputs="{}", + error=None, + elapsed_time=0.1, + total_tokens=0, + total_steps=0, + created_by_role="account", + created_by=creator_id, + created_at=created_at, + finished_at=created_at + datetime.timedelta(seconds=1), + exceptions_count=0, + ) + + node_execution = WorkflowNodeExecutionModel( + id=str(uuid4()), + tenant_id=tenant, + app_id=app_id, + workflow_id=workflow_id, + triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value, + workflow_run_id=run_id, + index=0, + predecessor_node_id=None, + node_execution_id=None, + node_id="start", + node_type="start", + title="Seeded node", + inputs="{}", + process_data="{}", + outputs="{}", + status="succeeded", + error=None, + elapsed_time=0.1, + execution_metadata="{}", + created_by_role="account", + created_by=creator_id, + created_at=created_at, + finished_at=created_at + datetime.timedelta(seconds=1), + ) + + offload_files: list[UploadFile] = [] + node_execution_offloads: list[WorkflowNodeExecutionOffload] = [] + for offload_type in ( + ExecutionOffLoadType.INPUTS, + ExecutionOffLoadType.OUTPUTS, + ExecutionOffLoadType.PROCESS_DATA, + ): + upload_file = UploadFile( + tenant_id=tenant, + storage_type=StorageType.LOCAL.value, + key=f"seeded/workflow-node-offload/{run_id}-{offload_type.value}.json", + name=f"{run_id}-{offload_type.value}.json", + size=256, + extension="json", + mime_type="application/json", + created_by_role=CreatorUserRole.ACCOUNT, + created_by=creator_id, + created_at=created_at, + used=True, + used_by=creator_id, + used_at=created_at, + ) + offload_files.append(upload_file) + node_execution_offloads.append( + WorkflowNodeExecutionOffload( + tenant_id=tenant, + app_id=app_id, + node_execution_id=node_execution.id, + type_=offload_type, + file_id=upload_file.id, + ) + ) + + app_log = WorkflowAppLog( + tenant_id=tenant, + app_id=app_id, + workflow_id=workflow_id, + workflow_run_id=run_id, + created_from="service-api", + 
created_by_role="account", + created_by=creator_id + ) + + trigger_log = WorkflowTriggerLog( + tenant_id=tenant, + app_id=app_id, + workflow_id=workflow_id, + workflow_run_id=run_id, + root_node_id=None, + trigger_metadata="{}", + trigger_type=AppTriggerType.TRIGGER_WEBHOOK, + trigger_data="{}", + inputs="{}", + outputs="{}", + status=WorkflowTriggerStatus.SUCCEEDED, + error=None, + queue_name="seed", + celery_task_id=None, + created_by_role="account", + created_by=creator_id, + retry_count=0, + elapsed_time=0.1, + total_tokens=0, + triggered_at=created_at, + finished_at=created_at + datetime.timedelta(seconds=1), + ) + + pause_id = str(uuid4()) + pause = WorkflowPause( + id=pause_id, + workflow_id=workflow_id, + workflow_run_id=run_id, + resumed_at=None, + state_object_key="seeded-state", + created_at=created_at, + updated_at=created_at, + ) + + pause_reason = WorkflowPauseReason( + pause_id=pause_id, + type_=PauseReasonType.HUMAN_INPUT_REQUIRED, + form_id=str(uuid4()), + message="Seeded human input required.", + node_id="seeded-human-input", + created_at=created_at, + updated_at=created_at, + ) + + records = [run, node_execution, app_log, trigger_log, pause, pause_reason] + records.extend(offload_files) + records.extend(node_execution_offloads) + session.add_all(records) + + click.echo( + click.style( + f"Created {limit} synthetic workflow runs (tenant={tenant}) backdated ~{days_ago} days.", + fg="green", + ) + ) + + @click.option("-f", "--force", is_flag=True, help="Skip user confirmation and force the command to execute.") @click.command("clear-orphaned-file-records", help="Clear orphaned file records.") def clear_orphaned_file_records(force: bool): @@ -1184,6 +1411,217 @@ def remove_orphaned_files_on_storage(force: bool): click.echo(click.style(f"Removed {removed_files} orphaned files, with {error_files} errors.", fg="yellow")) +@click.command("file-usage", help="Query file usages and show where files are referenced.") +@click.option("--file-id", type=str, default=None, help="Filter by file UUID.") +@click.option("--key", type=str, default=None, help="Filter by storage key.") +@click.option("--src", type=str, default=None, help="Filter by table.column pattern (e.g., 'documents.%' or '%.icon').") +@click.option("--limit", type=int, default=100, help="Limit number of results (default: 100).") +@click.option("--offset", type=int, default=0, help="Offset for pagination (default: 0).") +@click.option("--json", "output_json", is_flag=True, help="Output results in JSON format.") +def file_usage( + file_id: str | None, + key: str | None, + src: str | None, + limit: int, + offset: int, + output_json: bool, +): + """ + Query file usages and show where files are referenced in the database. + + This command reuses the same reference checking logic as clear-orphaned-file-records + and displays detailed information about where each file is referenced. 
+ """ + # define tables and columns to process + files_tables = [ + {"table": "upload_files", "id_column": "id", "key_column": "key"}, + {"table": "tool_files", "id_column": "id", "key_column": "file_key"}, + ] + ids_tables = [ + {"type": "uuid", "table": "message_files", "column": "upload_file_id", "pk_column": "id"}, + {"type": "text", "table": "documents", "column": "data_source_info", "pk_column": "id"}, + {"type": "text", "table": "document_segments", "column": "content", "pk_column": "id"}, + {"type": "text", "table": "messages", "column": "answer", "pk_column": "id"}, + {"type": "text", "table": "workflow_node_executions", "column": "inputs", "pk_column": "id"}, + {"type": "text", "table": "workflow_node_executions", "column": "process_data", "pk_column": "id"}, + {"type": "text", "table": "workflow_node_executions", "column": "outputs", "pk_column": "id"}, + {"type": "text", "table": "conversations", "column": "introduction", "pk_column": "id"}, + {"type": "text", "table": "conversations", "column": "system_instruction", "pk_column": "id"}, + {"type": "text", "table": "accounts", "column": "avatar", "pk_column": "id"}, + {"type": "text", "table": "apps", "column": "icon", "pk_column": "id"}, + {"type": "text", "table": "sites", "column": "icon", "pk_column": "id"}, + {"type": "json", "table": "messages", "column": "inputs", "pk_column": "id"}, + {"type": "json", "table": "messages", "column": "message", "pk_column": "id"}, + ] + + # Stream file usages with pagination to avoid holding all results in memory + paginated_usages = [] + total_count = 0 + + # First, build a mapping of file_id -> storage_key from the base tables + file_key_map = {} + for files_table in files_tables: + query = f"SELECT {files_table['id_column']}, {files_table['key_column']} FROM {files_table['table']}" + with db.engine.begin() as conn: + rs = conn.execute(sa.text(query)) + for row in rs: + file_key_map[str(row[0])] = f"{files_table['table']}:{row[1]}" + + # If filtering by key or file_id, verify it exists + if file_id and file_id not in file_key_map: + if output_json: + click.echo(json.dumps({"error": f"File ID {file_id} not found in base tables"})) + else: + click.echo(click.style(f"File ID {file_id} not found in base tables.", fg="red")) + return + + if key: + valid_prefixes = {f"upload_files:{key}", f"tool_files:{key}"} + matching_file_ids = [fid for fid, fkey in file_key_map.items() if fkey in valid_prefixes] + if not matching_file_ids: + if output_json: + click.echo(json.dumps({"error": f"Key {key} not found in base tables"})) + else: + click.echo(click.style(f"Key {key} not found in base tables.", fg="red")) + return + + guid_regexp = "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}" + + # For each reference table/column, find matching file IDs and record the references + for ids_table in ids_tables: + src_filter = f"{ids_table['table']}.{ids_table['column']}" + + # Skip if src filter doesn't match (use fnmatch for wildcard patterns) + if src: + if "%" in src or "_" in src: + import fnmatch + + # Convert SQL LIKE wildcards to fnmatch wildcards (% -> *, _ -> ?) 
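The `--src` filter above treats `%` and `_` as SQL-LIKE-style wildcards and translates them to `fnmatch` patterns (see the conversion on the next line of the diff). Hypothetical invocations, assuming the command is exposed through the Flask CLI like Dify's other maintenance commands:

```bash
# '%' behaves like fnmatch '*': every reference column of the documents table.
uv run flask file-usage --src 'documents.%' --limit 20

# Any table's "icon" column (matches apps.icon and sites.icon).
uv run flask file-usage --src '%.icon' --json

# Without wildcards, the filter is an exact table.column comparison.
uv run flask file-usage --src 'message_files.upload_file_id'
```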
+ pattern = src.replace("%", "*").replace("_", "?") + if not fnmatch.fnmatch(src_filter, pattern): + continue + else: + if src_filter != src: + continue + + if ids_table["type"] == "uuid": + # Direct UUID match + query = ( + f"SELECT {ids_table['pk_column']}, {ids_table['column']} " + f"FROM {ids_table['table']} WHERE {ids_table['column']} IS NOT NULL" + ) + with db.engine.begin() as conn: + rs = conn.execute(sa.text(query)) + for row in rs: + record_id = str(row[0]) + ref_file_id = str(row[1]) + if ref_file_id not in file_key_map: + continue + storage_key = file_key_map[ref_file_id] + + # Apply filters + if file_id and ref_file_id != file_id: + continue + if key and not storage_key.endswith(key): + continue + + # Only collect items within the requested page range + if offset <= total_count < offset + limit: + paginated_usages.append( + { + "src": f"{ids_table['table']}.{ids_table['column']}", + "record_id": record_id, + "file_id": ref_file_id, + "key": storage_key, + } + ) + total_count += 1 + + elif ids_table["type"] in ("text", "json"): + # Extract UUIDs from text/json content + column_cast = f"{ids_table['column']}::text" if ids_table["type"] == "json" else ids_table["column"] + query = ( + f"SELECT {ids_table['pk_column']}, {column_cast} " + f"FROM {ids_table['table']} WHERE {ids_table['column']} IS NOT NULL" + ) + with db.engine.begin() as conn: + rs = conn.execute(sa.text(query)) + for row in rs: + record_id = str(row[0]) + content = str(row[1]) + + # Find all UUIDs in the content + import re + + uuid_pattern = re.compile(guid_regexp, re.IGNORECASE) + matches = uuid_pattern.findall(content) + + for ref_file_id in matches: + if ref_file_id not in file_key_map: + continue + storage_key = file_key_map[ref_file_id] + + # Apply filters + if file_id and ref_file_id != file_id: + continue + if key and not storage_key.endswith(key): + continue + + # Only collect items within the requested page range + if offset <= total_count < offset + limit: + paginated_usages.append( + { + "src": f"{ids_table['table']}.{ids_table['column']}", + "record_id": record_id, + "file_id": ref_file_id, + "key": storage_key, + } + ) + total_count += 1 + + # Output results + if output_json: + result = { + "total": total_count, + "offset": offset, + "limit": limit, + "usages": paginated_usages, + } + click.echo(json.dumps(result, indent=2)) + else: + click.echo( + click.style(f"Found {total_count} file usages (showing {len(paginated_usages)} results)", fg="white") + ) + click.echo("") + + if not paginated_usages: + click.echo(click.style("No file usages found matching the specified criteria.", fg="yellow")) + return + + # Print table header + click.echo( + click.style( + f"{'Src (Table.Column)':<50} {'Record ID':<40} {'File ID':<40} {'Storage Key':<60}", + fg="cyan", + ) + ) + click.echo(click.style("-" * 190, fg="white")) + + # Print each usage + for usage in paginated_usages: + click.echo(f"{usage['src']:<50} {usage['record_id']:<40} {usage['file_id']:<40} {usage['key']:<60}") + + # Show pagination info + if offset + limit < total_count: + click.echo("") + click.echo( + click.style( + f"Showing {offset + 1}-{offset + len(paginated_usages)} of {total_count} results", fg="white" + ) + ) + click.echo(click.style(f"Use --offset {offset + limit} to see next page", fg="white")) + + @click.command("setup-system-tool-oauth-client", help="Setup system tool oauth client.") @click.option("--provider", prompt=True, help="Provider name") @click.option("--client-params", prompt=True, help="Client Params") diff --git 
a/api/configs/extra/__init__.py b/api/configs/extra/__init__.py index 4543b5389d..de97adfc0e 100644 --- a/api/configs/extra/__init__.py +++ b/api/configs/extra/__init__.py @@ -1,9 +1,11 @@ +from configs.extra.archive_config import ArchiveStorageConfig from configs.extra.notion_config import NotionConfig from configs.extra.sentry_config import SentryConfig class ExtraServiceConfig( # place the configs in alphabet order + ArchiveStorageConfig, NotionConfig, SentryConfig, ): diff --git a/api/configs/extra/archive_config.py b/api/configs/extra/archive_config.py new file mode 100644 index 0000000000..a85628fa61 --- /dev/null +++ b/api/configs/extra/archive_config.py @@ -0,0 +1,43 @@ +from pydantic import Field +from pydantic_settings import BaseSettings + + +class ArchiveStorageConfig(BaseSettings): + """ + Configuration settings for workflow run logs archiving storage. + """ + + ARCHIVE_STORAGE_ENABLED: bool = Field( + description="Enable workflow run logs archiving to S3-compatible storage", + default=False, + ) + + ARCHIVE_STORAGE_ENDPOINT: str | None = Field( + description="URL of the S3-compatible storage endpoint (e.g., 'https://storage.example.com')", + default=None, + ) + + ARCHIVE_STORAGE_ARCHIVE_BUCKET: str | None = Field( + description="Name of the bucket to store archived workflow logs", + default=None, + ) + + ARCHIVE_STORAGE_EXPORT_BUCKET: str | None = Field( + description="Name of the bucket to store exported workflow runs", + default=None, + ) + + ARCHIVE_STORAGE_ACCESS_KEY: str | None = Field( + description="Access key ID for authenticating with storage", + default=None, + ) + + ARCHIVE_STORAGE_SECRET_KEY: str | None = Field( + description="Secret access key for authenticating with storage", + default=None, + ) + + ARCHIVE_STORAGE_REGION: str = Field( + description="Region for storage (use 'auto' if the provider supports it)", + default="auto", + ) diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index b9091b5e2f..99ac618bcb 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -218,7 +218,7 @@ class PluginConfig(BaseSettings): PLUGIN_DAEMON_TIMEOUT: PositiveFloat | None = Field( description="Timeout in seconds for requests to the plugin daemon (set to None to disable)", - default=300.0, + default=600.0, ) INNER_API_KEY_FOR_PLUGIN: str = Field(description="Inner api key for plugin", default="inner-api-key") @@ -587,6 +587,11 @@ class LoggingConfig(BaseSettings): default="INFO", ) + LOG_OUTPUT_FORMAT: Literal["text", "json"] = Field( + description="Log output format: 'text' for human-readable, 'json' for structured JSON logs.", + default="text", + ) + LOG_FILE: str | None = Field( description="File path for log output.", default=None, @@ -954,6 +959,16 @@ class MailConfig(BaseSettings): default=None, ) + ENABLE_TRIAL_APP: bool = Field( + description="Enable trial app", + default=False, + ) + + ENABLE_EXPLORE_BANNER: bool = Field( + description="Enable explore banner", + default=False, + ) + class RagEtlConfig(BaseSettings): """ @@ -1096,6 +1111,10 @@ class CeleryScheduleTasksConfig(BaseSettings): description="Enable clean messages task", default=False, ) + ENABLE_WORKFLOW_RUN_CLEANUP_TASK: bool = Field( + description="Enable scheduled workflow run cleanup task", + default=False, + ) ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: bool = Field( description="Enable mail clean document notify task", default=False, @@ -1252,19 +1271,9 @@ class WorkflowLogConfig(BaseSettings): class SwaggerUIConfig(BaseSettings): - """ - 
Configuration for Swagger UI documentation. - - Security Note: Swagger UI is automatically disabled in PRODUCTION environment - to prevent API information disclosure. Set SWAGGER_UI_ENABLED=true explicitly - to enable in production if needed. - """ - - SWAGGER_UI_ENABLED: bool | None = Field( - description="Whether to enable Swagger UI in api module. " - "Automatically disabled in PRODUCTION environment for security. " - "Set to true explicitly to enable in production.", - default=None, + SWAGGER_UI_ENABLED: bool = Field( + description="Whether to enable Swagger UI in api module", + default=True, ) SWAGGER_UI_PATH: str = Field( @@ -1272,23 +1281,6 @@ class SwaggerUIConfig(BaseSettings): default="/swagger-ui.html", ) - @property - def swagger_ui_enabled(self) -> bool: - """ - Compute whether Swagger UI should be enabled. - - If SWAGGER_UI_ENABLED is explicitly set, use that value. - Otherwise, disable in PRODUCTION environment for security. - """ - if self.SWAGGER_UI_ENABLED is not None: - return self.SWAGGER_UI_ENABLED - - # Auto-disable in production environment - import os - - deploy_env = os.environ.get("DEPLOY_ENV", "PRODUCTION") - return deploy_env.upper() != "PRODUCTION" - class TenantIsolatedTaskQueueConfig(BaseSettings): TENANT_ISOLATED_TASK_CONCURRENCY: int = Field( @@ -1297,6 +1289,21 @@ class TenantIsolatedTaskQueueConfig(BaseSettings): ) +class SandboxExpiredRecordsCleanConfig(BaseSettings): + SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: NonNegativeInt = Field( + description="Graceful period in days for sandbox records clean after subscription expiration", + default=21, + ) + SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: PositiveInt = Field( + description="Maximum number of records to process in each batch", + default=1000, + ) + SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: PositiveInt = Field( + description="Retention days for sandbox expired workflow_run records and message records", + default=30, + ) + + class FeatureConfig( # place the configs in alphabet order AppExecutionConfig, @@ -1322,6 +1329,7 @@ class FeatureConfig( PositionConfig, RagEtlConfig, RepositoryConfig, + SandboxExpiredRecordsCleanConfig, SecurityConfig, TenantIsolatedTaskQueueConfig, ToolConfig, diff --git a/api/configs/feature/hosted_service/__init__.py b/api/configs/feature/hosted_service/__init__.py index 4ad30014c7..42ede718c4 100644 --- a/api/configs/feature/hosted_service/__init__.py +++ b/api/configs/feature/hosted_service/__init__.py @@ -8,6 +8,11 @@ class HostedCreditConfig(BaseSettings): default="", ) + HOSTED_POOL_CREDITS: int = Field( + description="Pool credits for hosted service", + default=200, + ) + def get_model_credits(self, model_name: str) -> int: """ Get credit value for a specific model name. 
@@ -60,19 +65,46 @@ class HostedOpenAiConfig(BaseSettings): HOSTED_OPENAI_TRIAL_MODELS: str = Field( description="Comma-separated list of available models for trial access", - default="gpt-3.5-turbo," - "gpt-3.5-turbo-1106," - "gpt-3.5-turbo-instruct," + default="gpt-4," + "gpt-4-turbo-preview," + "gpt-4-turbo-2024-04-09," + "gpt-4-1106-preview," + "gpt-4-0125-preview," + "gpt-4-turbo," + "gpt-4.1," + "gpt-4.1-2025-04-14," + "gpt-4.1-mini," + "gpt-4.1-mini-2025-04-14," + "gpt-4.1-nano," + "gpt-4.1-nano-2025-04-14," + "gpt-3.5-turbo," "gpt-3.5-turbo-16k," "gpt-3.5-turbo-16k-0613," + "gpt-3.5-turbo-1106," "gpt-3.5-turbo-0613," "gpt-3.5-turbo-0125," - "text-davinci-003", - ) - - HOSTED_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field( - description="Quota limit for hosted OpenAI service usage", - default=200, + "gpt-3.5-turbo-instruct," + "text-davinci-003," + "chatgpt-4o-latest," + "gpt-4o," + "gpt-4o-2024-05-13," + "gpt-4o-2024-08-06," + "gpt-4o-2024-11-20," + "gpt-4o-audio-preview," + "gpt-4o-audio-preview-2025-06-03," + "gpt-4o-mini," + "gpt-4o-mini-2024-07-18," + "o3-mini," + "o3-mini-2025-01-31," + "gpt-5-mini-2025-08-07," + "gpt-5-mini," + "o4-mini," + "o4-mini-2025-04-16," + "gpt-5-chat-latest," + "gpt-5," + "gpt-5-2025-08-07," + "gpt-5-nano," + "gpt-5-nano-2025-08-07", ) HOSTED_OPENAI_PAID_ENABLED: bool = Field( @@ -87,6 +119,13 @@ class HostedOpenAiConfig(BaseSettings): "gpt-4-turbo-2024-04-09," "gpt-4-1106-preview," "gpt-4-0125-preview," + "gpt-4-turbo," + "gpt-4.1," + "gpt-4.1-2025-04-14," + "gpt-4.1-mini," + "gpt-4.1-mini-2025-04-14," + "gpt-4.1-nano," + "gpt-4.1-nano-2025-04-14," "gpt-3.5-turbo," "gpt-3.5-turbo-16k," "gpt-3.5-turbo-16k-0613," @@ -94,7 +133,150 @@ class HostedOpenAiConfig(BaseSettings): "gpt-3.5-turbo-0613," "gpt-3.5-turbo-0125," "gpt-3.5-turbo-instruct," - "text-davinci-003", + "text-davinci-003," + "chatgpt-4o-latest," + "gpt-4o," + "gpt-4o-2024-05-13," + "gpt-4o-2024-08-06," + "gpt-4o-2024-11-20," + "gpt-4o-audio-preview," + "gpt-4o-audio-preview-2025-06-03," + "gpt-4o-mini," + "gpt-4o-mini-2024-07-18," + "o3-mini," + "o3-mini-2025-01-31," + "gpt-5-mini-2025-08-07," + "gpt-5-mini," + "o4-mini," + "o4-mini-2025-04-16," + "gpt-5-chat-latest," + "gpt-5," + "gpt-5-2025-08-07," + "gpt-5-nano," + "gpt-5-nano-2025-08-07", + ) + + +class HostedGeminiConfig(BaseSettings): + """ + Configuration for fetching Gemini service + """ + + HOSTED_GEMINI_API_KEY: str | None = Field( + description="API key for hosted Gemini service", + default=None, + ) + + HOSTED_GEMINI_API_BASE: str | None = Field( + description="Base URL for hosted Gemini API", + default=None, + ) + + HOSTED_GEMINI_API_ORGANIZATION: str | None = Field( + description="Organization ID for hosted Gemini service", + default=None, + ) + + HOSTED_GEMINI_TRIAL_ENABLED: bool = Field( + description="Enable trial access to hosted Gemini service", + default=False, + ) + + HOSTED_GEMINI_TRIAL_MODELS: str = Field( + description="Comma-separated list of available models for trial access", + default="gemini-2.5-flash,gemini-2.0-flash,gemini-2.0-flash-lite,", + ) + + HOSTED_GEMINI_PAID_ENABLED: bool = Field( + description="Enable paid access to hosted gemini service", + default=False, + ) + + HOSTED_GEMINI_PAID_MODELS: str = Field( + description="Comma-separated list of available models for paid access", + default="gemini-2.5-flash,gemini-2.0-flash,gemini-2.0-flash-lite,", + ) + + +class HostedXAIConfig(BaseSettings): + """ + Configuration for fetching XAI service + """ + + HOSTED_XAI_API_KEY: str | None = Field( + description="API 
key for hosted XAI service", + default=None, + ) + + HOSTED_XAI_API_BASE: str | None = Field( + description="Base URL for hosted XAI API", + default=None, + ) + + HOSTED_XAI_API_ORGANIZATION: str | None = Field( + description="Organization ID for hosted XAI service", + default=None, + ) + + HOSTED_XAI_TRIAL_ENABLED: bool = Field( + description="Enable trial access to hosted XAI service", + default=False, + ) + + HOSTED_XAI_TRIAL_MODELS: str = Field( + description="Comma-separated list of available models for trial access", + default="grok-3,grok-3-mini,grok-3-mini-fast", + ) + + HOSTED_XAI_PAID_ENABLED: bool = Field( + description="Enable paid access to hosted XAI service", + default=False, + ) + + HOSTED_XAI_PAID_MODELS: str = Field( + description="Comma-separated list of available models for paid access", + default="grok-3,grok-3-mini,grok-3-mini-fast", + ) + + +class HostedDeepseekConfig(BaseSettings): + """ + Configuration for fetching Deepseek service + """ + + HOSTED_DEEPSEEK_API_KEY: str | None = Field( + description="API key for hosted Deepseek service", + default=None, + ) + + HOSTED_DEEPSEEK_API_BASE: str | None = Field( + description="Base URL for hosted Deepseek API", + default=None, + ) + + HOSTED_DEEPSEEK_API_ORGANIZATION: str | None = Field( + description="Organization ID for hosted Deepseek service", + default=None, + ) + + HOSTED_DEEPSEEK_TRIAL_ENABLED: bool = Field( + description="Enable trial access to hosted Deepseek service", + default=False, + ) + + HOSTED_DEEPSEEK_TRIAL_MODELS: str = Field( + description="Comma-separated list of available models for trial access", + default="deepseek-chat,deepseek-reasoner", + ) + + HOSTED_DEEPSEEK_PAID_ENABLED: bool = Field( + description="Enable paid access to hosted Deepseek service", + default=False, + ) + + HOSTED_DEEPSEEK_PAID_MODELS: str = Field( + description="Comma-separated list of available models for paid access", + default="deepseek-chat,deepseek-reasoner", ) @@ -144,16 +326,66 @@ class HostedAnthropicConfig(BaseSettings): default=False, ) - HOSTED_ANTHROPIC_QUOTA_LIMIT: NonNegativeInt = Field( - description="Quota limit for hosted Anthropic service usage", - default=600000, - ) - HOSTED_ANTHROPIC_PAID_ENABLED: bool = Field( description="Enable paid access to hosted Anthropic service", default=False, ) + HOSTED_ANTHROPIC_TRIAL_MODELS: str = Field( + description="Comma-separated list of available models for trial access", + default="claude-opus-4-20250514," + "claude-sonnet-4-20250514," + "claude-3-5-haiku-20241022," + "claude-3-opus-20240229," + "claude-3-7-sonnet-20250219," + "claude-3-haiku-20240307", + ) + HOSTED_ANTHROPIC_PAID_MODELS: str = Field( + description="Comma-separated list of available models for paid access", + default="claude-opus-4-20250514," + "claude-sonnet-4-20250514," + "claude-3-5-haiku-20241022," + "claude-3-opus-20240229," + "claude-3-7-sonnet-20250219," + "claude-3-haiku-20240307", + ) + + +class HostedTongyiConfig(BaseSettings): + """ + Configuration for hosted Tongyi service + """ + + HOSTED_TONGYI_API_KEY: str | None = Field( + description="API key for hosted Tongyi service", + default=None, + ) + + HOSTED_TONGYI_USE_INTERNATIONAL_ENDPOINT: bool = Field( + description="Use international endpoint for hosted Tongyi service", + default=False, + ) + + HOSTED_TONGYI_TRIAL_ENABLED: bool = Field( + description="Enable trial access to hosted Tongyi service", + default=False, + ) + + HOSTED_TONGYI_PAID_ENABLED: bool = Field( + description="Enable paid access to hosted Tongyi service",
default=False, + ) + + HOSTED_TONGYI_TRIAL_MODELS: str = Field( + description="Comma-separated list of available models for trial access", + default="", + ) + + HOSTED_TONGYI_PAID_MODELS: str = Field( + description="Comma-separated list of available models for paid access", + default="", + ) + class HostedMinmaxConfig(BaseSettings): """ @@ -246,9 +478,13 @@ class HostedServiceConfig( HostedOpenAiConfig, HostedSparkConfig, HostedZhipuAIConfig, + HostedTongyiConfig, # moderation HostedModerationConfig, # credit config HostedCreditConfig, + HostedGeminiConfig, + HostedXAIConfig, + HostedDeepseekConfig, ): pass diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index c4390ffaab..63f75924bf 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -107,7 +107,7 @@ class KeywordStoreConfig(BaseSettings): class DatabaseConfig(BaseSettings): # Database type selector - DB_TYPE: Literal["postgresql", "mysql", "oceanbase"] = Field( + DB_TYPE: Literal["postgresql", "mysql", "oceanbase", "seekdb"] = Field( description="Database type to use. OceanBase is MySQL-compatible.", default="postgresql", ) diff --git a/api/configs/middleware/storage/aliyun_oss_storage_config.py b/api/configs/middleware/storage/aliyun_oss_storage_config.py index 331c486d54..6df14175ae 100644 --- a/api/configs/middleware/storage/aliyun_oss_storage_config.py +++ b/api/configs/middleware/storage/aliyun_oss_storage_config.py @@ -41,3 +41,8 @@ class AliyunOSSStorageConfig(BaseSettings): description="Base path within the bucket to store objects (e.g., 'my-app-data/')", default=None, ) + + ALIYUN_CLOUDBOX_ID: str | None = Field( + description="Cloudbox id for aliyun cloudbox service", + default=None, + ) diff --git a/api/configs/middleware/storage/huawei_obs_storage_config.py b/api/configs/middleware/storage/huawei_obs_storage_config.py index 5b5cd2f750..46b6f2e68d 100644 --- a/api/configs/middleware/storage/huawei_obs_storage_config.py +++ b/api/configs/middleware/storage/huawei_obs_storage_config.py @@ -26,3 +26,8 @@ class HuaweiCloudOBSStorageConfig(BaseSettings): description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')", default=None, ) + + HUAWEI_OBS_PATH_STYLE: bool = Field( + description="Flag to indicate whether to use path-style URLs for OBS requests", + default=False, + ) diff --git a/api/configs/middleware/storage/tencent_cos_storage_config.py b/api/configs/middleware/storage/tencent_cos_storage_config.py index e297e748e9..cdd10740f8 100644 --- a/api/configs/middleware/storage/tencent_cos_storage_config.py +++ b/api/configs/middleware/storage/tencent_cos_storage_config.py @@ -31,3 +31,8 @@ class TencentCloudCOSStorageConfig(BaseSettings): description="Protocol scheme for COS requests: 'https' (recommended) or 'http'", default=None, ) + + TENCENT_COS_CUSTOM_DOMAIN: str | None = Field( + description="Tencent Cloud COS custom domain setting", + default=None, + ) diff --git a/api/configs/middleware/vdb/milvus_config.py b/api/configs/middleware/vdb/milvus_config.py index 05cee51cc9..eb9b0ac2ab 100644 --- a/api/configs/middleware/vdb/milvus_config.py +++ b/api/configs/middleware/vdb/milvus_config.py @@ -16,7 +16,6 @@ class MilvusConfig(BaseSettings): description="Authentication token for Milvus, if token-based authentication is enabled", default=None, ) - MILVUS_USER: str | None = Field( description="Username for authenticating with Milvus, if username/password authentication is enabled", default=None, diff --git 
a/api/controllers/common/fields.py b/api/controllers/common/fields.py index df9de825de..c16a23fac8 100644 --- a/api/controllers/common/fields.py +++ b/api/controllers/common/fields.py @@ -1,62 +1,59 @@ -from flask_restx import Api, Namespace, fields +from __future__ import annotations -from libs.helper import AppIconUrlField +from typing import Any, TypeAlias -parameters__system_parameters = { - "image_file_size_limit": fields.Integer, - "video_file_size_limit": fields.Integer, - "audio_file_size_limit": fields.Integer, - "file_size_limit": fields.Integer, - "workflow_file_upload_limit": fields.Integer, -} +from pydantic import BaseModel, ConfigDict, computed_field + +from core.file import helpers as file_helpers +from models.model import IconType + +JSONValue: TypeAlias = str | int | float | bool | None | dict[str, Any] | list[Any] +JSONObject: TypeAlias = dict[str, Any] -def build_system_parameters_model(api_or_ns: Api | Namespace): - """Build the system parameters model for the API or Namespace.""" - return api_or_ns.model("SystemParameters", parameters__system_parameters) +class SystemParameters(BaseModel): + image_file_size_limit: int + video_file_size_limit: int + audio_file_size_limit: int + file_size_limit: int + workflow_file_upload_limit: int -parameters_fields = { - "opening_statement": fields.String, - "suggested_questions": fields.Raw, - "suggested_questions_after_answer": fields.Raw, - "speech_to_text": fields.Raw, - "text_to_speech": fields.Raw, - "retriever_resource": fields.Raw, - "annotation_reply": fields.Raw, - "more_like_this": fields.Raw, - "user_input_form": fields.Raw, - "sensitive_word_avoidance": fields.Raw, - "file_upload": fields.Raw, - "system_parameters": fields.Nested(parameters__system_parameters), -} +class Parameters(BaseModel): + opening_statement: str | None = None + suggested_questions: list[str] + suggested_questions_after_answer: JSONObject + speech_to_text: JSONObject + text_to_speech: JSONObject + retriever_resource: JSONObject + annotation_reply: JSONObject + more_like_this: JSONObject + user_input_form: list[JSONObject] + sensitive_word_avoidance: JSONObject + file_upload: JSONObject + system_parameters: SystemParameters -def build_parameters_model(api_or_ns: Api | Namespace): - """Build the parameters model for the API or Namespace.""" - copied_fields = parameters_fields.copy() - copied_fields["system_parameters"] = fields.Nested(build_system_parameters_model(api_or_ns)) - return api_or_ns.model("Parameters", copied_fields) +class Site(BaseModel): + model_config = ConfigDict(from_attributes=True) + title: str + chat_color_theme: str | None = None + chat_color_theme_inverted: bool + icon_type: str | None = None + icon: str | None = None + icon_background: str | None = None + description: str | None = None + copyright: str | None = None + privacy_policy: str | None = None + custom_disclaimer: str | None = None + default_language: str + show_workflow_steps: bool + use_icon_as_answer_icon: bool -site_fields = { - "title": fields.String, - "chat_color_theme": fields.String, - "chat_color_theme_inverted": fields.Boolean, - "icon_type": fields.String, - "icon": fields.String, - "icon_background": fields.String, - "icon_url": AppIconUrlField, - "description": fields.String, - "copyright": fields.String, - "privacy_policy": fields.String, - "custom_disclaimer": fields.String, - "default_language": fields.String, - "show_workflow_steps": fields.Boolean, - "use_icon_as_answer_icon": fields.Boolean, -} - - -def build_site_model(api_or_ns: Api | Namespace): - 
"""Build the site model for the API or Namespace.""" - return api_or_ns.model("Site", site_fields) + @computed_field(return_type=str | None) # type: ignore + @property + def icon_url(self) -> str | None: + if self.icon and self.icon_type == IconType.IMAGE: + return file_helpers.get_signed_file_url(self.icon) + return None diff --git a/api/controllers/common/file_response.py b/api/controllers/common/file_response.py new file mode 100644 index 0000000000..ca8ea3d52e --- /dev/null +++ b/api/controllers/common/file_response.py @@ -0,0 +1,57 @@ +import os +from email.message import Message +from urllib.parse import quote + +from flask import Response + +HTML_MIME_TYPES = frozenset({"text/html", "application/xhtml+xml"}) +HTML_EXTENSIONS = frozenset({"html", "htm"}) + + +def _normalize_mime_type(mime_type: str | None) -> str: + if not mime_type: + return "" + message = Message() + message["Content-Type"] = mime_type + return message.get_content_type().strip().lower() + + +def _is_html_extension(extension: str | None) -> bool: + if not extension: + return False + return extension.lstrip(".").lower() in HTML_EXTENSIONS + + +def is_html_content(mime_type: str | None, filename: str | None, extension: str | None = None) -> bool: + normalized_mime_type = _normalize_mime_type(mime_type) + if normalized_mime_type in HTML_MIME_TYPES: + return True + + if _is_html_extension(extension): + return True + + if filename: + return _is_html_extension(os.path.splitext(filename)[1]) + + return False + + +def enforce_download_for_html( + response: Response, + *, + mime_type: str | None, + filename: str | None, + extension: str | None = None, +) -> bool: + if not is_html_content(mime_type, filename, extension): + return False + + if filename: + encoded_filename = quote(filename) + response.headers["Content-Disposition"] = f"attachment; filename*=UTF-8''{encoded_filename}" + else: + response.headers["Content-Disposition"] = "attachment" + + response.headers["Content-Type"] = "application/octet-stream" + response.headers["X-Content-Type-Options"] = "nosniff" + return True diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index ad878fc266..fdc9aabc83 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -107,10 +107,12 @@ from .datasets.rag_pipeline import ( # Import explore controllers from .explore import ( + banner, installed_app, parameter, recommended_app, saved_message, + trial, ) # Import tag controllers @@ -145,6 +147,7 @@ __all__ = [ "apikey", "app", "audio", + "banner", "billing", "bp", "completion", @@ -198,6 +201,7 @@ __all__ = [ "statistic", "tags", "tool_providers", + "trial", "trigger_providers", "version", "website", diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py index a25ca5ef51..978df15cf1 100644 --- a/api/controllers/console/admin.py +++ b/api/controllers/console/admin.py @@ -15,7 +15,7 @@ from controllers.console.wraps import only_edition_cloud from core.db.session_factory import session_factory from extensions.ext_database import db from libs.token import extract_access_token -from models.model import App, InstalledApp, RecommendedApp +from models.model import App, ExporleBanner, InstalledApp, RecommendedApp, TrialApp P = ParamSpec("P") R = TypeVar("R") @@ -32,6 +32,8 @@ class InsertExploreAppPayload(BaseModel): language: str = Field(...) category: str = Field(...) position: int = Field(...) 
+ can_trial: bool = Field(default=False) + trial_limit: int = Field(default=0) @field_validator("language") @classmethod @@ -39,11 +41,33 @@ class InsertExploreAppPayload(BaseModel): return supported_language(value) +class InsertExploreBannerPayload(BaseModel): + category: str = Field(...) + title: str = Field(...) + description: str = Field(...) + img_src: str = Field(..., alias="img-src") + language: str = Field(default="en-US") + link: str = Field(...) + sort: int = Field(...) + + @field_validator("language") + @classmethod + def validate_language(cls, value: str) -> str: + return supported_language(value) + + model_config = {"populate_by_name": True} + + console_ns.schema_model( InsertExploreAppPayload.__name__, InsertExploreAppPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0), ) +console_ns.schema_model( + InsertExploreBannerPayload.__name__, + InsertExploreBannerPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0), +) + def admin_required(view: Callable[P, R]): @wraps(view) @@ -109,6 +133,20 @@ class InsertExploreAppListApi(Resource): ) db.session.add(recommended_app) + if payload.can_trial: + trial_app = db.session.execute( + select(TrialApp).where(TrialApp.app_id == payload.app_id) + ).scalar_one_or_none() + if not trial_app: + db.session.add( + TrialApp( + app_id=payload.app_id, + tenant_id=app.tenant_id, + trial_limit=payload.trial_limit, + ) + ) + else: + trial_app.trial_limit = payload.trial_limit app.is_public = True db.session.commit() @@ -123,6 +161,20 @@ class InsertExploreAppListApi(Resource): recommended_app.category = payload.category recommended_app.position = payload.position + if payload.can_trial: + trial_app = db.session.execute( + select(TrialApp).where(TrialApp.app_id == payload.app_id) + ).scalar_one_or_none() + if not trial_app: + db.session.add( + TrialApp( + app_id=payload.app_id, + tenant_id=app.tenant_id, + trial_limit=payload.trial_limit, + ) + ) + else: + trial_app.trial_limit = payload.trial_limit app.is_public = True db.session.commit() @@ -168,7 +220,62 @@ class InsertExploreAppApi(Resource): for installed_app in installed_apps: session.delete(installed_app) + trial_app = session.execute( + select(TrialApp).where(TrialApp.app_id == recommended_app.app_id) + ).scalar_one_or_none() + if trial_app: + session.delete(trial_app) + db.session.delete(recommended_app) db.session.commit() return {"result": "success"}, 204 + + +@console_ns.route("/admin/insert-explore-banner") +class InsertExploreBannerApi(Resource): + @console_ns.doc("insert_explore_banner") + @console_ns.doc(description="Insert an explore banner") + @console_ns.expect(console_ns.models[InsertExploreBannerPayload.__name__]) + @console_ns.response(201, "Banner inserted successfully") + @only_edition_cloud + @admin_required + def post(self): + payload = InsertExploreBannerPayload.model_validate(console_ns.payload) + + content = { + "category": payload.category, + "title": payload.title, + "description": payload.description, + "img-src": payload.img_src, + } + + banner = ExporleBanner( + content=content, + link=payload.link, + sort=payload.sort, + language=payload.language, + ) + db.session.add(banner) + db.session.commit() + + return {"result": "success"}, 201 + + +@console_ns.route("/admin/insert-explore-banner/<banner_id>") +class DeleteExploreBannerApi(Resource): + @console_ns.doc("delete_explore_banner") + @console_ns.doc(description="Delete an explore banner") + @console_ns.doc(params={"banner_id": "Banner ID to delete"}) + @console_ns.response(204, "Banner deleted successfully") + @only_edition_cloud + @admin_required + def delete(self, banner_id): + banner = db.session.execute(select(ExporleBanner).where(ExporleBanner.id == banner_id)).scalar_one_or_none() + if not banner: + raise NotFound(f"Banner '{banner_id}' not found") + + db.session.delete(banner) + db.session.commit() + + return {"result": "success"}, 204
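`InsertExploreBannerPayload` above pairs a field alias (`img-src`) with `populate_by_name`, so the admin endpoint accepts the hyphenated wire key while Python code keeps the snake_case attribute. A quick sketch with invented sample values:

```python
payload = InsertExploreBannerPayload.model_validate({
    "category": "featured",
    "title": "Workflow templates",
    "description": "Starter pipelines for new workspaces",
    "img-src": "https://example.com/banner.png",  # hyphenated alias from the request body
    "link": "https://example.com/templates",
    "sort": 1,
})
assert payload.img_src == "https://example.com/banner.png"
assert payload.language == "en-US"  # default applies when the key is omitted

# populate_by_name=True means the attribute name is accepted as well:
InsertExploreBannerPayload.model_validate({**payload.model_dump(exclude={"img_src"}), "img_src": "https://example.com/alt.png"})
```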
deleted successfully") + @only_edition_cloud + @admin_required + def delete(self, banner_id): + banner = db.session.execute(select(ExporleBanner).where(ExporleBanner.id == banner_id)).scalar_one_or_none() + if not banner: + raise NotFound(f"Banner '{banner_id}' is not found") + + db.session.delete(banner) + db.session.commit() + + return {"result": "success"}, 204 diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py index 6a4c1528b0..388820e20a 100644 --- a/api/controllers/console/app/annotation.py +++ b/api/controllers/console/app/annotation.py @@ -272,6 +272,7 @@ class AnnotationExportApi(Resource): @account_initialization_required @edit_permission_required def get(self, app_id): + app_id = str(app_id) annotation_list = AppAnnotationService.export_annotation_list_by_app_id(app_id) response_data = {"data": marshal(annotation_list, annotation_fields)} @@ -359,7 +360,6 @@ class AnnotationBatchImportApi(Resource): file.seek(0, 2) # Seek to end of file file_size = file.tell() file.seek(0) # Reset to beginning - max_size_bytes = dify_config.ANNOTATION_IMPORT_FILE_SIZE_LIMIT * 1024 * 1024 if file_size > max_size_bytes: abort( diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index 62e997dae2..d66bb7063f 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -1,13 +1,16 @@ +import re import uuid -from typing import Literal +from datetime import datetime +from typing import Any, Literal, TypeAlias from flask import request -from flask_restx import Resource, fields, marshal, marshal_with -from pydantic import BaseModel, Field, field_validator +from flask_restx import Resource +from pydantic import AliasChoices, BaseModel, ConfigDict, Field, computed_field, field_validator from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import BadRequest +from controllers.common.schema import register_schema_models from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import ( @@ -18,27 +21,19 @@ from controllers.console.wraps import ( is_admin_or_owner_required, setup_required, ) +from core.file import helpers as file_helpers from core.ops.ops_trace_manager import OpsTraceManager from core.workflow.enums import NodeType from extensions.ext_database import db -from fields.app_fields import ( - deleted_tool_fields, - model_config_fields, - model_config_partial_fields, - site_fields, - tag_fields, -) -from fields.workflow_fields import workflow_partial_fields as _workflow_partial_fields_dict -from libs.helper import AppIconUrlField, TimestampField from libs.login import current_account_with_tenant, login_required from models import App, Workflow +from models.model import IconType from services.app_dsl_service import AppDslService, ImportMode from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService from services.feature_service import FeatureService ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"] -DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}" class AppListQuery(BaseModel): @@ -73,6 +68,48 @@ class AppListQuery(BaseModel): raise ValueError("Invalid UUID format in tag_ids.") from exc +# XSS prevention: patterns that could lead to XSS attacks +# Includes: script tags, iframe tags, javascript: protocol, SVG with onload, etc. 
class CreateAppPayload(BaseModel): name: str = Field(..., min_length=1, description="App name") description: str | None = Field(default=None, description="App description (max 400 chars)", max_length=400) @@ -81,6 +118,11 @@ class CreateAppPayload(BaseModel): icon: str | None = Field(default=None, description="Icon") icon_background: str | None = Field(default=None, description="Icon background color") + @field_validator("name", "description", mode="before") + @classmethod + def validate_xss_safe(cls, value: str | None, info) -> str | None: + return _validate_xss_safe(value, info.field_name) + class UpdateAppPayload(BaseModel): name: str = Field(..., min_length=1, description="App name") @@ -91,6 +133,11 @@ class UpdateAppPayload(BaseModel): use_icon_as_answer_icon: bool | None = Field(default=None, description="Use icon as answer icon") max_active_requests: int | None = Field(default=None, description="Maximum active requests") + @field_validator("name", "description", mode="before") + @classmethod + def validate_xss_safe(cls, value: str | None, info) -> str | None: + return _validate_xss_safe(value, info.field_name) + class CopyAppPayload(BaseModel): name: str | None = Field(default=None, description="Name for the copied app") @@ -99,6 +146,11 @@ class CopyAppPayload(BaseModel): icon: str | None = Field(default=None, description="Icon") icon_background: str | None = Field(default=None, description="Icon background color") + @field_validator("name", "description", mode="before") + @classmethod + def validate_xss_safe(cls, value: str | None, info) -> str | None: + return _validate_xss_safe(value, info.field_name) + class AppExportQuery(BaseModel): include_secret: bool = Field(default=False, description="Include secrets in export") @@ -134,124 +186,292 @@ class AppTracePayload(BaseModel): return value -def reg(cls: type[BaseModel]): - console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)) +JSONValue: TypeAlias = Any -reg(AppListQuery) -reg(CreateAppPayload) -reg(UpdateAppPayload) -reg(CopyAppPayload) -reg(AppExportQuery) -reg(AppNamePayload) -reg(AppIconPayload) -reg(AppSiteStatusPayload) -reg(AppApiStatusPayload) -reg(AppTracePayload) +class ResponseModel(BaseModel): + model_config = ConfigDict( + from_attributes=True, + extra="ignore", + populate_by_name=True, + serialize_by_alias=True, + 
protected_namespaces=(), + ) -# Register models for flask_restx to avoid dict type issues in Swagger -# Register base models first -tag_model = console_ns.model("Tag", tag_fields) -workflow_partial_model = console_ns.model("WorkflowPartial", _workflow_partial_fields_dict) +def _to_timestamp(value: datetime | int | None) -> int | None: + if isinstance(value, datetime): + return int(value.timestamp()) + return value -model_config_model = console_ns.model("ModelConfig", model_config_fields) -model_config_partial_model = console_ns.model("ModelConfigPartial", model_config_partial_fields) +def _build_icon_url(icon_type: str | IconType | None, icon: str | None) -> str | None: + if icon is None or icon_type is None: + return None + icon_type_value = icon_type.value if isinstance(icon_type, IconType) else str(icon_type) + if icon_type_value.lower() != IconType.IMAGE.value: + return None + return file_helpers.get_signed_file_url(icon) -deleted_tool_model = console_ns.model("DeletedTool", deleted_tool_fields) -site_model = console_ns.model("Site", site_fields) +class Tag(ResponseModel): + id: str + name: str + type: str -app_partial_model = console_ns.model( - "AppPartial", - { - "id": fields.String, - "name": fields.String, - "max_active_requests": fields.Raw(), - "description": fields.String(attribute="desc_or_prompt"), - "mode": fields.String(attribute="mode_compatible_with_agent"), - "icon_type": fields.String, - "icon": fields.String, - "icon_background": fields.String, - "icon_url": AppIconUrlField, - "model_config": fields.Nested(model_config_partial_model, attribute="app_model_config", allow_null=True), - "workflow": fields.Nested(workflow_partial_model, allow_null=True), - "use_icon_as_answer_icon": fields.Boolean, - "created_by": fields.String, - "created_at": TimestampField, - "updated_by": fields.String, - "updated_at": TimestampField, - "tags": fields.List(fields.Nested(tag_model)), - "access_mode": fields.String, - "create_user_name": fields.String, - "author_name": fields.String, - "has_draft_trigger": fields.Boolean, - }, -) -app_detail_model = console_ns.model( - "AppDetail", - { - "id": fields.String, - "name": fields.String, - "description": fields.String, - "mode": fields.String(attribute="mode_compatible_with_agent"), - "icon": fields.String, - "icon_background": fields.String, - "enable_site": fields.Boolean, - "enable_api": fields.Boolean, - "model_config": fields.Nested(model_config_model, attribute="app_model_config", allow_null=True), - "workflow": fields.Nested(workflow_partial_model, allow_null=True), - "tracing": fields.Raw, - "use_icon_as_answer_icon": fields.Boolean, - "created_by": fields.String, - "created_at": TimestampField, - "updated_by": fields.String, - "updated_at": TimestampField, - "access_mode": fields.String, - "tags": fields.List(fields.Nested(tag_model)), - }, -) +class WorkflowPartial(ResponseModel): + id: str + created_by: str | None = None + created_at: int | None = None + updated_by: str | None = None + updated_at: int | None = None -app_detail_with_site_model = console_ns.model( - "AppDetailWithSite", - { - "id": fields.String, - "name": fields.String, - "description": fields.String, - "mode": fields.String(attribute="mode_compatible_with_agent"), - "icon_type": fields.String, - "icon": fields.String, - "icon_background": fields.String, - "icon_url": AppIconUrlField, - "enable_site": fields.Boolean, - "enable_api": fields.Boolean, - "model_config": fields.Nested(model_config_model, attribute="app_model_config", allow_null=True), - "workflow": 
fields.Nested(workflow_partial_model, allow_null=True), - "api_base_url": fields.String, - "use_icon_as_answer_icon": fields.Boolean, - "max_active_requests": fields.Integer, - "created_by": fields.String, - "created_at": TimestampField, - "updated_by": fields.String, - "updated_at": TimestampField, - "deleted_tools": fields.List(fields.Nested(deleted_tool_model)), - "access_mode": fields.String, - "tags": fields.List(fields.Nested(tag_model)), - "site": fields.Nested(site_model), - }, -) + @field_validator("created_at", "updated_at", mode="before") + @classmethod + def _normalize_timestamp(cls, value: datetime | int | None) -> int | None: + return _to_timestamp(value) -app_pagination_model = console_ns.model( - "AppPagination", - { - "page": fields.Integer, - "limit": fields.Integer(attribute="per_page"), - "total": fields.Integer, - "has_more": fields.Boolean(attribute="has_next"), - "data": fields.List(fields.Nested(app_partial_model), attribute="items"), - }, + +class ModelConfigPartial(ResponseModel): + model: JSONValue | None = Field(default=None, validation_alias=AliasChoices("model_dict", "model")) + pre_prompt: str | None = None + created_by: str | None = None + created_at: int | None = None + updated_by: str | None = None + updated_at: int | None = None + + @field_validator("created_at", "updated_at", mode="before") + @classmethod + def _normalize_timestamp(cls, value: datetime | int | None) -> int | None: + return _to_timestamp(value) + + +class ModelConfig(ResponseModel): + opening_statement: str | None = None + suggested_questions: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("suggested_questions_list", "suggested_questions") + ) + suggested_questions_after_answer: JSONValue | None = Field( + default=None, + validation_alias=AliasChoices("suggested_questions_after_answer_dict", "suggested_questions_after_answer"), + ) + speech_to_text: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("speech_to_text_dict", "speech_to_text") + ) + text_to_speech: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("text_to_speech_dict", "text_to_speech") + ) + retriever_resource: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("retriever_resource_dict", "retriever_resource") + ) + annotation_reply: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("annotation_reply_dict", "annotation_reply") + ) + more_like_this: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("more_like_this_dict", "more_like_this") + ) + sensitive_word_avoidance: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("sensitive_word_avoidance_dict", "sensitive_word_avoidance") + ) + external_data_tools: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("external_data_tools_list", "external_data_tools") + ) + model: JSONValue | None = Field(default=None, validation_alias=AliasChoices("model_dict", "model")) + user_input_form: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("user_input_form_list", "user_input_form") + ) + dataset_query_variable: str | None = None + pre_prompt: str | None = None + agent_mode: JSONValue | None = Field(default=None, validation_alias=AliasChoices("agent_mode_dict", "agent_mode")) + prompt_type: str | None = None + chat_prompt_config: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("chat_prompt_config_dict", "chat_prompt_config") + ) + completion_prompt_config: JSONValue | 
None = Field( + default=None, validation_alias=AliasChoices("completion_prompt_config_dict", "completion_prompt_config") + ) + dataset_configs: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("dataset_configs_dict", "dataset_configs") + ) + file_upload: JSONValue | None = Field( + default=None, validation_alias=AliasChoices("file_upload_dict", "file_upload") + ) + created_by: str | None = None + created_at: int | None = None + updated_by: str | None = None + updated_at: int | None = None + + @field_validator("created_at", "updated_at", mode="before") + @classmethod + def _normalize_timestamp(cls, value: datetime | int | None) -> int | None: + return _to_timestamp(value) + + +class Site(ResponseModel): + access_token: str | None = Field(default=None, validation_alias="code") + code: str | None = None + title: str | None = None + icon_type: str | IconType | None = None + icon: str | None = None + icon_background: str | None = None + description: str | None = None + default_language: str | None = None + chat_color_theme: str | None = None + chat_color_theme_inverted: bool | None = None + customize_domain: str | None = None + copyright: str | None = None + privacy_policy: str | None = None + custom_disclaimer: str | None = None + customize_token_strategy: str | None = None + prompt_public: bool | None = None + app_base_url: str | None = None + show_workflow_steps: bool | None = None + use_icon_as_answer_icon: bool | None = None + created_by: str | None = None + created_at: int | None = None + updated_by: str | None = None + updated_at: int | None = None + + @computed_field(return_type=str | None) # type: ignore + @property + def icon_url(self) -> str | None: + return _build_icon_url(self.icon_type, self.icon) + + @field_validator("icon_type", mode="before") + @classmethod + def _normalize_icon_type(cls, value: str | IconType | None) -> str | None: + if isinstance(value, IconType): + return value.value + return value + + @field_validator("created_at", "updated_at", mode="before") + @classmethod + def _normalize_timestamp(cls, value: datetime | int | None) -> int | None: + return _to_timestamp(value) + + +class DeletedTool(ResponseModel): + type: str + tool_name: str + provider_id: str + + +class AppPartial(ResponseModel): + id: str + name: str + max_active_requests: int | None = None + description: str | None = Field(default=None, validation_alias=AliasChoices("desc_or_prompt", "description")) + mode: str = Field(validation_alias="mode_compatible_with_agent") + icon_type: str | None = None + icon: str | None = None + icon_background: str | None = None + model_config_: ModelConfigPartial | None = Field( + default=None, + validation_alias=AliasChoices("app_model_config", "model_config"), + alias="model_config", + ) + workflow: WorkflowPartial | None = None + use_icon_as_answer_icon: bool | None = None + created_by: str | None = None + created_at: int | None = None + updated_by: str | None = None + updated_at: int | None = None + tags: list[Tag] = Field(default_factory=list) + access_mode: str | None = None + create_user_name: str | None = None + author_name: str | None = None + has_draft_trigger: bool | None = None + + @computed_field(return_type=str | None) # type: ignore + @property + def icon_url(self) -> str | None: + return _build_icon_url(self.icon_type, self.icon) + + @field_validator("created_at", "updated_at", mode="before") + @classmethod + def _normalize_timestamp(cls, value: datetime | int | None) -> int | None: + return _to_timestamp(value) + + +class 
AppDetail(ResponseModel): + id: str + name: str + description: str | None = None + mode: str = Field(validation_alias="mode_compatible_with_agent") + icon: str | None = None + icon_background: str | None = None + enable_site: bool + enable_api: bool + model_config_: ModelConfig | None = Field( + default=None, + validation_alias=AliasChoices("app_model_config", "model_config"), + alias="model_config", + ) + workflow: WorkflowPartial | None = None + tracing: JSONValue | None = None + use_icon_as_answer_icon: bool | None = None + created_by: str | None = None + created_at: int | None = None + updated_by: str | None = None + updated_at: int | None = None + access_mode: str | None = None + tags: list[Tag] = Field(default_factory=list) + + @field_validator("created_at", "updated_at", mode="before") + @classmethod + def _normalize_timestamp(cls, value: datetime | int | None) -> int | None: + return _to_timestamp(value) + + +class AppDetailWithSite(AppDetail): + icon_type: str | None = None + api_base_url: str | None = None + max_active_requests: int | None = None + deleted_tools: list[DeletedTool] = Field(default_factory=list) + site: Site | None = None + + @computed_field(return_type=str | None) # type: ignore + @property + def icon_url(self) -> str | None: + return _build_icon_url(self.icon_type, self.icon) + + +class AppPagination(ResponseModel): + page: int + limit: int = Field(validation_alias=AliasChoices("per_page", "limit")) + total: int + has_more: bool = Field(validation_alias=AliasChoices("has_next", "has_more")) + data: list[AppPartial] = Field(validation_alias=AliasChoices("items", "data")) + + +class AppExportResponse(ResponseModel): + data: str + + +register_schema_models( + console_ns, + AppListQuery, + CreateAppPayload, + UpdateAppPayload, + CopyAppPayload, + AppExportQuery, + AppNamePayload, + AppIconPayload, + AppSiteStatusPayload, + AppApiStatusPayload, + AppTracePayload, + Tag, + WorkflowPartial, + ModelConfigPartial, + ModelConfig, + Site, + DeletedTool, + AppPartial, + AppDetail, + AppDetailWithSite, + AppPagination, + AppExportResponse, ) @@ -260,7 +480,7 @@ class AppListApi(Resource): @console_ns.doc("list_apps") @console_ns.doc(description="Get list of applications with pagination and filtering") @console_ns.expect(console_ns.models[AppListQuery.__name__]) - @console_ns.response(200, "Success", app_pagination_model) + @console_ns.response(200, "Success", console_ns.models[AppPagination.__name__]) @setup_required @login_required @account_initialization_required @@ -276,7 +496,8 @@ class AppListApi(Resource): app_service = AppService() app_pagination = app_service.get_paginate_apps(current_user.id, current_tenant_id, args_dict) if not app_pagination: - return {"data": [], "total": 0, "page": 1, "limit": 20, "has_more": False} + empty = AppPagination(page=args.page, limit=args.limit, total=0, has_more=False, data=[]) + return empty.model_dump(mode="json"), 200 if FeatureService.get_system_features().webapp_auth.enabled: app_ids = [str(app.id) for app in app_pagination.items] @@ -320,18 +541,18 @@ class AppListApi(Resource): for app in app_pagination.items: app.has_draft_trigger = str(app.id) in draft_trigger_app_ids - return marshal(app_pagination, app_pagination_model), 200 + pagination_model = AppPagination.model_validate(app_pagination, from_attributes=True) + return pagination_model.model_dump(mode="json"), 200 @console_ns.doc("create_app") @console_ns.doc(description="Create a new application") @console_ns.expect(console_ns.models[CreateAppPayload.__name__]) - 
@console_ns.response(201, "App created successfully", app_detail_model) + @console_ns.response(201, "App created successfully", console_ns.models[AppDetail.__name__]) @console_ns.response(403, "Insufficient permissions") @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required - @marshal_with(app_detail_model) @cloud_edition_billing_resource_check("apps") @edit_permission_required def post(self): @@ -341,8 +562,8 @@ class AppListApi(Resource): app_service = AppService() app = app_service.create_app(current_tenant_id, args.model_dump(), current_user) - - return app, 201 + app_detail = AppDetail.model_validate(app, from_attributes=True) + return app_detail.model_dump(mode="json"), 201 @console_ns.route("/apps/") @@ -350,13 +571,12 @@ class AppApi(Resource): @console_ns.doc("get_app_detail") @console_ns.doc(description="Get application details") @console_ns.doc(params={"app_id": "Application ID"}) - @console_ns.response(200, "Success", app_detail_with_site_model) + @console_ns.response(200, "Success", console_ns.models[AppDetailWithSite.__name__]) @setup_required @login_required @account_initialization_required @enterprise_license_required - @get_app_model - @marshal_with(app_detail_with_site_model) + @get_app_model(mode=None) def get(self, app_model): """Get app detail""" app_service = AppService() @@ -367,21 +587,21 @@ class AppApi(Resource): app_setting = EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id=str(app_model.id)) app_model.access_mode = app_setting.access_mode - return app_model + response_model = AppDetailWithSite.model_validate(app_model, from_attributes=True) + return response_model.model_dump(mode="json") @console_ns.doc("update_app") @console_ns.doc(description="Update application details") @console_ns.doc(params={"app_id": "Application ID"}) @console_ns.expect(console_ns.models[UpdateAppPayload.__name__]) - @console_ns.response(200, "App updated successfully", app_detail_with_site_model) + @console_ns.response(200, "App updated successfully", console_ns.models[AppDetailWithSite.__name__]) @console_ns.response(403, "Insufficient permissions") @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required - @get_app_model + @get_app_model(mode=None) @edit_permission_required - @marshal_with(app_detail_with_site_model) def put(self, app_model): """Update app""" args = UpdateAppPayload.model_validate(console_ns.payload) @@ -398,8 +618,8 @@ class AppApi(Resource): "max_active_requests": args.max_active_requests or 0, } app_model = app_service.update_app(app_model, args_dict) - - return app_model + response_model = AppDetailWithSite.model_validate(app_model, from_attributes=True) + return response_model.model_dump(mode="json") @console_ns.doc("delete_app") @console_ns.doc(description="Delete application") @@ -425,14 +645,13 @@ class AppCopyApi(Resource): @console_ns.doc(description="Create a copy of an existing application") @console_ns.doc(params={"app_id": "Application ID to copy"}) @console_ns.expect(console_ns.models[CopyAppPayload.__name__]) - @console_ns.response(201, "App copied successfully", app_detail_with_site_model) + @console_ns.response(201, "App copied successfully", console_ns.models[AppDetailWithSite.__name__]) @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required - @get_app_model + @get_app_model(mode=None) @edit_permission_required - 
@console_ns.doc("delete_app") @console_ns.doc(description="Delete application") @@ -425,14 +645,13 @@ class AppCopyApi(Resource): @console_ns.doc(description="Create a copy of an existing application") @console_ns.doc(params={"app_id": "Application ID to copy"}) @console_ns.expect(console_ns.models[CopyAppPayload.__name__]) - @console_ns.response(201, "App copied successfully", app_detail_with_site_model) + @console_ns.response(201, "App copied successfully", console_ns.models[AppDetailWithSite.__name__]) @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required - @get_app_model + @get_app_model(mode=None) @edit_permission_required - @marshal_with(app_detail_with_site_model) def post(self, app_model): """Copy app""" # The role of the current user in the ta table must be admin, owner, or editor @@ -458,7 +677,8 @@ class AppCopyApi(Resource): stmt = select(App).where(App.id == result.app_id) app = session.scalar(stmt) - return app, 201 + response_model = AppDetailWithSite.model_validate(app, from_attributes=True) + return response_model.model_dump(mode="json"), 201 @console_ns.route("/apps/<uuid:app_id>/export") @@ -467,11 +687,7 @@ class AppExportApi(Resource): @console_ns.doc(description="Export application configuration as DSL") @console_ns.doc(params={"app_id": "Application ID to export"}) @console_ns.expect(console_ns.models[AppExportQuery.__name__]) - @console_ns.response( - 200, - "App exported successfully", - console_ns.model("AppExportResponse", {"data": fields.String(description="DSL export data")}), - ) + @console_ns.response(200, "App exported successfully", console_ns.models[AppExportResponse.__name__]) @console_ns.response(403, "Insufficient permissions") @get_app_model @setup_required @@ -482,13 +698,14 @@ class AppExportApi(Resource): """Export app""" args = AppExportQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore - return { - "data": AppDslService.export_dsl( + payload = AppExportResponse( + data=AppDslService.export_dsl( app_model=app_model, include_secret=args.include_secret, workflow_id=args.workflow_id, ) - } + ) + return payload.model_dump(mode="json") @console_ns.route("/apps/<uuid:app_id>/name") @@ -497,20 +714,19 @@ class AppNameApi(Resource): @console_ns.doc(description="Check if app name is available") @console_ns.doc(params={"app_id": "Application ID"}) @console_ns.expect(console_ns.models[AppNamePayload.__name__]) - @console_ns.response(200, "Name availability checked") + @console_ns.response(200, "Name availability checked", console_ns.models[AppDetail.__name__]) @setup_required @login_required @account_initialization_required - @get_app_model - @marshal_with(app_detail_model) + @get_app_model(mode=None) @edit_permission_required def post(self, app_model): args = AppNamePayload.model_validate(console_ns.payload) app_service = AppService() app_model = app_service.update_app_name(app_model, args.name) - - return app_model + response_model = AppDetail.model_validate(app_model, from_attributes=True) + return response_model.model_dump(mode="json") @console_ns.route("/apps/<uuid:app_id>/icon") @@ -524,16 +740,15 @@ class AppIconApi(Resource): @setup_required @login_required @account_initialization_required - @get_app_model - @marshal_with(app_detail_model) + @get_app_model(mode=None) @edit_permission_required def post(self, app_model): args = AppIconPayload.model_validate(console_ns.payload or {}) app_service = AppService() app_model = app_service.update_app_icon(app_model, args.icon or "", args.icon_background or "") - - return app_model + response_model = AppDetail.model_validate(app_model, from_attributes=True) + return response_model.model_dump(mode="json")
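The icon endpoint above recomputes `icon_url` through the `_build_icon_url` helper defined earlier: only icons of type `image` get a signed URL, everything else yields `None`. A small sketch; the signed-URL case needs file storage configured, so it is left as a comment:

```python
assert _build_icon_url(None, None) is None
assert _build_icon_url("emoji", "🤖") is None  # non-image icon types produce no URL

# An image-type icon would be signed via file_helpers.get_signed_file_url:
# signed_url = _build_icon_url(IconType.IMAGE, uploaded_file_id)
```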
@console_ns.route("/apps/<uuid:app_id>/site-enable") @@ -542,21 +757,20 @@ class AppSiteStatus(Resource): @console_ns.doc(description="Enable or disable app site") @console_ns.doc(params={"app_id": "Application ID"}) @console_ns.expect(console_ns.models[AppSiteStatusPayload.__name__]) - @console_ns.response(200, "Site status updated successfully", app_detail_model) + @console_ns.response(200, "Site status updated successfully", console_ns.models[AppDetail.__name__]) @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required - @get_app_model - @marshal_with(app_detail_model) + @get_app_model(mode=None) @edit_permission_required def post(self, app_model): args = AppSiteStatusPayload.model_validate(console_ns.payload) app_service = AppService() app_model = app_service.update_app_site_status(app_model, args.enable_site) - - return app_model + response_model = AppDetail.model_validate(app_model, from_attributes=True) + return response_model.model_dump(mode="json") @console_ns.route("/apps/<uuid:app_id>/api-enable") @@ -565,21 +779,20 @@ class AppApiStatus(Resource): @console_ns.doc(description="Enable or disable app API") @console_ns.doc(params={"app_id": "Application ID"}) @console_ns.expect(console_ns.models[AppApiStatusPayload.__name__]) - @console_ns.response(200, "API status updated successfully", app_detail_model) + @console_ns.response(200, "API status updated successfully", console_ns.models[AppDetail.__name__]) @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @is_admin_or_owner_required @account_initialization_required - @get_app_model - @marshal_with(app_detail_model) + @get_app_model(mode=None) def post(self, app_model): args = AppApiStatusPayload.model_validate(console_ns.payload) app_service = AppService() app_model = app_service.update_app_api_status(app_model, args.enable_api) - - return app_model + response_model = AppDetail.model_validate(app_model, from_attributes=True) + return response_model.model_dump(mode="json") @console_ns.route("/apps/<uuid:app_id>/trace") diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index c16dcfd91f..56816dd462 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -13,7 +13,6 @@ from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db -from fields.conversation_fields import MessageTextField from fields.raws import FilesContainedField from libs.datetime_utils import naive_utc_now, parse_time_range from libs.helper import TimestampField @@ -177,6 +176,12 @@ annotation_hit_history_model = console_ns.model( }, ) + +class MessageTextField(fields.Raw): + def format(self, value): + return value[0]["text"] if value else "" + + # Simple message detail model simple_message_detail_model = console_ns.model( "SimpleMessageDetail", @@ -343,10 +348,13 @@ class CompletionConversationApi(Resource): ) if args.keyword: + from libs.helper import escape_like_pattern + + escaped_keyword = escape_like_pattern(args.keyword) query = query.join(Message, Message.conversation_id == Conversation.id).where( or_( - Message.query.ilike(f"%{args.keyword}%"), - Message.answer.ilike(f"%{args.keyword}%"), + Message.query.ilike(f"%{escaped_keyword}%", escape="\\"), + Message.answer.ilike(f"%{escaped_keyword}%", escape="\\"), ) ) @@ -455,7 +463,10 @@ class ChatConversationApi(Resource): query = sa.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.is_deleted.is_(False)) if args.keyword: - keyword_filter = f"%{args.keyword}%" + from libs.helper import escape_like_pattern + + escaped_keyword = escape_like_pattern(args.keyword) + keyword_filter = f"%{escaped_keyword}%" query = ( query.join( Message, @@ -464,11 +475,11 @@ class ChatConversationApi(Resource): .join(subquery, subquery.c.conversation_id == Conversation.id) 
.where( or_( - Message.query.ilike(keyword_filter), - Message.answer.ilike(keyword_filter), - Conversation.name.ilike(keyword_filter), - Conversation.introduction.ilike(keyword_filter), - subquery.c.from_end_user_session_id.ilike(keyword_filter), + Message.query.ilike(keyword_filter, escape="\\"), + Message.answer.ilike(keyword_filter, escape="\\"), + Conversation.name.ilike(keyword_filter, escape="\\"), + Conversation.introduction.ilike(keyword_filter, escape="\\"), + subquery.c.from_end_user_session_id.ilike(keyword_filter, escape="\\"), ), ) .group_by(Conversation.id) diff --git a/api/controllers/console/app/error.py b/api/controllers/console/app/error.py index fbd7901646..6b4bd6755a 100644 --- a/api/controllers/console/app/error.py +++ b/api/controllers/console/app/error.py @@ -115,3 +115,9 @@ class InvokeRateLimitError(BaseHTTPException): error_code = "rate_limit_error" description = "Rate Limit Error" code = 429 + + +class NeedAddIdsError(BaseHTTPException): + error_code = "need_add_ids" + description = "Need to add ids." + code = 400 diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index 12ada8b798..66f4524156 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -202,6 +202,7 @@ message_detail_model = console_ns.model( "status": fields.String, "error": fields.String, "parent_message_id": fields.String, + "generation_detail": fields.Raw, }, ) diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py index 9bb2718f89..e687d980fa 100644 --- a/api/controllers/console/app/wraps.py +++ b/api/controllers/console/app/wraps.py @@ -23,6 +23,11 @@ def _load_app_model(app_id: str) -> App | None: return app_model +def _load_app_model_with_trial(app_id: str) -> App | None: + app_model = db.session.query(App).where(App.id == app_id, App.status == "normal").first() + return app_model + + def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None): def decorator(view_func: Callable[P1, R1]): @wraps(view_func) @@ -62,3 +67,44 @@ def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, li return decorator else: return decorator(view) + + +def get_app_model_with_trial(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None): + def decorator(view_func: Callable[P, R]): + @wraps(view_func) + def decorated_view(*args: P.args, **kwargs: P.kwargs): + if not kwargs.get("app_id"): + raise ValueError("missing app_id in path parameters") + + app_id = kwargs.get("app_id") + app_id = str(app_id) + + del kwargs["app_id"] + + app_model = _load_app_model_with_trial(app_id) + + if not app_model: + raise AppNotFoundError() + + app_mode = AppMode.value_of(app_model.mode) + + if mode is not None: + if isinstance(mode, list): + modes = mode + else: + modes = [mode] + + if app_mode not in modes: + mode_values = {m.value for m in modes} + raise AppNotFoundError(f"App mode is not in the supported list: {mode_values}") + + kwargs["app_model"] = app_model + + return view_func(*args, **kwargs) + + return decorated_view + + if view is None: + return decorator + else: + return decorator(view) diff --git a/api/controllers/console/auth/activate.py b/api/controllers/console/auth/activate.py index 6834656a7f..fe70d930fb 100644 --- a/api/controllers/console/auth/activate.py +++ b/api/controllers/console/auth/activate.py @@ -7,9 +7,9 @@ from controllers.console import console_ns from controllers.console.error import 
AlreadyActivateError from extensions.ext_database import db from libs.datetime_utils import naive_utc_now -from libs.helper import EmailStr, extract_remote_ip, timezone +from libs.helper import EmailStr, timezone from models import AccountStatus -from services.account_service import AccountService, RegisterService +from services.account_service import RegisterService DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}" @@ -93,7 +93,6 @@ class ActivateApi(Resource): "ActivationResponse", { "result": fields.String(description="Operation result"), - "data": fields.Raw(description="Login token data"), }, ), ) @@ -117,6 +116,4 @@ class ActivateApi(Resource): account.initialized_at = naive_utc_now() db.session.commit() - token_pair = AccountService.login(account, ip_address=extract_remote_ip(request)) - - return {"result": "success", "data": token_pair.model_dump()} + return {"result": "success"} diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index f486f4c313..4a52bf8abe 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -1,3 +1,5 @@ +from typing import Any + import flask_login from flask import make_response, request from flask_restx import Resource @@ -22,7 +24,12 @@ from controllers.console.error import ( NotAllowedCreateWorkspace, WorkspacesLimitExceeded, ) -from controllers.console.wraps import email_password_login_enabled, setup_required +from controllers.console.wraps import ( + decrypt_code_field, + decrypt_password_field, + email_password_login_enabled, + setup_required, +) from events.tenant_event import tenant_was_created from libs.helper import EmailStr, extract_remote_ip from libs.login import current_account_with_tenant @@ -79,6 +86,7 @@ class LoginApi(Resource): @setup_required @email_password_login_enabled @console_ns.expect(console_ns.models[LoginPayload.__name__]) + @decrypt_password_field def post(self): """Authenticate user and login.""" args = LoginPayload.model_validate(console_ns.payload) @@ -90,14 +98,13 @@ class LoginApi(Resource): if is_login_error_rate_limit: raise EmailPasswordLoginLimitError() - # TODO: why invitation is re-assigned with different type? 
- invitation = args.invite_token # type: ignore - if invitation: - invitation = RegisterService.get_invitation_if_token_valid(None, args.email, invitation) # type: ignore + invitation_data: dict[str, Any] | None = None + if args.invite_token: + invitation_data = RegisterService.get_invitation_if_token_valid(None, args.email, args.invite_token) try: - if invitation: - data = invitation.get("data", {}) # type: ignore + if invitation_data: + data = invitation_data.get("data", {}) invitee_email = data.get("email") if data else None if invitee_email != args.email: raise InvalidEmailError() @@ -218,6 +225,7 @@ class EmailCodeLoginSendEmailApi(Resource): class EmailCodeLoginApi(Resource): @setup_required @console_ns.expect(console_ns.models[EmailCodeLoginPayload.__name__]) + @decrypt_code_field def post(self): args = EmailCodeLoginPayload.model_validate(console_ns.payload) diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index 7ad1e56373..4a152444f2 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -124,7 +124,7 @@ class OAuthCallback(Resource): return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin/invite-settings?invite_token={invite_token}") try: - account = _generate_account(provider, user_info) + account, oauth_new_user = _generate_account(provider, user_info) except AccountNotFoundError: return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Account not found.") except (WorkSpaceNotFoundError, WorkSpaceNotAllowedCreateError): @@ -159,7 +159,7 @@ class OAuthCallback(Resource): ip_address=extract_remote_ip(request), ) - response = redirect(f"{dify_config.CONSOLE_WEB_URL}") + response = redirect(f"{dify_config.CONSOLE_WEB_URL}?oauth_new_user={str(oauth_new_user).lower()}") set_access_token_to_cookie(request, response, token_pair.access_token) set_refresh_token_to_cookie(request, response, token_pair.refresh_token) @@ -177,9 +177,10 @@ def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> return account -def _generate_account(provider: str, user_info: OAuthUserInfo): +def _generate_account(provider: str, user_info: OAuthUserInfo) -> tuple[Account, bool]: # Get account by openid or email. 
account = _get_account_by_openid_or_email(provider, user_info) + oauth_new_user = False if account: tenants = TenantService.get_join_tenants(account) @@ -193,6 +194,7 @@ def _generate_account(provider: str, user_info: OAuthUserInfo): tenant_was_created.send(new_tenant) if not account: + oauth_new_user = True if not FeatureService.get_system_features().is_allow_register: if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(user_info.email): raise AccountRegisterError( @@ -220,4 +222,4 @@ def _generate_account(provider: str, user_info: OAuthUserInfo): # Link account AccountService.link_account_integrate(provider, user_info.id, account) - return account + return account, oauth_new_user diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py index 7f907dc420..ac039f9c5d 100644 --- a/api/controllers/console/billing/billing.py +++ b/api/controllers/console/billing/billing.py @@ -1,8 +1,9 @@ import base64 +from typing import Literal from flask import request from flask_restx import Resource, fields -from pydantic import BaseModel, Field, field_validator +from pydantic import BaseModel, Field from werkzeug.exceptions import BadRequest from controllers.console import console_ns @@ -15,22 +16,8 @@ DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}" class SubscriptionQuery(BaseModel): - plan: str = Field(..., description="Subscription plan") - interval: str = Field(..., description="Billing interval") - - @field_validator("plan") - @classmethod - def validate_plan(cls, value: str) -> str: - if value not in [CloudPlan.PROFESSIONAL, CloudPlan.TEAM]: - raise ValueError("Invalid plan") - return value - - @field_validator("interval") - @classmethod - def validate_interval(cls, value: str) -> str: - if value not in {"month", "year"}: - raise ValueError("Invalid interval") - return value + plan: Literal[CloudPlan.PROFESSIONAL, CloudPlan.TEAM] = Field(..., description="Subscription plan") + interval: Literal["month", "year"] = Field(..., description="Billing interval") class PartnerTenantsPayload(BaseModel): diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py index 01f268d94d..cd958bbb36 100644 --- a/api/controllers/console/datasets/data_source.py +++ b/api/controllers/console/datasets/data_source.py @@ -140,6 +140,18 @@ class DataSourceNotionListApi(Resource): credential_id = request.args.get("credential_id", default=None, type=str) if not credential_id: raise ValueError("Credential id is required.") + + # Get datasource_parameters from query string (optional, for GitHub and other datasources) + datasource_parameters_str = request.args.get("datasource_parameters", default=None, type=str) + datasource_parameters = {} + if datasource_parameters_str: + try: + datasource_parameters = json.loads(datasource_parameters_str) + if not isinstance(datasource_parameters, dict): + raise ValueError("datasource_parameters must be a JSON object.") + except json.JSONDecodeError: + raise ValueError("Invalid datasource_parameters JSON format.") + datasource_provider_service = DatasourceProviderService() credential = datasource_provider_service.get_datasource_credentials( tenant_id=current_tenant_id, @@ -187,7 +199,7 @@ class DataSourceNotionListApi(Resource): online_document_result: Generator[OnlineDocumentPagesMessage, None, None] = ( datasource_runtime.get_online_document_pages( user_id=current_user.id, - datasource_parameters={}, + datasource_parameters=datasource_parameters, 
provider_type=datasource_runtime.datasource_provider_type(), ) ) @@ -218,14 +230,14 @@ class DataSourceNotionListApi(Resource): @console_ns.route( - "/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview", + "/notion/pages/<uuid:page_id>/<string:page_type>/preview", "/datasets/notion-indexing-estimate", ) class DataSourceNotionApi(Resource): @setup_required @login_required @account_initialization_required - def get(self, workspace_id, page_id, page_type): + def get(self, page_id, page_type): _, current_tenant_id = current_account_with_tenant() credential_id = request.args.get("credential_id", default=None, type=str) @@ -239,11 +251,10 @@ class DataSourceNotionApi(Resource): plugin_id="langgenius/notion_datasource", ) - workspace_id = str(workspace_id) page_id = str(page_id) extractor = NotionExtractor( - notion_workspace_id=workspace_id, + notion_workspace_id="", notion_obj_id=page_id, notion_page_type=page_type, notion_access_token=credential.get("integration_secret"), diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index a322dcfd14..979ab63a58 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -147,7 +147,7 @@ class DatasetUpdatePayload(BaseModel): embedding_model_provider: str | None = None retrieval_model: dict[str, Any] | None = None summary_index_setting: dict[str, Any] | None = None - partial_member_list: list[str] | None = None + partial_member_list: list[dict[str, str]] | None = None external_retrieval_model: dict[str, Any] | None = None external_knowledge_id: str | None = None external_knowledge_api_id: str | None = None @@ -224,6 +224,7 @@ def _get_retrieval_methods_by_vector_type(vector_type: str | None, is_mock: bool VectorType.COUCHBASE, VectorType.OPENGAUSS, VectorType.OCEANBASE, + VectorType.SEEKDB, VectorType.TABLESTORE, VectorType.HUAWEI_CLOUD, VectorType.TENCENT, diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 06c895dc79..5982822713 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -670,7 +670,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): datasource_type=DatasourceType.NOTION, notion_info=NotionInfo.model_validate( { - "credential_id": data_source_info["credential_id"], + "credential_id": data_source_info.get("credential_id"), "notion_workspace_id": data_source_info["notion_workspace_id"], "notion_obj_id": data_source_info["notion_page_id"], "notion_page_type": data_source_info["type"], @@ -849,12 +849,12 @@ class DocumentApi(DocumentResource): elif metadata == "without": dataset_process_rules = DatasetService.get_process_rules(dataset_id) document_process_rules = document.dataset_process_rule.to_dict() if document.dataset_process_rule else {} - data_source_info = document.data_source_detail_dict response = { "id": document.id, "position": document.position, "data_source_type": document.data_source_type, - "data_source_info": data_source_info, + "data_source_info": document.data_source_info_dict, + "data_source_detail_dict": document.data_source_detail_dict, "dataset_process_rule_id": document.dataset_process_rule_id, "dataset_process_rule": dataset_process_rules, "document_process_rule": document_process_rules, @@ -883,12 +883,12 @@ class DocumentApi(DocumentResource): else: dataset_process_rules = DatasetService.get_process_rules(dataset_id) document_process_rules = document.dataset_process_rule.to_dict() if 
-            data_source_info = document.data_source_detail_dict
             response = {
                 "id": document.id,
                 "position": document.position,
                 "data_source_type": document.data_source_type,
-                "data_source_info": data_source_info,
+                "data_source_info": document.data_source_info_dict,
+                "data_source_detail_dict": document.data_source_detail_dict,
                 "dataset_process_rule_id": document.dataset_process_rule_id,
                 "dataset_process_rule": dataset_process_rules,
                 "document_process_rule": document_process_rules,
diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py
index 74a60edb07..423462f966 100644
--- a/api/controllers/console/datasets/datasets_segments.py
+++ b/api/controllers/console/datasets/datasets_segments.py
@@ -3,10 +3,12 @@ import uuid
 from flask import request
 from flask_restx import Resource, marshal
 from pydantic import BaseModel, Field
-from sqlalchemy import select
+from sqlalchemy import String, cast, func, or_, select
+from sqlalchemy.dialects.postgresql import JSONB
 from werkzeug.exceptions import Forbidden, NotFound

 import services
+from configs import dify_config
 from controllers.common.schema import register_schema_models
 from controllers.console import console_ns
 from controllers.console.app.error import ProviderNotInitializeError
@@ -28,6 +30,7 @@ from core.model_runtime.entities.model_entities import ModelType
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
 from fields.segment_fields import child_chunk_fields, segment_fields
+from libs.helper import escape_like_pattern
 from libs.login import current_account_with_tenant, login_required
 from models.dataset import ChildChunk, DocumentSegment, DocumentSegmentSummary
 from models.model import UploadFile
@@ -161,7 +164,31 @@ class DatasetDocumentSegmentListApi(Resource):
             query = query.where(DocumentSegment.hit_count >= hit_count_gte)

         if keyword:
-            query = query.where(DocumentSegment.content.ilike(f"%{keyword}%"))
+            # Escape special characters in keyword to prevent SQL injection via LIKE wildcards
+            escaped_keyword = escape_like_pattern(keyword)
+            # Search in both content and keywords fields
+            # Use database-specific methods for JSON array search
+            if dify_config.SQLALCHEMY_DATABASE_URI_SCHEME == "postgresql":
+                # PostgreSQL: Use jsonb_array_elements_text to properly handle Unicode/Chinese text
+                keywords_condition = func.array_to_string(
+                    func.array(
+                        select(func.jsonb_array_elements_text(cast(DocumentSegment.keywords, JSONB)))
+                        .correlate(DocumentSegment)
+                        .scalar_subquery()
+                    ),
+                    ",",
+                ).ilike(f"%{escaped_keyword}%", escape="\\")
+            else:
+                # MySQL: Cast JSON to string for pattern matching
+                # MySQL stores Chinese text directly in JSON without Unicode escaping
+                keywords_condition = cast(DocumentSegment.keywords, String).ilike(f"%{escaped_keyword}%", escape="\\")
+
+            query = query.where(
+                or_(
+                    DocumentSegment.content.ilike(f"%{escaped_keyword}%", escape="\\"),
+                    keywords_condition,
+                )
+            )

         if args.enabled.lower() != "all":
             if args.enabled.lower() == "true":
diff --git a/api/controllers/console/datasets/hit_testing_base.py b/api/controllers/console/datasets/hit_testing_base.py
index db7c50f422..db1a874437 100644
--- a/api/controllers/console/datasets/hit_testing_base.py
+++ b/api/controllers/console/datasets/hit_testing_base.py
@@ -1,7 +1,7 @@
 import logging
 from typing import Any

-from flask_restx import marshal, reqparse
+from flask_restx import marshal
 from pydantic import BaseModel, Field
 from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
@@ -56,15 +56,10 @@ class DatasetsHitTestingBase:
         HitTestingService.hit_testing_args_check(args)

     @staticmethod
-    def parse_args():
-        parser = (
-            reqparse.RequestParser()
-            .add_argument("query", type=str, required=False, location="json")
-            .add_argument("attachment_ids", type=list, required=False, location="json")
-            .add_argument("retrieval_model", type=dict, required=False, location="json")
-            .add_argument("external_retrieval_model", type=dict, required=False, location="json")
-        )
-        return parser.parse_args()
+    def parse_args(payload: dict[str, Any]) -> dict[str, Any]:
+        """Validate and return hit-testing arguments from an incoming payload."""
+        hit_testing_payload = HitTestingPayload.model_validate(payload or {})
+        return hit_testing_payload.model_dump(exclude_none=True)

     @staticmethod
     def perform_hit_testing(dataset, args):
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
index 46d67f0581..02efc54eea 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
@@ -355,7 +355,7 @@ class PublishedRagPipelineRunApi(Resource):
             pipeline=pipeline,
             user=current_user,
             args=args,
-            invoke_from=InvokeFrom.DEBUGGER if payload.is_preview else InvokeFrom.PUBLISHED,
+            invoke_from=InvokeFrom.DEBUGGER if payload.is_preview else InvokeFrom.PUBLISHED_PIPELINE,
             streaming=streaming,
         )
diff --git a/api/controllers/console/explore/banner.py b/api/controllers/console/explore/banner.py
new file mode 100644
index 0000000000..da306fbc9d
--- /dev/null
+++ b/api/controllers/console/explore/banner.py
@@ -0,0 +1,43 @@
+from flask import request
+from flask_restx import Resource
+
+from controllers.console import api
+from controllers.console.explore.wraps import explore_banner_enabled
+from extensions.ext_database import db
+from models.model import ExporleBanner
+
+
+class BannerApi(Resource):
+    """Resource for banner list."""
+
+    @explore_banner_enabled
+    def get(self):
+        """Get banner list."""
+        language = request.args.get("language", "en-US")
+
+        # Build base query for enabled banners
+        base_query = db.session.query(ExporleBanner).where(ExporleBanner.status == "enabled")
+
+        # Try to get banners in the requested language
+        banners = base_query.where(ExporleBanner.language == language).order_by(ExporleBanner.sort).all()
+
+        # Fallback to en-US if no banners found and language is not en-US
+        if not banners and language != "en-US":
+            banners = base_query.where(ExporleBanner.language == "en-US").order_by(ExporleBanner.sort).all()
+        # Convert banners to serializable format
+        result = []
+        for banner in banners:
+            banner_data = {
+                "id": banner.id,
+                "content": banner.content,  # Already parsed as JSON by SQLAlchemy
+                "link": banner.link,
+                "sort": banner.sort,
+                "status": banner.status,
+                "created_at": banner.created_at.isoformat() if banner.created_at else None,
+            }
+            result.append(banner_data)
+
+        return result
+
+
+api.add_resource(BannerApi, "/explore/banners")
diff --git a/api/controllers/console/explore/completion.py b/api/controllers/console/explore/completion.py
index 5901eca915..a6e5b2822a 100644
--- a/api/controllers/console/explore/completion.py
+++ b/api/controllers/console/explore/completion.py
@@ -40,7 +40,7 @@ from .. import console_ns
 logger = logging.getLogger(__name__)


-class CompletionMessagePayload(BaseModel):
+class CompletionMessageExplorePayload(BaseModel):
     inputs: dict[str, Any]
     query: str = ""
     files: list[dict[str, Any]] | None = None
@@ -71,7 +71,7 @@ class ChatMessagePayload(BaseModel):
             raise ValueError("must be a valid UUID") from exc


-register_schema_models(console_ns, CompletionMessagePayload, ChatMessagePayload)
+register_schema_models(console_ns, CompletionMessageExplorePayload, ChatMessagePayload)


 # define completion api for user
@@ -80,13 +80,13 @@ register_schema_models(console_ns, CompletionMessagePayload, ChatMessagePayload)
     endpoint="installed_app_completion",
 )
 class CompletionApi(InstalledAppResource):
-    @console_ns.expect(console_ns.models[CompletionMessagePayload.__name__])
+    @console_ns.expect(console_ns.models[CompletionMessageExplorePayload.__name__])
     def post(self, installed_app):
         app_model = installed_app.app
         if app_model.mode != AppMode.COMPLETION:
             raise NotCompletionAppError()

-        payload = CompletionMessagePayload.model_validate(console_ns.payload or {})
+        payload = CompletionMessageExplorePayload.model_validate(console_ns.payload or {})
         args = payload.model_dump(exclude_none=True)

         streaming = payload.response_mode == "streaming"
diff --git a/api/controllers/console/explore/conversation.py b/api/controllers/console/explore/conversation.py
index 92da591ab4..933c80f509 100644
--- a/api/controllers/console/explore/conversation.py
+++ b/api/controllers/console/explore/conversation.py
@@ -1,9 +1,7 @@
 from typing import Any
-from uuid import UUID

 from flask import request
-from flask_restx import marshal_with
-from pydantic import BaseModel, Field, model_validator
+from pydantic import BaseModel, Field, TypeAdapter, model_validator
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import NotFound
@@ -12,7 +10,12 @@
 from controllers.console.explore.error import NotChatAppError
 from controllers.console.explore.wraps import InstalledAppResource
 from core.app.entities.app_invoke_entities import InvokeFrom
 from extensions.ext_database import db
-from fields.conversation_fields import conversation_infinite_scroll_pagination_fields, simple_conversation_fields
+from fields.conversation_fields import (
+    ConversationInfiniteScrollPagination,
+    ResultResponse,
+    SimpleConversation,
+)
+from libs.helper import UUIDStrOrEmpty
 from libs.login import current_user
 from models import Account
 from models.model import AppMode
@@ -24,7 +27,7 @@ from .. import console_ns
 class ConversationListQuery(BaseModel):
-    last_id: UUID | None = None
+    last_id: UUIDStrOrEmpty | None = None
     limit: int = Field(default=20, ge=1, le=100)
     pinned: bool | None = None
@@ -49,7 +52,6 @@ register_schema_models(console_ns, ConversationListQuery, ConversationRenamePayl
     endpoint="installed_app_conversations",
 )
 class ConversationListApi(InstalledAppResource):
-    @marshal_with(conversation_infinite_scroll_pagination_fields)
     @console_ns.expect(console_ns.models[ConversationListQuery.__name__])
     def get(self, installed_app):
         app_model = installed_app.app
@@ -73,7 +75,7 @@ class ConversationListApi(InstalledAppResource):
             if not isinstance(current_user, Account):
                 raise ValueError("current_user must be an Account instance")
             with Session(db.engine) as session:
-                return WebConversationService.pagination_by_last_id(
+                pagination = WebConversationService.pagination_by_last_id(
                     session=session,
                     app_model=app_model,
                     user=current_user,
@@ -82,6 +84,13 @@ class ConversationListApi(InstalledAppResource):
                     invoke_from=InvokeFrom.EXPLORE,
                     pinned=args.pinned,
                 )
+            adapter = TypeAdapter(SimpleConversation)
+            conversations = [adapter.validate_python(item, from_attributes=True) for item in pagination.data]
+            return ConversationInfiniteScrollPagination(
+                limit=pagination.limit,
+                has_more=pagination.has_more,
+                data=conversations,
+            ).model_dump(mode="json")
         except LastConversationNotExistsError:
             raise NotFound("Last Conversation Not Exists.")
@@ -105,7 +114,7 @@ class ConversationApi(InstalledAppResource):
         except ConversationNotExistsError:
             raise NotFound("Conversation Not Exists.")

-        return {"result": "success"}, 204
+        return ResultResponse(result="success").model_dump(mode="json"), 204


 @console_ns.route(
@@ -113,7 +122,6 @@
     endpoint="installed_app_conversation_rename",
 )
 class ConversationRenameApi(InstalledAppResource):
-    @marshal_with(simple_conversation_fields)
     @console_ns.expect(console_ns.models[ConversationRenamePayload.__name__])
     def post(self, installed_app, c_id):
         app_model = installed_app.app
@@ -128,9 +136,14 @@ class ConversationRenameApi(InstalledAppResource):
         try:
             if not isinstance(current_user, Account):
                 raise ValueError("current_user must be an Account instance")
-            return ConversationService.rename(
+            conversation = ConversationService.rename(
                 app_model, conversation_id, current_user, payload.name, payload.auto_generate
             )
+            return (
+                TypeAdapter(SimpleConversation)
+                .validate_python(conversation, from_attributes=True)
+                .model_dump(mode="json")
+            )
         except ConversationNotExistsError:
             raise NotFound("Conversation Not Exists.")
@@ -155,7 +168,7 @@ class ConversationPinApi(InstalledAppResource):
         except ConversationNotExistsError:
             raise NotFound("Conversation Not Exists.")

-        return {"result": "success"}
+        return ResultResponse(result="success").model_dump(mode="json")


 @console_ns.route(
@@ -174,4 +187,4 @@ class ConversationUnPinApi(InstalledAppResource):
                 raise ValueError("current_user must be an Account instance")
             WebConversationService.unpin(app_model, conversation_id, current_user)

-        return {"result": "success"}
+        return ResultResponse(result="success").model_dump(mode="json")
diff --git a/api/controllers/console/explore/error.py b/api/controllers/console/explore/error.py
index 1e05ff4206..e96fa64f84 100644
--- a/api/controllers/console/explore/error.py
+++ b/api/controllers/console/explore/error.py
@@ -29,3 +29,25 @@ class AppAccessDeniedError(BaseHTTPException):
     error_code = "access_denied"
     description = "App access denied."
denied." code = 403 + + +class TrialAppNotAllowed(BaseHTTPException): + """*403* `Trial App Not Allowed` + + Raise if the user has reached the trial app limit. + """ + + error_code = "trial_app_not_allowed" + code = 403 + description = "the app is not allowed to be trial." + + +class TrialAppLimitExceeded(BaseHTTPException): + """*403* `Trial App Limit Exceeded` + + Raise if the user has exceeded the trial app limit. + """ + + error_code = "trial_app_limit_exceeded" + code = 403 + description = "The user has exceeded the trial app limit." diff --git a/api/controllers/console/explore/installed_app.py b/api/controllers/console/explore/installed_app.py index 3c95779475..e42db10ba6 100644 --- a/api/controllers/console/explore/installed_app.py +++ b/api/controllers/console/explore/installed_app.py @@ -2,7 +2,8 @@ import logging from typing import Any from flask import request -from flask_restx import Resource, inputs, marshal_with, reqparse +from flask_restx import Resource, marshal_with +from pydantic import BaseModel from sqlalchemy import and_, select from werkzeug.exceptions import BadRequest, Forbidden, NotFound @@ -18,6 +19,15 @@ from services.account_service import TenantService from services.enterprise.enterprise_service import EnterpriseService from services.feature_service import FeatureService + +class InstalledAppCreatePayload(BaseModel): + app_id: str + + +class InstalledAppUpdatePayload(BaseModel): + is_pinned: bool | None = None + + logger = logging.getLogger(__name__) @@ -105,26 +115,25 @@ class InstalledAppsListApi(Resource): @account_initialization_required @cloud_edition_billing_resource_check("apps") def post(self): - parser = reqparse.RequestParser().add_argument("app_id", type=str, required=True, help="Invalid app_id") - args = parser.parse_args() + payload = InstalledAppCreatePayload.model_validate(console_ns.payload or {}) - recommended_app = db.session.query(RecommendedApp).where(RecommendedApp.app_id == args["app_id"]).first() + recommended_app = db.session.query(RecommendedApp).where(RecommendedApp.app_id == payload.app_id).first() if recommended_app is None: - raise NotFound("App not found") + raise NotFound("Recommended app not found") _, current_tenant_id = current_account_with_tenant() - app = db.session.query(App).where(App.id == args["app_id"]).first() + app = db.session.query(App).where(App.id == payload.app_id).first() if app is None: - raise NotFound("App not found") + raise NotFound("App entity not found") if not app.is_public: raise Forbidden("You can't install a non-public app") installed_app = ( db.session.query(InstalledApp) - .where(and_(InstalledApp.app_id == args["app_id"], InstalledApp.tenant_id == current_tenant_id)) + .where(and_(InstalledApp.app_id == payload.app_id, InstalledApp.tenant_id == current_tenant_id)) .first() ) @@ -133,7 +142,7 @@ class InstalledAppsListApi(Resource): recommended_app.install_count += 1 new_installed_app = InstalledApp( - app_id=args["app_id"], + app_id=payload.app_id, tenant_id=current_tenant_id, app_owner_tenant_id=app.tenant_id, is_pinned=False, @@ -163,12 +172,11 @@ class InstalledAppApi(InstalledAppResource): return {"result": "success", "message": "App uninstalled successfully"}, 204 def patch(self, installed_app): - parser = reqparse.RequestParser().add_argument("is_pinned", type=inputs.boolean) - args = parser.parse_args() + payload = InstalledAppUpdatePayload.model_validate(console_ns.payload or {}) commit_args = False - if "is_pinned" in args: - installed_app.is_pinned = args["is_pinned"] + if 
+            installed_app.is_pinned = payload.is_pinned
             commit_args = True

         if commit_args:
diff --git a/api/controllers/console/explore/message.py b/api/controllers/console/explore/message.py
index 229b7c8865..88487ac96f 100644
--- a/api/controllers/console/explore/message.py
+++ b/api/controllers/console/explore/message.py
@@ -1,10 +1,8 @@
 import logging
 from typing import Literal
-from uuid import UUID

 from flask import request
-from flask_restx import marshal_with
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, TypeAdapter
 from werkzeug.exceptions import InternalServerError, NotFound

 from controllers.common.schema import register_schema_models
@@ -24,8 +22,10 @@ from controllers.console.explore.wraps import InstalledAppResource
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
 from core.model_runtime.errors.invoke import InvokeError
-from fields.message_fields import message_infinite_scroll_pagination_fields
+from fields.conversation_fields import ResultResponse
+from fields.message_fields import MessageInfiniteScrollPagination, MessageListItem, SuggestedQuestionsResponse
 from libs import helper
+from libs.helper import UUIDStrOrEmpty
 from libs.login import current_account_with_tenant
 from models.model import AppMode
 from services.app_generate_service import AppGenerateService
@@ -44,8 +44,8 @@ logger = logging.getLogger(__name__)


 class MessageListQuery(BaseModel):
-    conversation_id: UUID
-    first_id: UUID | None = None
+    conversation_id: UUIDStrOrEmpty
+    first_id: UUIDStrOrEmpty | None = None
     limit: int = Field(default=20, ge=1, le=100)
@@ -66,7 +66,6 @@ register_schema_models(console_ns, MessageListQuery, MessageFeedbackPayload, Mor
     endpoint="installed_app_messages",
 )
 class MessageListApi(InstalledAppResource):
-    @marshal_with(message_infinite_scroll_pagination_fields)
     @console_ns.expect(console_ns.models[MessageListQuery.__name__])
     def get(self, installed_app):
         current_user, _ = current_account_with_tenant()
@@ -78,13 +77,20 @@ class MessageListApi(InstalledAppResource):
         args = MessageListQuery.model_validate(request.args.to_dict())

         try:
-            return MessageService.pagination_by_first_id(
+            pagination = MessageService.pagination_by_first_id(
                 app_model,
                 current_user,
                 str(args.conversation_id),
                 str(args.first_id) if args.first_id else None,
                 args.limit,
             )
+            adapter = TypeAdapter(MessageListItem)
+            items = [adapter.validate_python(message, from_attributes=True) for message in pagination.data]
+            return MessageInfiniteScrollPagination(
+                limit=pagination.limit,
+                has_more=pagination.has_more,
+                data=items,
+            ).model_dump(mode="json")
         except ConversationNotExistsError:
             raise NotFound("Conversation Not Exists.")
         except FirstMessageNotExistsError:
@@ -116,7 +122,7 @@ class MessageFeedbackApi(InstalledAppResource):
         except MessageNotExistsError:
             raise NotFound("Message Not Exists.")

-        return {"result": "success"}
+        return ResultResponse(result="success").model_dump(mode="json")


 @console_ns.route(
@@ -201,4 +207,4 @@ class MessageSuggestedQuestionApi(InstalledAppResource):
             logger.exception("internal server error.")
             raise InternalServerError()

-        return {"data": questions}
+        return SuggestedQuestionsResponse(data=questions).model_dump(mode="json")
diff --git a/api/controllers/console/explore/parameter.py b/api/controllers/console/explore/parameter.py
index 9c6b2aedfb..660a4d5aea 100644
--- a/api/controllers/console/explore/parameter.py
+++ b/api/controllers/console/explore/parameter.py
@@ -1,5 +1,3 @@
-from flask_restx import marshal_with
-
 from controllers.common import fields
 from controllers.console import console_ns
 from controllers.console.app.error import AppUnavailableError
@@ -13,7 +11,6 @@ from services.app_service import AppService
 class AppParameterApi(InstalledAppResource):
     """Resource for app variables."""

-    @marshal_with(fields.parameters_fields)
     def get(self, installed_app: InstalledApp):
         """Retrieve app parameters."""
         app_model = installed_app.app
@@ -37,7 +34,8 @@ class AppParameterApi(InstalledAppResource):

         user_input_form = features_dict.get("user_input_form", [])

-        return get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form)
+        parameters = get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form)
+        return fields.Parameters.model_validate(parameters).model_dump(mode="json")


 @console_ns.route("/installed-apps/<uuid:installed_app_id>/meta", endpoint="installed_app_meta")
diff --git a/api/controllers/console/explore/recommended_app.py b/api/controllers/console/explore/recommended_app.py
index 2b2f807694..362513ec1c 100644
--- a/api/controllers/console/explore/recommended_app.py
+++ b/api/controllers/console/explore/recommended_app.py
@@ -29,6 +29,7 @@ recommended_app_fields = {
     "category": fields.String,
     "position": fields.Integer,
     "is_listed": fields.Boolean,
+    "can_trial": fields.Boolean,
 }

 recommended_app_list_fields = {
diff --git a/api/controllers/console/explore/saved_message.py b/api/controllers/console/explore/saved_message.py
index 6a9e274a0e..ea3de91741 100644
--- a/api/controllers/console/explore/saved_message.py
+++ b/api/controllers/console/explore/saved_message.py
@@ -1,55 +1,33 @@
-from uuid import UUID
-
 from flask import request
-from flask_restx import fields, marshal_with
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, TypeAdapter
 from werkzeug.exceptions import NotFound

 from controllers.common.schema import register_schema_models
 from controllers.console import console_ns
 from controllers.console.explore.error import NotCompletionAppError
 from controllers.console.explore.wraps import InstalledAppResource
-from fields.conversation_fields import message_file_fields
-from libs.helper import TimestampField
+from fields.conversation_fields import ResultResponse
+from fields.message_fields import SavedMessageInfiniteScrollPagination, SavedMessageItem
+from libs.helper import UUIDStrOrEmpty
 from libs.login import current_account_with_tenant
 from services.errors.message import MessageNotExistsError
 from services.saved_message_service import SavedMessageService


 class SavedMessageListQuery(BaseModel):
-    last_id: UUID | None = None
+    last_id: UUIDStrOrEmpty | None = None
     limit: int = Field(default=20, ge=1, le=100)


 class SavedMessageCreatePayload(BaseModel):
-    message_id: UUID
+    message_id: UUIDStrOrEmpty


 register_schema_models(console_ns, SavedMessageListQuery, SavedMessageCreatePayload)

-feedback_fields = {"rating": fields.String}
-
-message_fields = {
-    "id": fields.String,
-    "inputs": fields.Raw,
-    "query": fields.String,
-    "answer": fields.String,
-    "message_files": fields.List(fields.Nested(message_file_fields)),
-    "feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True),
-    "created_at": TimestampField,
-}
-

 @console_ns.route("/installed-apps/<uuid:installed_app_id>/saved-messages", endpoint="installed_app_saved_messages")
 class SavedMessageListApi(InstalledAppResource):
-    saved_message_infinite_scroll_pagination_fields = {
-        "limit": fields.Integer,
-        "has_more": fields.Boolean,
-        "data": fields.List(fields.Nested(message_fields)),
-    }
-
-    @marshal_with(saved_message_infinite_scroll_pagination_fields)
     @console_ns.expect(console_ns.models[SavedMessageListQuery.__name__])
     def get(self, installed_app):
         current_user, _ = current_account_with_tenant()
@@ -59,12 +37,19 @@ class SavedMessageListApi(InstalledAppResource):

         args = SavedMessageListQuery.model_validate(request.args.to_dict())

-        return SavedMessageService.pagination_by_last_id(
+        pagination = SavedMessageService.pagination_by_last_id(
             app_model,
             current_user,
             str(args.last_id) if args.last_id else None,
             args.limit,
         )
+        adapter = TypeAdapter(SavedMessageItem)
+        items = [adapter.validate_python(message, from_attributes=True) for message in pagination.data]
+        return SavedMessageInfiniteScrollPagination(
+            limit=pagination.limit,
+            has_more=pagination.has_more,
+            data=items,
+        ).model_dump(mode="json")

     @console_ns.expect(console_ns.models[SavedMessageCreatePayload.__name__])
     def post(self, installed_app):
@@ -80,7 +65,7 @@ class SavedMessageListApi(InstalledAppResource):
         except MessageNotExistsError:
             raise NotFound("Message Not Exists.")

-        return {"result": "success"}
+        return ResultResponse(result="success").model_dump(mode="json")


 @console_ns.route(
@@ -98,4 +83,4 @@ class SavedMessageApi(InstalledAppResource):

         SavedMessageService.delete(app_model, current_user, message_id)

-        return {"result": "success"}, 204
+        return ResultResponse(result="success").model_dump(mode="json"), 204
diff --git a/api/controllers/console/explore/trial.py b/api/controllers/console/explore/trial.py
new file mode 100644
index 0000000000..97d856bebe
--- /dev/null
+++ b/api/controllers/console/explore/trial.py
@@ -0,0 +1,512 @@
+import logging
+from typing import Any, cast
+
+from flask import request
+from flask_restx import Resource, marshal, marshal_with, reqparse
+from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
+
+import services
+from controllers.common.fields import Parameters as ParametersResponse
+from controllers.common.fields import Site as SiteResponse
+from controllers.console import api
+from controllers.console.app.error import (
+    AppUnavailableError,
+    AudioTooLargeError,
+    CompletionRequestError,
+    ConversationCompletedError,
+    NeedAddIdsError,
+    NoAudioUploadedError,
+    ProviderModelCurrentlyNotSupportError,
+    ProviderNotInitializeError,
+    ProviderNotSupportSpeechToTextError,
+    ProviderQuotaExceededError,
+    UnsupportedAudioTypeError,
+)
+from controllers.console.app.wraps import get_app_model_with_trial
+from controllers.console.explore.error import (
+    AppSuggestedQuestionsAfterAnswerDisabledError,
+    NotChatAppError,
+    NotCompletionAppError,
+    NotWorkflowAppError,
+)
+from controllers.console.explore.wraps import TrialAppResource, trial_feature_enable
+from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
+from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict
+from core.app.apps.base_app_queue_manager import AppQueueManager
+from core.app.entities.app_invoke_entities import InvokeFrom
+from core.errors.error import (
+    ModelCurrentlyNotSupportError,
+    ProviderTokenNotInitError,
+    QuotaExceededError,
+)
+from core.model_runtime.errors.invoke import InvokeError
+from core.workflow.graph_engine.manager import GraphEngineManager
+from extensions.ext_database import db
+from fields.app_fields import app_detail_fields_with_site
+from fields.dataset_fields import dataset_fields
+from fields.workflow_fields import workflow_fields
+from libs import helper
+from libs.helper import uuid_value
+from libs.login import current_user
+from models import Account
+from models.account import TenantStatus
+from models.model import AppMode, Site
+from models.workflow import Workflow
+from services.app_generate_service import AppGenerateService
+from services.app_service import AppService
+from services.audio_service import AudioService
+from services.dataset_service import DatasetService
+from services.errors.audio import (
+    AudioTooLargeServiceError,
+    NoAudioUploadedServiceError,
+    ProviderNotSupportSpeechToTextServiceError,
+    UnsupportedAudioTypeServiceError,
+)
+from services.errors.conversation import ConversationNotExistsError
+from services.errors.llm import InvokeRateLimitError
+from services.errors.message import (
+    MessageNotExistsError,
+    SuggestedQuestionsAfterAnswerDisabledError,
+)
+from services.message_service import MessageService
+from services.recommended_app_service import RecommendedAppService
+
+logger = logging.getLogger(__name__)
+
+
+class TrialAppWorkflowRunApi(TrialAppResource):
+    def post(self, trial_app):
+        """
+        Run workflow
+        """
+        app_model = trial_app
+        if not app_model:
+            raise NotWorkflowAppError()
+        app_mode = AppMode.value_of(app_model.mode)
+        if app_mode != AppMode.WORKFLOW:
+            raise NotWorkflowAppError()
+
+        parser = reqparse.RequestParser()
+        parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
+        parser.add_argument("files", type=list, required=False, location="json")
+        args = parser.parse_args()
+        assert current_user is not None
+        try:
+            app_id = app_model.id
+            user_id = current_user.id
+            response = AppGenerateService.generate(
+                app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.EXPLORE, streaming=True
+            )
+            RecommendedAppService.add_trial_app_record(app_id, user_id)
+            return helper.compact_generate_response(response)
+        except ProviderTokenNotInitError as ex:
+            raise ProviderNotInitializeError(ex.description)
+        except QuotaExceededError:
+            raise ProviderQuotaExceededError()
+        except ModelCurrentlyNotSupportError:
+            raise ProviderModelCurrentlyNotSupportError()
+        except InvokeError as e:
+            raise CompletionRequestError(e.description)
+        except InvokeRateLimitError as ex:
+            raise InvokeRateLimitHttpError(ex.description)
+        except ValueError as e:
+            raise e
+        except Exception:
+            logger.exception("internal server error.")
+            raise InternalServerError()
+
+
+class TrialAppWorkflowTaskStopApi(TrialAppResource):
+    def post(self, trial_app, task_id: str):
+        """
+        Stop workflow task
+        """
+        app_model = trial_app
+        if not app_model:
+            raise NotWorkflowAppError()
+        app_mode = AppMode.value_of(app_model.mode)
+        if app_mode != AppMode.WORKFLOW:
+            raise NotWorkflowAppError()
+        assert current_user is not None
+
+        # Stop using both mechanisms for backward compatibility
+        # Legacy stop flag mechanism (without user check)
+        AppQueueManager.set_stop_flag_no_user_check(task_id)
+
+        # New graph engine command channel mechanism
+        GraphEngineManager.send_stop_command(task_id)
+
+        return {"result": "success"}
+
+
+class TrialChatApi(TrialAppResource):
+    @trial_feature_enable
+    def post(self, trial_app):
+        app_model = trial_app
+        app_mode = AppMode.value_of(app_model.mode)
+        if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
+            raise NotChatAppError()
+
+        parser = reqparse.RequestParser()
+        parser.add_argument("inputs", type=dict, required=True, location="json")
+        parser.add_argument("query", type=str, required=True, location="json")
+        parser.add_argument("files", type=list, required=False, location="json")
+        parser.add_argument("conversation_id", type=uuid_value, location="json")
+        parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json")
+        parser.add_argument("retriever_from", type=str, required=False, default="explore_app", location="json")
+        args = parser.parse_args()
+
+        args["auto_generate_name"] = False
+
+        try:
+            if not isinstance(current_user, Account):
+                raise ValueError("current_user must be an Account instance")
+
+            # Get IDs before they might be detached from session
+            app_id = app_model.id
+            user_id = current_user.id
+
+            response = AppGenerateService.generate(
+                app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.EXPLORE, streaming=True
+            )
+            RecommendedAppService.add_trial_app_record(app_id, user_id)
+            return helper.compact_generate_response(response)
+        except services.errors.conversation.ConversationNotExistsError:
+            raise NotFound("Conversation Not Exists.")
+        except services.errors.conversation.ConversationCompletedError:
+            raise ConversationCompletedError()
+        except services.errors.app_model_config.AppModelConfigBrokenError:
+            logger.exception("App model config broken.")
+            raise AppUnavailableError()
+        except ProviderTokenNotInitError as ex:
+            raise ProviderNotInitializeError(ex.description)
+        except QuotaExceededError:
+            raise ProviderQuotaExceededError()
+        except ModelCurrentlyNotSupportError:
+            raise ProviderModelCurrentlyNotSupportError()
+        except InvokeError as e:
+            raise CompletionRequestError(e.description)
+        except InvokeRateLimitError as ex:
+            raise InvokeRateLimitHttpError(ex.description)
+        except ValueError as e:
+            raise e
+        except Exception:
+            logger.exception("internal server error.")
+            raise InternalServerError()
+
+
+class TrialMessageSuggestedQuestionApi(TrialAppResource):
+    @trial_feature_enable
+    def get(self, trial_app, message_id):
+        app_model = trial_app
+        app_mode = AppMode.value_of(app_model.mode)
+        if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
+            raise NotChatAppError()
+
+        message_id = str(message_id)
+
+        try:
+            if not isinstance(current_user, Account):
+                raise ValueError("current_user must be an Account instance")
+            questions = MessageService.get_suggested_questions_after_answer(
+                app_model=app_model, user=current_user, message_id=message_id, invoke_from=InvokeFrom.EXPLORE
+            )
+        except MessageNotExistsError:
+            raise NotFound("Message not found")
+        except ConversationNotExistsError:
+            raise NotFound("Conversation not found")
+        except SuggestedQuestionsAfterAnswerDisabledError:
+            raise AppSuggestedQuestionsAfterAnswerDisabledError()
+        except ProviderTokenNotInitError as ex:
+            raise ProviderNotInitializeError(ex.description)
+        except QuotaExceededError:
+            raise ProviderQuotaExceededError()
+        except ModelCurrentlyNotSupportError:
+            raise ProviderModelCurrentlyNotSupportError()
+        except InvokeError as e:
+            raise CompletionRequestError(e.description)
+        except Exception:
+            logger.exception("internal server error.")
+            raise InternalServerError()
+
+        return {"data": questions}
+
+
+class TrialChatAudioApi(TrialAppResource):
+    @trial_feature_enable
+    def post(self, trial_app):
+        app_model = trial_app
+
+        file = request.files["file"]
+
+        try:
+            if not isinstance(current_user, Account):
+                raise ValueError("current_user must be an Account instance")
Account instance") + + # Get IDs before they might be detached from session + app_id = app_model.id + user_id = current_user.id + + response = AudioService.transcript_asr(app_model=app_model, file=file, end_user=None) + RecommendedAppService.add_trial_app_record(app_id, user_id) + return response + except services.errors.app_model_config.AppModelConfigBrokenError: + logger.exception("App model config broken.") + raise AppUnavailableError() + except NoAudioUploadedServiceError: + raise NoAudioUploadedError() + except AudioTooLargeServiceError as e: + raise AudioTooLargeError(str(e)) + except UnsupportedAudioTypeServiceError: + raise UnsupportedAudioTypeError() + except ProviderNotSupportSpeechToTextServiceError: + raise ProviderNotSupportSpeechToTextError() + except ProviderTokenNotInitError as ex: + raise ProviderNotInitializeError(ex.description) + except QuotaExceededError: + raise ProviderQuotaExceededError() + except ModelCurrentlyNotSupportError: + raise ProviderModelCurrentlyNotSupportError() + except InvokeError as e: + raise CompletionRequestError(e.description) + except ValueError as e: + raise e + except Exception as e: + logger.exception("internal server error.") + raise InternalServerError() + + +class TrialChatTextApi(TrialAppResource): + @trial_feature_enable + def post(self, trial_app): + app_model = trial_app + try: + parser = reqparse.RequestParser() + parser.add_argument("message_id", type=str, required=False, location="json") + parser.add_argument("voice", type=str, location="json") + parser.add_argument("text", type=str, location="json") + parser.add_argument("streaming", type=bool, location="json") + args = parser.parse_args() + + message_id = args.get("message_id", None) + text = args.get("text", None) + voice = args.get("voice", None) + if not isinstance(current_user, Account): + raise ValueError("current_user must be an Account instance") + + # Get IDs before they might be detached from session + app_id = app_model.id + user_id = current_user.id + + response = AudioService.transcript_tts(app_model=app_model, text=text, voice=voice, message_id=message_id) + RecommendedAppService.add_trial_app_record(app_id, user_id) + return response + except services.errors.app_model_config.AppModelConfigBrokenError: + logger.exception("App model config broken.") + raise AppUnavailableError() + except NoAudioUploadedServiceError: + raise NoAudioUploadedError() + except AudioTooLargeServiceError as e: + raise AudioTooLargeError(str(e)) + except UnsupportedAudioTypeServiceError: + raise UnsupportedAudioTypeError() + except ProviderNotSupportSpeechToTextServiceError: + raise ProviderNotSupportSpeechToTextError() + except ProviderTokenNotInitError as ex: + raise ProviderNotInitializeError(ex.description) + except QuotaExceededError: + raise ProviderQuotaExceededError() + except ModelCurrentlyNotSupportError: + raise ProviderModelCurrentlyNotSupportError() + except InvokeError as e: + raise CompletionRequestError(e.description) + except ValueError as e: + raise e + except Exception as e: + logger.exception("internal server error.") + raise InternalServerError() + + +class TrialCompletionApi(TrialAppResource): + @trial_feature_enable + def post(self, trial_app): + app_model = trial_app + if app_model.mode != "completion": + raise NotCompletionAppError() + + parser = reqparse.RequestParser() + parser.add_argument("inputs", type=dict, required=True, location="json") + parser.add_argument("query", type=str, location="json", default="") + parser.add_argument("files", type=list, 
required=False, location="json") + parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") + parser.add_argument("retriever_from", type=str, required=False, default="explore_app", location="json") + args = parser.parse_args() + + streaming = args["response_mode"] == "streaming" + args["auto_generate_name"] = False + + try: + if not isinstance(current_user, Account): + raise ValueError("current_user must be an Account instance") + + # Get IDs before they might be detached from session + app_id = app_model.id + user_id = current_user.id + + response = AppGenerateService.generate( + app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.EXPLORE, streaming=streaming + ) + + RecommendedAppService.add_trial_app_record(app_id, user_id) + return helper.compact_generate_response(response) + except services.errors.conversation.ConversationNotExistsError: + raise NotFound("Conversation Not Exists.") + except services.errors.conversation.ConversationCompletedError: + raise ConversationCompletedError() + except services.errors.app_model_config.AppModelConfigBrokenError: + logger.exception("App model config broken.") + raise AppUnavailableError() + except ProviderTokenNotInitError as ex: + raise ProviderNotInitializeError(ex.description) + except QuotaExceededError: + raise ProviderQuotaExceededError() + except ModelCurrentlyNotSupportError: + raise ProviderModelCurrentlyNotSupportError() + except InvokeError as e: + raise CompletionRequestError(e.description) + except ValueError as e: + raise e + except Exception: + logger.exception("internal server error.") + raise InternalServerError() + + +class TrialSitApi(Resource): + """Resource for trial app sites.""" + + @trial_feature_enable + @get_app_model_with_trial + def get(self, app_model): + """Retrieve app site info. + + Returns the site configuration for the application including theme, icons, and text. 
+ """ + site = db.session.query(Site).where(Site.app_id == app_model.id).first() + + if not site: + raise Forbidden() + + assert app_model.tenant + if app_model.tenant.status == TenantStatus.ARCHIVE: + raise Forbidden() + + return SiteResponse.model_validate(site).model_dump(mode="json") + + +class TrialAppParameterApi(Resource): + """Resource for app variables.""" + + @trial_feature_enable + @get_app_model_with_trial + def get(self, app_model): + """Retrieve app parameters.""" + + if app_model is None: + raise AppUnavailableError() + + if app_model.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}: + workflow = app_model.workflow + if workflow is None: + raise AppUnavailableError() + + features_dict = workflow.features_dict + user_input_form = workflow.user_input_form(to_old_structure=True) + else: + app_model_config = app_model.app_model_config + if app_model_config is None: + raise AppUnavailableError() + + features_dict = app_model_config.to_dict() + + user_input_form = features_dict.get("user_input_form", []) + + parameters = get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form) + return ParametersResponse.model_validate(parameters).model_dump(mode="json") + + +class AppApi(Resource): + @trial_feature_enable + @get_app_model_with_trial + @marshal_with(app_detail_fields_with_site) + def get(self, app_model): + """Get app detail""" + + app_service = AppService() + app_model = app_service.get_app(app_model) + + return app_model + + +class AppWorkflowApi(Resource): + @trial_feature_enable + @get_app_model_with_trial + @marshal_with(workflow_fields) + def get(self, app_model): + """Get workflow detail""" + if not app_model.workflow_id: + raise AppUnavailableError() + + workflow = ( + db.session.query(Workflow) + .where( + Workflow.id == app_model.workflow_id, + ) + .first() + ) + return workflow + + +class DatasetListApi(Resource): + @trial_feature_enable + @get_app_model_with_trial + def get(self, app_model): + page = request.args.get("page", default=1, type=int) + limit = request.args.get("limit", default=20, type=int) + ids = request.args.getlist("ids") + + tenant_id = app_model.tenant_id + if ids: + datasets, total = DatasetService.get_datasets_by_ids(ids, tenant_id) + else: + raise NeedAddIdsError() + + data = cast(list[dict[str, Any]], marshal(datasets, dataset_fields)) + + response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page} + return response + + +api.add_resource(TrialChatApi, "/trial-apps//chat-messages", endpoint="trial_app_chat_completion") + +api.add_resource( + TrialMessageSuggestedQuestionApi, + "/trial-apps//messages//suggested-questions", + endpoint="trial_app_suggested_question", +) + +api.add_resource(TrialChatAudioApi, "/trial-apps//audio-to-text", endpoint="trial_app_audio") +api.add_resource(TrialChatTextApi, "/trial-apps//text-to-audio", endpoint="trial_app_text") + +api.add_resource(TrialCompletionApi, "/trial-apps//completion-messages", endpoint="trial_app_completion") + +api.add_resource(TrialSitApi, "/trial-apps//site") + +api.add_resource(TrialAppParameterApi, "/trial-apps//parameters", endpoint="trial_app_parameters") + +api.add_resource(AppApi, "/trial-apps/", endpoint="trial_app") + +api.add_resource(TrialAppWorkflowRunApi, "/trial-apps//workflows/run", endpoint="trial_app_workflow_run") +api.add_resource(TrialAppWorkflowTaskStopApi, "/trial-apps//workflows/tasks//stop") + +api.add_resource(AppWorkflowApi, "/trial-apps//workflows", endpoint="trial_app_workflow") 
+api.add_resource(DatasetListApi, "/trial-apps/<uuid:app_id>/datasets", endpoint="trial_app_datasets")
diff --git a/api/controllers/console/explore/wraps.py b/api/controllers/console/explore/wraps.py
index 2a97d312aa..38f0a04904 100644
--- a/api/controllers/console/explore/wraps.py
+++ b/api/controllers/console/explore/wraps.py
@@ -2,14 +2,15 @@ from collections.abc import Callable
 from functools import wraps
 from typing import Concatenate, ParamSpec, TypeVar

+from flask import abort
 from flask_restx import Resource
 from werkzeug.exceptions import NotFound

-from controllers.console.explore.error import AppAccessDeniedError
+from controllers.console.explore.error import AppAccessDeniedError, TrialAppLimitExceeded, TrialAppNotAllowed
 from controllers.console.wraps import account_initialization_required
 from extensions.ext_database import db
 from libs.login import current_account_with_tenant, login_required
-from models import InstalledApp
+from models import AccountTrialAppRecord, App, InstalledApp, TrialApp
 from services.enterprise.enterprise_service import EnterpriseService
 from services.feature_service import FeatureService
@@ -71,6 +72,61 @@ def user_allowed_to_access_app(view: Callable[Concatenate[InstalledApp, P], R] |
     return decorator


+def trial_app_required(view: Callable[Concatenate[App, P], R] | None = None):
+    def decorator(view: Callable[Concatenate[App, P], R]):
+        @wraps(view)
+        def decorated(app_id: str, *args: P.args, **kwargs: P.kwargs):
+            current_user, _ = current_account_with_tenant()
+
+            trial_app = db.session.query(TrialApp).where(TrialApp.app_id == str(app_id)).first()
+
+            if trial_app is None:
+                raise TrialAppNotAllowed()
+            app = trial_app.app
+
+            if app is None:
+                raise TrialAppNotAllowed()
+
+            account_trial_app_record = (
+                db.session.query(AccountTrialAppRecord)
+                .where(AccountTrialAppRecord.account_id == current_user.id, AccountTrialAppRecord.app_id == app_id)
+                .first()
+            )
+            if account_trial_app_record:
+                if account_trial_app_record.count >= trial_app.trial_limit:
+                    raise TrialAppLimitExceeded()
+
+            return view(app, *args, **kwargs)
+
+        return decorated
+
+    if view:
+        return decorator(view)
+    return decorator
+
+
+def trial_feature_enable(view: Callable[..., R]) -> Callable[..., R]:
+    @wraps(view)
+    def decorated(*args, **kwargs):
+        features = FeatureService.get_system_features()
+        if not features.enable_trial_app:
+            abort(403, "Trial app feature is not enabled.")
+        return view(*args, **kwargs)
+
+    return decorated
+
+
+def explore_banner_enabled(view: Callable[..., R]) -> Callable[..., R]:
+    @wraps(view)
+    def decorated(*args, **kwargs):
+        features = FeatureService.get_system_features()
+        if not features.enable_explore_banner:
+            abort(403, "Explore banner feature is not enabled.")
+        return view(*args, **kwargs)
+
+    return decorated
+
+
 class InstalledAppResource(Resource):
     # must be reversed if there are multiple decorators
@@ -80,3 +136,13 @@ class InstalledAppResource(Resource):
         account_initialization_required,
         login_required,
     ]
+
+
+class TrialAppResource(Resource):
+    # must be reversed if there are multiple decorators
+
+    method_decorators = [
+        trial_app_required,
+        account_initialization_required,
+        login_required,
+    ]
diff --git a/api/controllers/console/extension.py b/api/controllers/console/extension.py
index 08f29b4655..efa46c9779 100644
--- a/api/controllers/console/extension.py
+++ b/api/controllers/console/extension.py
@@ -1,14 +1,32 @@
-from flask_restx import Resource, fields, marshal_with, reqparse
+from flask import request
+from flask_restx import Resource, fields, marshal_with
+from pydantic import BaseModel, Field

 from constants import HIDDEN_VALUE
-from controllers.console import console_ns
-from controllers.console.wraps import account_initialization_required, setup_required
 from fields.api_based_extension_fields import api_based_extension_fields
 from libs.login import current_account_with_tenant, login_required
 from models.api_based_extension import APIBasedExtension
 from services.api_based_extension_service import APIBasedExtensionService
 from services.code_based_extension_service import CodeBasedExtensionService

+from ..common.schema import register_schema_models
+from . import console_ns
+from .wraps import account_initialization_required, setup_required
+
+
+class CodeBasedExtensionQuery(BaseModel):
+    module: str
+
+
+class APIBasedExtensionPayload(BaseModel):
+    name: str = Field(description="Extension name")
+    api_endpoint: str = Field(description="API endpoint URL")
+    api_key: str = Field(description="API key for authentication")
+
+
+register_schema_models(console_ns, APIBasedExtensionPayload)
+
+
 api_based_extension_model = console_ns.model("ApiBasedExtensionModel", api_based_extension_fields)
 api_based_extension_list_model = fields.List(fields.Nested(api_based_extension_model))
@@ -18,11 +36,7 @@ api_based_extension_list_model = fields.List(fields.Nested(api_based_extension_m
 class CodeBasedExtensionAPI(Resource):
     @console_ns.doc("get_code_based_extension")
     @console_ns.doc(description="Get code-based extension data by module name")
-    @console_ns.expect(
-        console_ns.parser().add_argument(
-            "module", type=str, required=True, location="args", help="Extension module name"
-        )
-    )
+    @console_ns.doc(params={"module": "Extension module name"})
     @console_ns.response(
         200,
         "Success",
@@ -35,10 +49,9 @@ class CodeBasedExtensionAPI(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        parser = reqparse.RequestParser().add_argument("module", type=str, required=True, location="args")
-        args = parser.parse_args()
+        query = CodeBasedExtensionQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore

-        return {"module": args["module"], "data": CodeBasedExtensionService.get_code_based_extension(args["module"])}
+        return {"module": query.module, "data": CodeBasedExtensionService.get_code_based_extension(query.module)}


 @console_ns.route("/api-based-extension")
@@ -56,30 +69,21 @@ class APIBasedExtensionAPI(Resource):

     @console_ns.doc("create_api_based_extension")
     @console_ns.doc(description="Create a new API-based extension")
-    @console_ns.expect(
-        console_ns.model(
-            "CreateAPIBasedExtensionRequest",
-            {
-                "name": fields.String(required=True, description="Extension name"),
-                "api_endpoint": fields.String(required=True, description="API endpoint URL"),
-                "api_key": fields.String(required=True, description="API key for authentication"),
-            },
-        )
-    )
+    @console_ns.expect(console_ns.models[APIBasedExtensionPayload.__name__])
     @console_ns.response(201, "Extension created successfully", api_based_extension_model)
     @setup_required
     @login_required
     @account_initialization_required
     @marshal_with(api_based_extension_model)
     def post(self):
-        args = console_ns.payload
+        payload = APIBasedExtensionPayload.model_validate(console_ns.payload or {})
         _, current_tenant_id = current_account_with_tenant()

         extension_data = APIBasedExtension(
             tenant_id=current_tenant_id,
-            name=args["name"],
-            api_endpoint=args["api_endpoint"],
-            api_key=args["api_key"],
+            name=payload.name,
+            api_endpoint=payload.api_endpoint,
+            api_key=payload.api_key,
         )

         return APIBasedExtensionService.save(extension_data)
@@ -104,16 +108,7 @@ class APIBasedExtensionDetailAPI(Resource):

     @console_ns.doc("update_api_based_extension")
     @console_ns.doc(description="Update API-based extension")
     @console_ns.doc(params={"id": "Extension ID"})
-    @console_ns.expect(
-        console_ns.model(
-            "UpdateAPIBasedExtensionRequest",
-            {
-                "name": fields.String(required=True, description="Extension name"),
-                "api_endpoint": fields.String(required=True, description="API endpoint URL"),
-                "api_key": fields.String(required=True, description="API key for authentication"),
-            },
-        )
-    )
+    @console_ns.expect(console_ns.models[APIBasedExtensionPayload.__name__])
     @console_ns.response(200, "Extension updated successfully", api_based_extension_model)
     @setup_required
     @login_required
@@ -125,13 +120,13 @@ class APIBasedExtensionDetailAPI(Resource):

         extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(current_tenant_id, api_based_extension_id)

-        args = console_ns.payload
+        payload = APIBasedExtensionPayload.model_validate(console_ns.payload or {})

-        extension_data_from_db.name = args["name"]
-        extension_data_from_db.api_endpoint = args["api_endpoint"]
+        extension_data_from_db.name = payload.name
+        extension_data_from_db.api_endpoint = payload.api_endpoint

-        if args["api_key"] != HIDDEN_VALUE:
-            extension_data_from_db.api_key = args["api_key"]
+        if payload.api_key != HIDDEN_VALUE:
+            extension_data_from_db.api_key = payload.api_key

         return APIBasedExtensionService.save(extension_data_from_db)
diff --git a/api/controllers/console/files.py b/api/controllers/console/files.py
index 29417dc896..109a3cd0d3 100644
--- a/api/controllers/console/files.py
+++ b/api/controllers/console/files.py
@@ -1,7 +1,7 @@
 from typing import Literal

 from flask import request
-from flask_restx import Resource, marshal_with
+from flask_restx import Resource
 from werkzeug.exceptions import Forbidden

 import services
@@ -15,18 +15,21 @@ from controllers.common.errors import (
     TooManyFilesError,
     UnsupportedFileTypeError,
 )
+from controllers.common.schema import register_schema_models
 from controllers.console.wraps import (
     account_initialization_required,
     cloud_edition_billing_resource_check,
     setup_required,
 )
 from extensions.ext_database import db
-from fields.file_fields import file_fields, upload_config_fields
+from fields.file_fields import FileResponse, UploadConfig
 from libs.login import current_account_with_tenant, login_required
 from services.file_service import FileService

 from . import console_ns
+register_schema_models(console_ns, UploadConfig, FileResponse)
+
 PREVIEW_WORDS_LIMIT = 3000
@@ -35,26 +38,27 @@ class FileApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
-    @marshal_with(upload_config_fields)
+    @console_ns.response(200, "Success", console_ns.models[UploadConfig.__name__])
     def get(self):
-        return {
-            "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT,
-            "batch_count_limit": dify_config.UPLOAD_FILE_BATCH_LIMIT,
-            "file_upload_limit": dify_config.BATCH_UPLOAD_LIMIT,
-            "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT,
-            "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT,
-            "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT,
-            "workflow_file_upload_limit": dify_config.WORKFLOW_FILE_UPLOAD_LIMIT,
-            "image_file_batch_limit": dify_config.IMAGE_FILE_BATCH_LIMIT,
-            "single_chunk_attachment_limit": dify_config.SINGLE_CHUNK_ATTACHMENT_LIMIT,
-            "attachment_image_file_size_limit": dify_config.ATTACHMENT_IMAGE_FILE_SIZE_LIMIT,
-        }, 200
+        config = UploadConfig(
+            file_size_limit=dify_config.UPLOAD_FILE_SIZE_LIMIT,
+            batch_count_limit=dify_config.UPLOAD_FILE_BATCH_LIMIT,
+            file_upload_limit=dify_config.BATCH_UPLOAD_LIMIT,
+            image_file_size_limit=dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT,
+            video_file_size_limit=dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT,
+            audio_file_size_limit=dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT,
+            workflow_file_upload_limit=dify_config.WORKFLOW_FILE_UPLOAD_LIMIT,
+            image_file_batch_limit=dify_config.IMAGE_FILE_BATCH_LIMIT,
+            single_chunk_attachment_limit=dify_config.SINGLE_CHUNK_ATTACHMENT_LIMIT,
+            attachment_image_file_size_limit=dify_config.ATTACHMENT_IMAGE_FILE_SIZE_LIMIT,
+        )
+        return config.model_dump(mode="json"), 200

     @setup_required
     @login_required
     @account_initialization_required
-    @marshal_with(file_fields)
     @cloud_edition_billing_resource_check("documents")
+    @console_ns.response(201, "File uploaded successfully", console_ns.models[FileResponse.__name__])
     def post(self):
         current_user, _ = current_account_with_tenant()
         source_str = request.form.get("source")
@@ -90,7 +94,8 @@ class FileApi(Resource):
         except services.errors.file.BlockedFileExtensionError as blocked_extension_error:
             raise BlockedFileExtensionError(blocked_extension_error.description)

-        return upload_file, 201
+        response = FileResponse.model_validate(upload_file, from_attributes=True)
+        return response.model_dump(mode="json"), 201


 @console_ns.route("/files/<uuid:file_id>/preview")
diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py
index 47eef7eb7e..70c7b80ffa 100644
--- a/api/controllers/console/remote_files.py
+++ b/api/controllers/console/remote_files.py
@@ -1,7 +1,7 @@
 import urllib.parse

 import httpx
-from flask_restx import Resource, marshal_with
+from flask_restx import Resource
 from pydantic import BaseModel, Field

 import services
@@ -11,19 +11,22 @@ from controllers.common.errors import (
     RemoteFileUploadError,
     UnsupportedFileTypeError,
 )
+from controllers.common.schema import register_schema_models
 from core.file import helpers as file_helpers
 from core.helper import ssrf_proxy
 from extensions.ext_database import db
-from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields
+from fields.file_fields import FileWithSignedUrl, RemoteFileInfo
 from libs.login import current_account_with_tenant
 from services.file_service import FileService

 from . import console_ns
+register_schema_models(console_ns, RemoteFileInfo, FileWithSignedUrl)
+

 @console_ns.route("/remote-files/<path:url>")
 class RemoteFileInfoApi(Resource):
-    @marshal_with(remote_file_info_fields)
+    @console_ns.response(200, "Remote file info", console_ns.models[RemoteFileInfo.__name__])
     def get(self, url):
         decoded_url = urllib.parse.unquote(url)
         resp = ssrf_proxy.head(decoded_url)
@@ -31,10 +34,11 @@ class RemoteFileInfoApi(Resource):
             # failed back to get method
             resp = ssrf_proxy.get(decoded_url, timeout=3)
         resp.raise_for_status()
-        return {
-            "file_type": resp.headers.get("Content-Type", "application/octet-stream"),
-            "file_length": int(resp.headers.get("Content-Length", 0)),
-        }
+        info = RemoteFileInfo(
+            file_type=resp.headers.get("Content-Type", "application/octet-stream"),
+            file_length=int(resp.headers.get("Content-Length", 0)),
+        )
+        return info.model_dump(mode="json")


 class RemoteFileUploadPayload(BaseModel):
@@ -50,7 +54,7 @@ console_ns.schema_model(
 @console_ns.route("/remote-files/upload")
 class RemoteFileUploadApi(Resource):
     @console_ns.expect(console_ns.models[RemoteFileUploadPayload.__name__])
-    @marshal_with(file_fields_with_signed_url)
+    @console_ns.response(201, "Remote file uploaded", console_ns.models[FileWithSignedUrl.__name__])
     def post(self):
         args = RemoteFileUploadPayload.model_validate(console_ns.payload)
         url = args.url
@@ -85,13 +89,14 @@ class RemoteFileUploadApi(Resource):
         except services.errors.file.UnsupportedFileTypeError:
             raise UnsupportedFileTypeError()

-        return {
-            "id": upload_file.id,
-            "name": upload_file.name,
-            "size": upload_file.size,
-            "extension": upload_file.extension,
-            "url": file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
-            "mime_type": upload_file.mime_type,
-            "created_by": upload_file.created_by,
-            "created_at": upload_file.created_at,
-        }, 201
+        payload = FileWithSignedUrl(
+            id=upload_file.id,
+            name=upload_file.name,
+            size=upload_file.size,
+            extension=upload_file.extension,
+            url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
+            mime_type=upload_file.mime_type,
+            created_by=upload_file.created_by,
+            created_at=int(upload_file.created_at.timestamp()),
+        )
+        return payload.model_dump(mode="json"), 201
diff --git a/api/controllers/console/tag/tags.py b/api/controllers/console/tag/tags.py
index 17cfc3ff4b..e9fbb515e4 100644
--- a/api/controllers/console/tag/tags.py
+++ b/api/controllers/console/tag/tags.py
@@ -1,31 +1,40 @@
+from typing import Literal
+
 from flask import request
-from flask_restx import Resource, marshal_with, reqparse
+from flask_restx import Resource, marshal_with
+from pydantic import BaseModel, Field
 from werkzeug.exceptions import Forbidden

+from controllers.common.schema import register_schema_models
 from controllers.console import console_ns
 from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
 from fields.tag_fields import dataset_tag_fields
 from libs.login import current_account_with_tenant, login_required
-from models.model import Tag
 from services.tag_service import TagService


-def _validate_name(name):
-    if not name or len(name) < 1 or len(name) > 50:
-        raise ValueError("Name must be between 1 to 50 characters.")
-    return name
+class TagBasePayload(BaseModel):
+    name: str = Field(description="Tag name", min_length=1, max_length=50)
+    type: Literal["knowledge", "app"] | None = Field(default=None, description="Tag type")


-parser_tags = (
-    reqparse.RequestParser()
-    .add_argument(
-        "name",
-        nullable=False,
required=True, - help="Name must be between 1 to 50 characters.", - type=_validate_name, - ) - .add_argument("type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type.") +class TagBindingPayload(BaseModel): + tag_ids: list[str] = Field(description="Tag IDs to bind") + target_id: str = Field(description="Target ID to bind tags to") + type: Literal["knowledge", "app"] | None = Field(default=None, description="Tag type") + + +class TagBindingRemovePayload(BaseModel): + tag_id: str = Field(description="Tag ID to remove") + target_id: str = Field(description="Target ID to unbind tag from") + type: Literal["knowledge", "app"] | None = Field(default=None, description="Tag type") + + +register_schema_models( + console_ns, + TagBasePayload, + TagBindingPayload, + TagBindingRemovePayload, ) @@ -43,7 +52,7 @@ class TagListApi(Resource): return tags, 200 - @console_ns.expect(parser_tags) + @console_ns.expect(console_ns.models[TagBasePayload.__name__]) @setup_required @login_required @account_initialization_required @@ -53,22 +62,17 @@ class TagListApi(Resource): if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() - args = parser_tags.parse_args() - tag = TagService.save_tags(args) + payload = TagBasePayload.model_validate(console_ns.payload or {}) + tag = TagService.save_tags(payload.model_dump()) response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": 0} return response, 200 -parser_tag_id = reqparse.RequestParser().add_argument( - "name", nullable=False, required=True, help="Name must be between 1 to 50 characters.", type=_validate_name -) - - @console_ns.route("/tags/") class TagUpdateDeleteApi(Resource): - @console_ns.expect(parser_tag_id) + @console_ns.expect(console_ns.models[TagBasePayload.__name__]) @setup_required @login_required @account_initialization_required @@ -79,8 +83,8 @@ class TagUpdateDeleteApi(Resource): if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() - args = parser_tag_id.parse_args() - tag = TagService.update_tags(args, tag_id) + payload = TagBasePayload.model_validate(console_ns.payload or {}) + tag = TagService.update_tags(payload.model_dump(), tag_id) binding_count = TagService.get_tag_binding_count(tag_id) @@ -100,17 +104,9 @@ class TagUpdateDeleteApi(Resource): return 204 -parser_create = ( - reqparse.RequestParser() - .add_argument("tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required.") - .add_argument("target_id", type=str, nullable=False, required=True, location="json", help="Target ID is required.") - .add_argument("type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type.") -) - - @console_ns.route("/tag-bindings/create") class TagBindingCreateApi(Resource): - @console_ns.expect(parser_create) + @console_ns.expect(console_ns.models[TagBindingPayload.__name__]) @setup_required @login_required @account_initialization_required @@ -120,23 +116,15 @@ class TagBindingCreateApi(Resource): if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() - args = parser_create.parse_args() - TagService.save_tag_binding(args) + payload = TagBindingPayload.model_validate(console_ns.payload or {}) + TagService.save_tag_binding(payload.model_dump()) return {"result": "success"}, 200 -parser_remove = ( - reqparse.RequestParser() - .add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is 
required.") - .add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.") - .add_argument("type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type.") -) - - @console_ns.route("/tag-bindings/remove") class TagBindingDeleteApi(Resource): - @console_ns.expect(parser_remove) + @console_ns.expect(console_ns.models[TagBindingRemovePayload.__name__]) @setup_required @login_required @account_initialization_required @@ -146,7 +134,7 @@ class TagBindingDeleteApi(Resource): if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() - args = parser_remove.parse_args() - TagService.delete_tag_binding(args) + payload = TagBindingRemovePayload.model_validate(console_ns.payload or {}) + TagService.delete_tag_binding(payload.model_dump()) return {"result": "success"}, 200 diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index 55eaa2f09f..03ad0f423b 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from datetime import datetime from typing import Literal @@ -99,7 +101,7 @@ class AccountPasswordPayload(BaseModel): repeat_new_password: str @model_validator(mode="after") - def check_passwords_match(self) -> "AccountPasswordPayload": + def check_passwords_match(self) -> AccountPasswordPayload: if self.new_password != self.repeat_new_password: raise RepeatPasswordNotMatchError() return self diff --git a/api/controllers/console/workspace/load_balancing_config.py b/api/controllers/console/workspace/load_balancing_config.py index 9bf393ea2e..ccb60b1461 100644 --- a/api/controllers/console/workspace/load_balancing_config.py +++ b/api/controllers/console/workspace/load_balancing_config.py @@ -1,6 +1,8 @@ -from flask_restx import Resource, reqparse +from flask_restx import Resource +from pydantic import BaseModel from werkzeug.exceptions import Forbidden +from controllers.common.schema import register_schema_models from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.entities.model_entities import ModelType @@ -10,10 +12,20 @@ from models import TenantAccountRole from services.model_load_balancing_service import ModelLoadBalancingService +class LoadBalancingCredentialPayload(BaseModel): + model: str + model_type: ModelType + credentials: dict[str, object] + + +register_schema_models(console_ns, LoadBalancingCredentialPayload) + + @console_ns.route( "/workspaces/current/model-providers//models/load-balancing-configs/credentials-validate" ) class LoadBalancingCredentialsValidateApi(Resource): + @console_ns.expect(console_ns.models[LoadBalancingCredentialPayload.__name__]) @setup_required @login_required @account_initialization_required @@ -24,20 +36,7 @@ class LoadBalancingCredentialsValidateApi(Resource): tenant_id = current_tenant_id - parser = ( - reqparse.RequestParser() - .add_argument("model", type=str, required=True, nullable=False, location="json") - .add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="json", - ) - .add_argument("credentials", type=dict, required=True, nullable=False, location="json") - ) - args = parser.parse_args() + payload = LoadBalancingCredentialPayload.model_validate(console_ns.payload or {}) # validate model load balancing 
credentials model_load_balancing_service = ModelLoadBalancingService() @@ -49,9 +48,9 @@ class LoadBalancingCredentialsValidateApi(Resource): model_load_balancing_service.validate_load_balancing_credentials( tenant_id=tenant_id, provider=provider, - model=args["model"], - model_type=args["model_type"], - credentials=args["credentials"], + model=payload.model, + model_type=payload.model_type, + credentials=payload.credentials, ) except CredentialsValidateFailedError as ex: result = False @@ -69,6 +68,7 @@ class LoadBalancingCredentialsValidateApi(Resource): "/workspaces/current/model-providers//models/load-balancing-configs//credentials-validate" ) class LoadBalancingConfigCredentialsValidateApi(Resource): + @console_ns.expect(console_ns.models[LoadBalancingCredentialPayload.__name__]) @setup_required @login_required @account_initialization_required @@ -79,20 +79,7 @@ class LoadBalancingConfigCredentialsValidateApi(Resource): tenant_id = current_tenant_id - parser = ( - reqparse.RequestParser() - .add_argument("model", type=str, required=True, nullable=False, location="json") - .add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="json", - ) - .add_argument("credentials", type=dict, required=True, nullable=False, location="json") - ) - args = parser.parse_args() + payload = LoadBalancingCredentialPayload.model_validate(console_ns.payload or {}) # validate model load balancing config credentials model_load_balancing_service = ModelLoadBalancingService() @@ -104,9 +91,9 @@ class LoadBalancingConfigCredentialsValidateApi(Resource): model_load_balancing_service.validate_load_balancing_credentials( tenant_id=tenant_id, provider=provider, - model=args["model"], - model_type=args["model_type"], - credentials=args["credentials"], + model=payload.model, + model_type=payload.model_type, + credentials=payload.credentials, config_id=config_id, ) except CredentialsValidateFailedError as ex: diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index 805058ba5a..ea74fc0337 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -1,5 +1,6 @@ import io -from typing import Literal +from collections.abc import Mapping +from typing import Any, Literal from flask import request, send_file from flask_restx import Resource @@ -141,6 +142,15 @@ class ParserDynamicOptions(BaseModel): provider_type: Literal["tool", "trigger"] +class ParserDynamicOptionsWithCredentials(BaseModel): + plugin_id: str + provider: str + action: str + parameter: str + credential_id: str + credentials: Mapping[str, Any] + + class PluginPermissionSettingsPayload(BaseModel): install_permission: TenantPluginPermission.InstallPermission = TenantPluginPermission.InstallPermission.EVERYONE debug_permission: TenantPluginPermission.DebugPermission = TenantPluginPermission.DebugPermission.EVERYONE @@ -183,6 +193,7 @@ reg(ParserGithubUpgrade) reg(ParserUninstall) reg(ParserPermissionChange) reg(ParserDynamicOptions) +reg(ParserDynamicOptionsWithCredentials) reg(ParserPreferencesChange) reg(ParserExcludePlugin) reg(ParserReadme) @@ -657,6 +668,37 @@ class PluginFetchDynamicSelectOptionsApi(Resource): return jsonable_encoder({"options": options}) +@console_ns.route("/workspaces/current/plugin/parameters/dynamic-options-with-credentials") +class PluginFetchDynamicSelectOptionsWithCredentialsApi(Resource): + 
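    """Like PluginFetchDynamicSelectOptionsApi, but fetches options using credentials supplied in the request body (for edit mode) rather than only a stored credential_id."""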
@console_ns.expect(console_ns.models[ParserDynamicOptionsWithCredentials.__name__]) + @setup_required + @login_required + @is_admin_or_owner_required + @account_initialization_required + def post(self): + """Fetch dynamic options using credentials directly (for edit mode).""" + current_user, tenant_id = current_account_with_tenant() + user_id = current_user.id + + args = ParserDynamicOptionsWithCredentials.model_validate(console_ns.payload) + + try: + options = PluginParameterService.get_dynamic_select_options_with_credentials( + tenant_id=tenant_id, + user_id=user_id, + plugin_id=args.plugin_id, + provider=args.provider, + action=args.action, + parameter=args.parameter, + credential_id=args.credential_id, + credentials=args.credentials, + ) + except PluginDaemonClientSideError as e: + raise ValueError(e) + + return jsonable_encoder({"options": options}) + + @console_ns.route("/workspaces/current/plugin/preferences/change") class PluginChangePreferencesApi(Resource): @console_ns.expect(console_ns.models[ParserPreferencesChange.__name__]) diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index 2c54aa5a20..e9e7b72718 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -1,4 +1,5 @@ import io +import logging from urllib.parse import urlparse from flask import make_response, redirect, request, send_file @@ -17,6 +18,7 @@ from controllers.console.wraps import ( is_admin_or_owner_required, setup_required, ) +from core.db.session_factory import session_factory from core.entities.mcp_provider import MCPAuthentication, MCPConfiguration from core.mcp.auth.auth_flow import auth, handle_callback from core.mcp.error import MCPAuthError, MCPError, MCPRefreshTokenError @@ -39,6 +41,8 @@ from services.tools.tools_manage_service import ToolCommonService from services.tools.tools_transform_service import ToolTransformService from services.tools.workflow_tools_manage_service import WorkflowToolManageService +logger = logging.getLogger(__name__) + def is_valid_url(url: str) -> bool: if not url: @@ -944,8 +948,8 @@ class ToolProviderMCPApi(Resource): configuration = MCPConfiguration.model_validate(args["configuration"]) authentication = MCPAuthentication.model_validate(args["authentication"]) if args["authentication"] else None - # Create provider - with Session(db.engine) as session, session.begin(): + # 1) Create provider in a short transaction (no network I/O inside) + with session_factory.create_session() as session, session.begin(): service = MCPToolManageService(session=session) result = service.create_provider( tenant_id=tenant_id, @@ -960,7 +964,29 @@ class ToolProviderMCPApi(Resource): configuration=configuration, authentication=authentication, ) - return jsonable_encoder(result) + + # 2) Try to fetch tools immediately after creation so they appear without a second save. + # Perform network I/O outside any DB session to avoid holding locks. 
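+        # Sequence: a short transaction commits the row, the network call runs
+        # with no session open, then a second short transaction persists the
+        # fetched tools.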
+ try: + reconnect = MCPToolManageService.reconnect_with_url( + server_url=args["server_url"], + headers=args.get("headers") or {}, + timeout=configuration.timeout, + sse_read_timeout=configuration.sse_read_timeout, + ) + # Update just-created provider with authed/tools in a new short transaction + with session_factory.create_session() as session, session.begin(): + service = MCPToolManageService(session=session) + db_provider = service.get_provider(provider_id=result.id, tenant_id=tenant_id) + db_provider.authed = reconnect.authed + db_provider.tools = reconnect.tools + + result = ToolTransformService.mcp_provider_to_user_provider(db_provider, for_list=True) + except Exception: + # Best-effort: if initial fetch fails (e.g., auth required), return created provider as-is + logger.warning("Failed to fetch MCP tools after creation", exc_info=True) + + return jsonable_encoder(result) @console_ns.expect(parser_mcp_put) @setup_required @@ -972,17 +998,23 @@ class ToolProviderMCPApi(Resource): authentication = MCPAuthentication.model_validate(args["authentication"]) if args["authentication"] else None _, current_tenant_id = current_account_with_tenant() - # Step 1: Validate server URL change if needed (includes URL format validation and network operation) - validation_result = None + # Step 1: Get provider data for URL validation (short-lived session, no network I/O) + validation_data = None with Session(db.engine) as session: service = MCPToolManageService(session=session) - validation_result = service.validate_server_url_change( - tenant_id=current_tenant_id, provider_id=args["provider_id"], new_server_url=args["server_url"] + validation_data = service.get_provider_for_url_validation( + tenant_id=current_tenant_id, provider_id=args["provider_id"] ) - # No need to check for errors here, exceptions will be raised directly + # Step 2: Perform URL validation with network I/O OUTSIDE of any database session + # This prevents holding database locks during potentially slow network operations + validation_result = MCPToolManageService.validate_server_url_standalone( + tenant_id=current_tenant_id, + new_server_url=args["server_url"], + validation_data=validation_data, + ) - # Step 2: Perform database update in a transaction + # Step 3: Perform database update in a transaction with Session(db.engine) as session, session.begin(): service = MCPToolManageService(session=session) service.update_provider( @@ -999,7 +1031,8 @@ class ToolProviderMCPApi(Resource): authentication=authentication, validation_result=validation_result, ) - return {"result": "success"} + + return {"result": "success"} @console_ns.expect(parser_mcp_delete) @setup_required @@ -1012,7 +1045,8 @@ class ToolProviderMCPApi(Resource): with Session(db.engine) as session, session.begin(): service = MCPToolManageService(session=session) service.delete_provider(tenant_id=current_tenant_id, provider_id=args["provider_id"]) - return {"result": "success"} + + return {"result": "success"} parser_auth = ( diff --git a/api/controllers/console/workspace/trigger_providers.py b/api/controllers/console/workspace/trigger_providers.py index 268473d6d1..6b642af613 100644 --- a/api/controllers/console/workspace/trigger_providers.py +++ b/api/controllers/console/workspace/trigger_providers.py @@ -1,11 +1,14 @@ import logging +from typing import Any from flask import make_response, redirect, request -from flask_restx import Resource, reqparse +from flask_restx import Resource +from pydantic import BaseModel, model_validator from sqlalchemy.orm import Session 
from werkzeug.exceptions import BadRequest, Forbidden from configs import dify_config +from controllers.common.schema import register_schema_models from controllers.web.error import NotFoundError from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.entities.plugin_daemon import CredentialType @@ -32,6 +35,41 @@ from ..wraps import ( logger = logging.getLogger(__name__) +class TriggerSubscriptionBuilderCreatePayload(BaseModel): + credential_type: str = CredentialType.UNAUTHORIZED + + +class TriggerSubscriptionBuilderVerifyPayload(BaseModel): + credentials: dict[str, Any] + + +class TriggerSubscriptionBuilderUpdatePayload(BaseModel): + name: str | None = None + parameters: dict[str, Any] | None = None + properties: dict[str, Any] | None = None + credentials: dict[str, Any] | None = None + + @model_validator(mode="after") + def check_at_least_one_field(self): + if all(v is None for v in self.model_dump().values()): + raise ValueError("At least one of name, credentials, parameters, or properties must be provided") + return self + + +class TriggerOAuthClientPayload(BaseModel): + client_params: dict[str, Any] | None = None + enabled: bool | None = None + + +register_schema_models( + console_ns, + TriggerSubscriptionBuilderCreatePayload, + TriggerSubscriptionBuilderVerifyPayload, + TriggerSubscriptionBuilderUpdatePayload, + TriggerOAuthClientPayload, +) + + @console_ns.route("/workspaces/current/trigger-provider//icon") class TriggerProviderIconApi(Resource): @setup_required @@ -97,16 +135,11 @@ class TriggerSubscriptionListApi(Resource): raise -parser = reqparse.RequestParser().add_argument( - "credential_type", type=str, required=False, nullable=True, location="json" -) - - @console_ns.route( "/workspaces/current/trigger-provider//subscriptions/builder/create", ) class TriggerSubscriptionBuilderCreateApi(Resource): - @console_ns.expect(parser) + @console_ns.expect(console_ns.models[TriggerSubscriptionBuilderCreatePayload.__name__]) @setup_required @login_required @edit_permission_required @@ -116,10 +149,10 @@ class TriggerSubscriptionBuilderCreateApi(Resource): user = current_user assert user.current_tenant_id is not None - args = parser.parse_args() + payload = TriggerSubscriptionBuilderCreatePayload.model_validate(console_ns.payload or {}) try: - credential_type = CredentialType.of(args.get("credential_type") or CredentialType.UNAUTHORIZED.value) + credential_type = CredentialType.of(payload.credential_type) subscription_builder = TriggerSubscriptionBuilderService.create_trigger_subscription_builder( tenant_id=user.current_tenant_id, user_id=user.id, @@ -147,28 +180,21 @@ class TriggerSubscriptionBuilderGetApi(Resource): ) -parser_api = ( - reqparse.RequestParser() - # The credentials of the subscription builder - .add_argument("credentials", type=dict, required=False, nullable=True, location="json") -) - - @console_ns.route( - "/workspaces/current/trigger-provider//subscriptions/builder/verify/", + "/workspaces/current/trigger-provider//subscriptions/builder/verify-and-update/", ) class TriggerSubscriptionBuilderVerifyApi(Resource): - @console_ns.expect(parser_api) + @console_ns.expect(console_ns.models[TriggerSubscriptionBuilderVerifyPayload.__name__]) @setup_required @login_required @edit_permission_required @account_initialization_required def post(self, provider, subscription_builder_id): - """Verify a subscription instance for a trigger provider""" + """Verify and update a subscription instance for a trigger provider""" user = current_user assert 
user.current_tenant_id is not None - args = parser_api.parse_args() + payload = TriggerSubscriptionBuilderVerifyPayload.model_validate(console_ns.payload or {}) try: # Use atomic update_and_verify to prevent race conditions @@ -178,7 +204,7 @@ class TriggerSubscriptionBuilderVerifyApi(Resource): provider_id=TriggerProviderID(provider), subscription_builder_id=subscription_builder_id, subscription_builder_updater=SubscriptionBuilderUpdater( - credentials=args.get("credentials", None), + credentials=payload.credentials, ), ) except Exception as e: @@ -186,24 +212,11 @@ class TriggerSubscriptionBuilderVerifyApi(Resource): raise ValueError(str(e)) from e -parser_update_api = ( - reqparse.RequestParser() - # The name of the subscription builder - .add_argument("name", type=str, required=False, nullable=True, location="json") - # The parameters of the subscription builder - .add_argument("parameters", type=dict, required=False, nullable=True, location="json") - # The properties of the subscription builder - .add_argument("properties", type=dict, required=False, nullable=True, location="json") - # The credentials of the subscription builder - .add_argument("credentials", type=dict, required=False, nullable=True, location="json") -) - - @console_ns.route( "/workspaces/current/trigger-provider//subscriptions/builder/update/", ) class TriggerSubscriptionBuilderUpdateApi(Resource): - @console_ns.expect(parser_update_api) + @console_ns.expect(console_ns.models[TriggerSubscriptionBuilderUpdatePayload.__name__]) @setup_required @login_required @edit_permission_required @@ -214,7 +227,7 @@ class TriggerSubscriptionBuilderUpdateApi(Resource): assert isinstance(user, Account) assert user.current_tenant_id is not None - args = parser_update_api.parse_args() + payload = TriggerSubscriptionBuilderUpdatePayload.model_validate(console_ns.payload or {}) try: return jsonable_encoder( TriggerSubscriptionBuilderService.update_trigger_subscription_builder( @@ -222,10 +235,10 @@ class TriggerSubscriptionBuilderUpdateApi(Resource): provider_id=TriggerProviderID(provider), subscription_builder_id=subscription_builder_id, subscription_builder_updater=SubscriptionBuilderUpdater( - name=args.get("name", None), - parameters=args.get("parameters", None), - properties=args.get("properties", None), - credentials=args.get("credentials", None), + name=payload.name, + parameters=payload.parameters, + properties=payload.properties, + credentials=payload.credentials, ), ) ) @@ -260,7 +273,7 @@ class TriggerSubscriptionBuilderLogsApi(Resource): "/workspaces/current/trigger-provider//subscriptions/builder/build/", ) class TriggerSubscriptionBuilderBuildApi(Resource): - @console_ns.expect(parser_update_api) + @console_ns.expect(console_ns.models[TriggerSubscriptionBuilderUpdatePayload.__name__]) @setup_required @login_required @edit_permission_required @@ -269,7 +282,7 @@ class TriggerSubscriptionBuilderBuildApi(Resource): """Build a subscription instance for a trigger provider""" user = current_user assert user.current_tenant_id is not None - args = parser_update_api.parse_args() + payload = TriggerSubscriptionBuilderUpdatePayload.model_validate(console_ns.payload or {}) try: # Use atomic update_and_build to prevent race conditions TriggerSubscriptionBuilderService.update_and_build_builder( @@ -278,9 +291,9 @@ class TriggerSubscriptionBuilderBuildApi(Resource): provider_id=TriggerProviderID(provider), subscription_builder_id=subscription_builder_id, subscription_builder_updater=SubscriptionBuilderUpdater( - name=args.get("name", 
None),
-                        parameters=args.get("parameters", None),
-                        properties=args.get("properties", None),
+                        name=payload.name,
+                        parameters=payload.parameters,
+                        properties=payload.properties,
                 ),
             )
             return 200
@@ -289,6 +302,65 @@
             raise ValueError(str(e)) from e


+@console_ns.route(
+    "/workspaces/current/trigger-provider//subscriptions/update",
+)
+class TriggerSubscriptionUpdateApi(Resource):
+    @console_ns.expect(console_ns.models[TriggerSubscriptionBuilderUpdatePayload.__name__])
+    @setup_required
+    @login_required
+    @edit_permission_required
+    @account_initialization_required
+    def post(self, subscription_id: str):
+        """Update a subscription instance"""
+        user = current_user
+        assert user.current_tenant_id is not None
+
+        request = TriggerSubscriptionBuilderUpdatePayload.model_validate(console_ns.payload or {})
+
+        subscription = TriggerProviderService.get_subscription_by_id(
+            tenant_id=user.current_tenant_id,
+            subscription_id=subscription_id,
+        )
+        if not subscription:
+            raise NotFoundError(f"Subscription {subscription_id} not found")
+
+        provider_id = TriggerProviderID(subscription.provider_id)
+
+        try:
+            # For a rename only, just update the name
+            rename = request.name is not None and not any((request.credentials, request.parameters, request.properties))
+            # A credential type of UNAUTHORIZED indicates the subscription was created manually.
+            # Manually created subscriptions have no credentials or parameters;
+            # they only have a name and properties (supplied by the user).
+            manually_created = subscription.credential_type == CredentialType.UNAUTHORIZED
+            if rename or manually_created:
+                TriggerProviderService.update_trigger_subscription(
+                    tenant_id=user.current_tenant_id,
+                    subscription_id=subscription_id,
+                    name=request.name,
+                    properties=request.properties,
+                )
+                return 200
+
+            # For the remaining cases (API_KEY, OAUTH2),
+            # we need to call the third-party provider (e.g. 
GitHub) to rebuild the subscription + TriggerProviderService.rebuild_trigger_subscription( + tenant_id=user.current_tenant_id, + name=request.name, + provider_id=provider_id, + subscription_id=subscription_id, + credentials=request.credentials or subscription.credentials, + parameters=request.parameters or subscription.parameters, + ) + return 200 + except ValueError as e: + raise BadRequest(str(e)) + except Exception as e: + logger.exception("Error updating subscription", exc_info=e) + raise + + @console_ns.route( "/workspaces/current/trigger-provider//subscriptions/delete", ) @@ -474,13 +546,6 @@ class TriggerOAuthCallbackApi(Resource): return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback") -parser_oauth_client = ( - reqparse.RequestParser() - .add_argument("client_params", type=dict, required=False, nullable=True, location="json") - .add_argument("enabled", type=bool, required=False, nullable=True, location="json") -) - - @console_ns.route("/workspaces/current/trigger-provider//oauth/client") class TriggerOAuthClientManageApi(Resource): @setup_required @@ -528,7 +593,7 @@ class TriggerOAuthClientManageApi(Resource): logger.exception("Error getting OAuth client", exc_info=e) raise - @console_ns.expect(parser_oauth_client) + @console_ns.expect(console_ns.models[TriggerOAuthClientPayload.__name__]) @setup_required @login_required @is_admin_or_owner_required @@ -538,15 +603,15 @@ class TriggerOAuthClientManageApi(Resource): user = current_user assert user.current_tenant_id is not None - args = parser_oauth_client.parse_args() + payload = TriggerOAuthClientPayload.model_validate(console_ns.payload or {}) try: provider_id = TriggerProviderID(provider) return TriggerProviderService.save_custom_oauth_client_params( tenant_id=user.current_tenant_id, provider_id=provider_id, - client_params=args.get("client_params"), - enabled=args.get("enabled"), + client_params=payload.client_params, + enabled=payload.enabled, ) except ValueError as e: @@ -576,3 +641,36 @@ class TriggerOAuthClientManageApi(Resource): except Exception as e: logger.exception("Error removing OAuth client", exc_info=e) raise + + +@console_ns.route( + "/workspaces/current/trigger-provider//subscriptions/verify/", +) +class TriggerSubscriptionVerifyApi(Resource): + @console_ns.expect(console_ns.models[TriggerSubscriptionBuilderVerifyPayload.__name__]) + @setup_required + @login_required + @edit_permission_required + @account_initialization_required + def post(self, provider, subscription_id): + """Verify credentials for an existing subscription (edit mode only)""" + user = current_user + assert user.current_tenant_id is not None + + verify_request = TriggerSubscriptionBuilderVerifyPayload.model_validate(console_ns.payload or {}) + + try: + result = TriggerProviderService.verify_subscription_credentials( + tenant_id=user.current_tenant_id, + user_id=user.id, + provider_id=TriggerProviderID(provider), + subscription_id=subscription_id, + credentials=verify_request.credentials, + ) + return result + except ValueError as e: + logger.warning("Credential verification failed", exc_info=e) + raise BadRequest(str(e)) from e + except Exception as e: + logger.exception("Error verifying subscription credentials", exc_info=e) + raise BadRequest(str(e)) from e diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py index 909a5ce201..52e6f7d737 100644 --- a/api/controllers/console/workspace/workspace.py +++ b/api/controllers/console/workspace/workspace.py @@ -80,6 +80,9 @@ tenant_fields 
= { "in_trial": fields.Boolean, "trial_end_reason": fields.String, "custom_config": fields.Raw(attribute="custom_config"), + "trial_credits": fields.Integer, + "trial_credits_used": fields.Integer, + "next_credit_reset_date": fields.Integer, } tenants_fields = { diff --git a/api/controllers/console/wraps.py b/api/controllers/console/wraps.py index 4654650c77..781941caa0 100644 --- a/api/controllers/console/wraps.py +++ b/api/controllers/console/wraps.py @@ -9,10 +9,12 @@ from typing import ParamSpec, TypeVar from flask import abort, request from configs import dify_config +from controllers.console.auth.error import AuthenticationFailedError, EmailCodeError from controllers.console.workspace.error import AccountNotInitializedError from enums.cloud_plan import CloudPlan from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.encryption import FieldEncryption from libs.login import current_account_with_tenant from models.account import AccountStatus from models.dataset import RateLimitLog @@ -25,6 +27,14 @@ from .error import NotInitValidateError, NotSetupError, UnauthorizedAndForceLogo P = ParamSpec("P") R = TypeVar("R") +# Field names for decryption +FIELD_NAME_PASSWORD = "password" +FIELD_NAME_CODE = "code" + +# Error messages for decryption failures +ERROR_MSG_INVALID_ENCRYPTED_DATA = "Invalid encrypted data" +ERROR_MSG_INVALID_ENCRYPTED_CODE = "Invalid encrypted code" + def account_initialization_required(view: Callable[P, R]): @wraps(view) @@ -348,14 +358,12 @@ def annotation_import_rate_limit(view: Callable[P, R]): def decorated(*args: P.args, **kwargs: P.kwargs): _, current_tenant_id = current_account_with_tenant() current_time = int(time.time() * 1000) - # Check per-minute rate limit minute_key = f"annotation_import_rate_limit:{current_tenant_id}:1min" redis_client.zadd(minute_key, {current_time: current_time}) redis_client.zremrangebyscore(minute_key, 0, current_time - 60000) minute_count = redis_client.zcard(minute_key) redis_client.expire(minute_key, 120) # 2 minutes TTL - if minute_count > dify_config.ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE: abort( 429, @@ -369,7 +377,6 @@ def annotation_import_rate_limit(view: Callable[P, R]): redis_client.zremrangebyscore(hour_key, 0, current_time - 3600000) hour_count = redis_client.zcard(hour_key) redis_client.expire(hour_key, 7200) # 2 hours TTL - if hour_count > dify_config.ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR: abort( 429, @@ -419,3 +426,75 @@ def annotation_import_concurrency_limit(view: Callable[P, R]): return view(*args, **kwargs) return decorated + + +def _decrypt_field(field_name: str, error_class: type[Exception], error_message: str) -> None: + """ + Helper to decode a Base64 encoded field in the request payload. 
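+    Skips silently when the request has no JSON body or the field is absent.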
+ + Args: + field_name: Name of the field to decode + error_class: Exception class to raise on decoding failure + error_message: Error message to include in the exception + """ + if not request or not request.is_json: + return + # Get the payload dict - it's cached and mutable + payload = request.get_json() + if not payload or field_name not in payload: + return + encoded_value = payload[field_name] + decoded_value = FieldEncryption.decrypt_field(encoded_value) + + # If decoding failed, raise error immediately + if decoded_value is None: + raise error_class(error_message) + + # Update payload dict in-place with decoded value + # Since payload is a mutable dict and get_json() returns the cached reference, + # modifying it will affect all subsequent accesses including console_ns.payload + payload[field_name] = decoded_value + + +def decrypt_password_field(view: Callable[P, R]): + """ + Decorator to decrypt password field in request payload. + + Automatically decrypts the 'password' field if encryption is enabled. + If decryption fails, raises AuthenticationFailedError. + + Usage: + @decrypt_password_field + def post(self): + args = LoginPayload.model_validate(console_ns.payload) + # args.password is now decrypted + """ + + @wraps(view) + def decorated(*args: P.args, **kwargs: P.kwargs): + _decrypt_field(FIELD_NAME_PASSWORD, AuthenticationFailedError, ERROR_MSG_INVALID_ENCRYPTED_DATA) + return view(*args, **kwargs) + + return decorated + + +def decrypt_code_field(view: Callable[P, R]): + """ + Decorator to decrypt verification code field in request payload. + + Automatically decrypts the 'code' field if encryption is enabled. + If decryption fails, raises EmailCodeError. + + Usage: + @decrypt_code_field + def post(self): + args = EmailCodeLoginPayload.model_validate(console_ns.payload) + # args.code is now decrypted + """ + + @wraps(view) + def decorated(*args: P.args, **kwargs: P.kwargs): + _decrypt_field(FIELD_NAME_CODE, EmailCodeError, ERROR_MSG_INVALID_ENCRYPTED_CODE) + return view(*args, **kwargs) + + return decorated diff --git a/api/controllers/files/image_preview.py b/api/controllers/files/image_preview.py index 64f47f426a..04db1c67cb 100644 --- a/api/controllers/files/image_preview.py +++ b/api/controllers/files/image_preview.py @@ -7,6 +7,7 @@ from werkzeug.exceptions import NotFound import services from controllers.common.errors import UnsupportedFileTypeError +from controllers.common.file_response import enforce_download_for_html from controllers.files import files_ns from extensions.ext_database import db from services.account_service import TenantService @@ -138,6 +139,13 @@ class FilePreviewApi(Resource): response.headers["Content-Disposition"] = f"attachment; filename*=UTF-8''{encoded_filename}" response.headers["Content-Type"] = "application/octet-stream" + enforce_download_for_html( + response, + mime_type=upload_file.mime_type, + filename=upload_file.name, + extension=upload_file.extension, + ) + return response diff --git a/api/controllers/files/tool_files.py b/api/controllers/files/tool_files.py index c487a0a915..89aa472015 100644 --- a/api/controllers/files/tool_files.py +++ b/api/controllers/files/tool_files.py @@ -6,6 +6,7 @@ from pydantic import BaseModel, Field from werkzeug.exceptions import Forbidden, NotFound from controllers.common.errors import UnsupportedFileTypeError +from controllers.common.file_response import enforce_download_for_html from controllers.files import files_ns from core.tools.signature import verify_tool_file_signature from 
core.tools.tool_file_manager import ToolFileManager @@ -78,4 +79,11 @@ class ToolFileApi(Resource): encoded_filename = quote(tool_file.name) response.headers["Content-Disposition"] = f"attachment; filename*=UTF-8''{encoded_filename}" + enforce_download_for_html( + response, + mime_type=tool_file.mimetype, + filename=tool_file.name, + extension=extension, + ) + return response diff --git a/api/controllers/files/upload.py b/api/controllers/files/upload.py index 6096a87c56..28ec4b3935 100644 --- a/api/controllers/files/upload.py +++ b/api/controllers/files/upload.py @@ -4,18 +4,18 @@ from flask import request from flask_restx import Resource from flask_restx.api import HTTPStatus from pydantic import BaseModel, Field -from werkzeug.datastructures import FileStorage from werkzeug.exceptions import Forbidden import services from core.file.helpers import verify_plugin_file_signature from core.tools.tool_file_manager import ToolFileManager -from fields.file_fields import build_file_model +from fields.file_fields import FileResponse from ..common.errors import ( FileTooLargeError, UnsupportedFileTypeError, ) +from ..common.schema import register_schema_models from ..console.wraps import setup_required from ..files import files_ns from ..inner_api.plugin.wraps import get_user @@ -35,6 +35,8 @@ files_ns.schema_model( PluginUploadQuery.__name__, PluginUploadQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0) ) +register_schema_models(files_ns, FileResponse) + @files_ns.route("/upload/for-plugin") class PluginUploadFileApi(Resource): @@ -51,7 +53,7 @@ class PluginUploadFileApi(Resource): 415: "Unsupported file type", } ) - @files_ns.marshal_with(build_file_model(files_ns), code=HTTPStatus.CREATED) + @files_ns.response(HTTPStatus.CREATED, "File uploaded", files_ns.models[FileResponse.__name__]) def post(self): """Upload a file for plugin usage. 
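The hunk above shows the pattern this diff applies everywhere: register the Pydantic model on the namespace once, point `response(...)` at the generated schema for the OpenAPI docs, and serialize explicitly with `model_dump(mode="json")` instead of `marshal_with`. A minimal sketch of that shape, reusing the diff's own `register_schema_models` helper; the `Greeting` model and route are hypothetical:

from flask_restx import Namespace, Resource
from pydantic import BaseModel

from controllers.common.schema import register_schema_models

ns = Namespace("example")


class Greeting(BaseModel):
    # Hypothetical response model, standing in for FileResponse and friends.
    message: str


register_schema_models(ns, Greeting)  # exposes ns.models["Greeting"]


@ns.route("/greeting")
class GreetingApi(Resource):
    @ns.response(200, "Success", ns.models[Greeting.__name__])
    def get(self):
        # Explicit serialization replaces marshal_with.
        return Greeting(message="hello").model_dump(mode="json"), 200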
@@ -69,7 +71,7 @@ class PluginUploadFileApi(Resource): """ args = PluginUploadQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore - file: FileStorage | None = request.files.get("file") + file = request.files.get("file") if file is None: raise Forbidden("File is required.") @@ -80,8 +82,8 @@ class PluginUploadFileApi(Resource): user_id = args.user_id user = get_user(tenant_id, user_id) - filename: str | None = file.filename - mimetype: str | None = file.mimetype + filename = file.filename + mimetype = file.mimetype if not filename or not mimetype: raise Forbidden("Invalid request.") @@ -111,22 +113,22 @@ class PluginUploadFileApi(Resource): preview_url = ToolFileManager.sign_file(tool_file_id=tool_file.id, extension=extension) # Create a dictionary with all the necessary attributes - result = { - "id": tool_file.id, - "user_id": tool_file.user_id, - "tenant_id": tool_file.tenant_id, - "conversation_id": tool_file.conversation_id, - "file_key": tool_file.file_key, - "mimetype": tool_file.mimetype, - "original_url": tool_file.original_url, - "name": tool_file.name, - "size": tool_file.size, - "mime_type": mimetype, - "extension": extension, - "preview_url": preview_url, - } + result = FileResponse( + id=tool_file.id, + name=tool_file.name, + size=tool_file.size, + extension=extension, + mime_type=mimetype, + preview_url=preview_url, + source_url=tool_file.original_url, + original_url=tool_file.original_url, + user_id=tool_file.user_id, + tenant_id=tool_file.tenant_id, + conversation_id=tool_file.conversation_id, + file_key=tool_file.file_key, + ) - return result, 201 + return result.model_dump(mode="json"), 201 except services.errors.file.FileTooLargeError as file_too_large_error: raise FileTooLargeError(file_too_large_error.description) except services.errors.file.UnsupportedFileTypeError: diff --git a/api/controllers/service_api/app/annotation.py b/api/controllers/service_api/app/annotation.py index 63c373b50f..85ac9336d6 100644 --- a/api/controllers/service_api/app/annotation.py +++ b/api/controllers/service_api/app/annotation.py @@ -1,7 +1,7 @@ from typing import Literal from flask import request -from flask_restx import Api, Namespace, Resource, fields +from flask_restx import Namespace, Resource, fields from flask_restx.api import HTTPStatus from pydantic import BaseModel, Field @@ -92,7 +92,7 @@ annotation_list_fields = { } -def build_annotation_list_model(api_or_ns: Api | Namespace): +def build_annotation_list_model(api_or_ns: Namespace): """Build the annotation list model for the API or Namespace.""" copied_annotation_list_fields = annotation_list_fields.copy() copied_annotation_list_fields["data"] = fields.List(fields.Nested(build_annotation_model(api_or_ns))) diff --git a/api/controllers/service_api/app/app.py b/api/controllers/service_api/app/app.py index 25d7ccccec..562f5e33cc 100644 --- a/api/controllers/service_api/app/app.py +++ b/api/controllers/service_api/app/app.py @@ -1,6 +1,6 @@ from flask_restx import Resource -from controllers.common.fields import build_parameters_model +from controllers.common.fields import Parameters from controllers.service_api import service_api_ns from controllers.service_api.app.error import AppUnavailableError from controllers.service_api.wraps import validate_app_token @@ -23,7 +23,6 @@ class AppParameterApi(Resource): } ) @validate_app_token - @service_api_ns.marshal_with(build_parameters_model(service_api_ns)) def get(self, app_model: App): """Retrieve app parameters. 
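In the next hunk, dropping `marshal_with` moves response shaping into the model itself: `model_validate` accepts the raw dict from `get_parameters_from_feature_dict` (ignoring undeclared keys by default) and `model_dump(mode="json")` emits only declared fields. A sketch of that round-trip, with a hypothetical two-field model:

from pydantic import BaseModel


class Example(BaseModel):
    # Hypothetical model; Parameters from controllers.common.fields plays this role.
    a: int
    b: str = "default"


raw = {"a": 1, "unknown": "silently dropped"}
out = Example.model_validate(raw).model_dump(mode="json")
assert out == {"a": 1, "b": "default"}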
@@ -45,7 +44,8 @@ class AppParameterApi(Resource): user_input_form = features_dict.get("user_input_form", []) - return get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form) + parameters = get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form) + return Parameters.model_validate(parameters).model_dump(mode="json") @service_api_ns.route("/meta") diff --git a/api/controllers/service_api/app/conversation.py b/api/controllers/service_api/app/conversation.py index be6d837032..62e8258e25 100644 --- a/api/controllers/service_api/app/conversation.py +++ b/api/controllers/service_api/app/conversation.py @@ -3,8 +3,7 @@ from uuid import UUID from flask import request from flask_restx import Resource -from flask_restx._http import HTTPStatus -from pydantic import BaseModel, Field, model_validator +from pydantic import BaseModel, Field, TypeAdapter, field_validator, model_validator from sqlalchemy.orm import Session from werkzeug.exceptions import BadRequest, NotFound @@ -16,9 +15,9 @@ from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from fields.conversation_fields import ( - build_conversation_delete_model, - build_conversation_infinite_scroll_pagination_model, - build_simple_conversation_model, + ConversationDelete, + ConversationInfiniteScrollPagination, + SimpleConversation, ) from fields.conversation_variable_fields import ( build_conversation_variable_infinite_scroll_pagination_model, @@ -51,6 +50,32 @@ class ConversationRenamePayload(BaseModel): class ConversationVariablesQuery(BaseModel): last_id: UUID | None = Field(default=None, description="Last variable ID for pagination") limit: int = Field(default=20, ge=1, le=100, description="Number of variables to return") + variable_name: str | None = Field( + default=None, description="Filter variables by name", min_length=1, max_length=255 + ) + + @field_validator("variable_name", mode="before") + @classmethod + def validate_variable_name(cls, v: str | None) -> str | None: + """ + Validate variable_name to prevent injection attacks. + """ + if v is None: + return v + + # Only allow safe characters: alphanumeric, underscore, hyphen, period + if not v.replace("-", "").replace("_", "").replace(".", "").isalnum(): + raise ValueError( + "Variable name can only contain letters, numbers, hyphens (-), underscores (_), and periods (.)" + ) + + # Prevent SQL injection patterns + dangerous_patterns = ["'", '"', ";", "--", "/*", "*/", "xp_", "sp_"] + for pattern in dangerous_patterns: + if pattern in v.lower(): + raise ValueError(f"Variable name contains invalid characters: {pattern}") + + return v class ConversationVariableUpdatePayload(BaseModel): @@ -79,7 +104,6 @@ class ConversationApi(Resource): } ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY)) - @service_api_ns.marshal_with(build_conversation_infinite_scroll_pagination_model(service_api_ns)) def get(self, app_model: App, end_user: EndUser): """List all conversations for the current user. 
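One nuance in the `variable_name` validator above: quotes, `;`, `--`, and comment tokens can never survive the character whitelist, so for those the `dangerous_patterns` loop is defense in depth, while the `xp_`/`sp_` checks do reject names the whitelist alone would accept. Roughly how it behaves, with illustrative values:

ConversationVariablesQuery(variable_name="user.profile_v2")   # accepted
ConversationVariablesQuery(variable_name="sp_rename_me")      # raises ValidationError (contains "sp_")
ConversationVariablesQuery(variable_name="drop table users")  # raises ValidationError (spaces fail the whitelist)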
@@ -94,7 +118,7 @@ class ConversationApi(Resource): try: with Session(db.engine) as session: - return ConversationService.pagination_by_last_id( + pagination = ConversationService.pagination_by_last_id( session=session, app_model=app_model, user=end_user, @@ -103,6 +127,13 @@ class ConversationApi(Resource): invoke_from=InvokeFrom.SERVICE_API, sort_by=query_args.sort_by, ) + adapter = TypeAdapter(SimpleConversation) + conversations = [adapter.validate_python(item, from_attributes=True) for item in pagination.data] + return ConversationInfiniteScrollPagination( + limit=pagination.limit, + has_more=pagination.has_more, + data=conversations, + ).model_dump(mode="json") except services.errors.conversation.LastConversationNotExistsError: raise NotFound("Last Conversation Not Exists.") @@ -120,7 +151,6 @@ class ConversationDetailApi(Resource): } ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON)) - @service_api_ns.marshal_with(build_conversation_delete_model(service_api_ns), code=HTTPStatus.NO_CONTENT) def delete(self, app_model: App, end_user: EndUser, c_id): """Delete a specific conversation.""" app_mode = AppMode.value_of(app_model.mode) @@ -133,7 +163,7 @@ class ConversationDetailApi(Resource): ConversationService.delete(app_model, conversation_id, end_user) except services.errors.conversation.ConversationNotExistsError: raise NotFound("Conversation Not Exists.") - return {"result": "success"}, 204 + return ConversationDelete(result="success").model_dump(mode="json"), 204 @service_api_ns.route("/conversations//name") @@ -150,7 +180,6 @@ class ConversationRenameApi(Resource): } ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON)) - @service_api_ns.marshal_with(build_simple_conversation_model(service_api_ns)) def post(self, app_model: App, end_user: EndUser, c_id): """Rename a conversation or auto-generate a name.""" app_mode = AppMode.value_of(app_model.mode) @@ -162,7 +191,14 @@ class ConversationRenameApi(Resource): payload = ConversationRenamePayload.model_validate(service_api_ns.payload or {}) try: - return ConversationService.rename(app_model, conversation_id, end_user, payload.name, payload.auto_generate) + conversation = ConversationService.rename( + app_model, conversation_id, end_user, payload.name, payload.auto_generate + ) + return ( + TypeAdapter(SimpleConversation) + .validate_python(conversation, from_attributes=True) + .model_dump(mode="json") + ) except services.errors.conversation.ConversationNotExistsError: raise NotFound("Conversation Not Exists.") @@ -199,7 +235,7 @@ class ConversationVariablesApi(Resource): try: return ConversationService.get_conversational_variable( - app_model, conversation_id, end_user, query_args.limit, last_id + app_model, conversation_id, end_user, query_args.limit, last_id, query_args.variable_name ) except services.errors.conversation.ConversationNotExistsError: raise NotFound("Conversation Not Exists.") diff --git a/api/controllers/service_api/app/file.py b/api/controllers/service_api/app/file.py index ffe4e0b492..6f6dadf768 100644 --- a/api/controllers/service_api/app/file.py +++ b/api/controllers/service_api/app/file.py @@ -10,13 +10,16 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) +from controllers.common.schema import register_schema_models from controllers.service_api import service_api_ns from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token from extensions.ext_database import db -from 
fields.file_fields import build_file_model +from fields.file_fields import FileResponse from models import App, EndUser from services.file_service import FileService +register_schema_models(service_api_ns, FileResponse) + @service_api_ns.route("/files/upload") class FileApi(Resource): @@ -31,8 +34,8 @@ class FileApi(Resource): 415: "Unsupported file type", } ) - @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.FORM)) - @service_api_ns.marshal_with(build_file_model(service_api_ns), code=HTTPStatus.CREATED) + @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.FORM)) # type: ignore + @service_api_ns.response(HTTPStatus.CREATED, "File uploaded", service_api_ns.models[FileResponse.__name__]) def post(self, app_model: App, end_user: EndUser): """Upload a file for use in conversations. @@ -64,4 +67,5 @@ class FileApi(Resource): except services.errors.file.UnsupportedFileTypeError: raise UnsupportedFileTypeError() - return upload_file, 201 + response = FileResponse.model_validate(upload_file, from_attributes=True) + return response.model_dump(mode="json"), 201 diff --git a/api/controllers/service_api/app/file_preview.py b/api/controllers/service_api/app/file_preview.py index 60f422b88e..f853a124ef 100644 --- a/api/controllers/service_api/app/file_preview.py +++ b/api/controllers/service_api/app/file_preview.py @@ -5,6 +5,7 @@ from flask import Response, request from flask_restx import Resource from pydantic import BaseModel, Field +from controllers.common.file_response import enforce_download_for_html from controllers.common.schema import register_schema_model from controllers.service_api import service_api_ns from controllers.service_api.app.error import ( @@ -183,6 +184,13 @@ class FilePreviewApi(Resource): # Override content-type for downloads to force download response.headers["Content-Type"] = "application/octet-stream" + enforce_download_for_html( + response, + mime_type=upload_file.mime_type, + filename=upload_file.name, + extension=upload_file.extension, + ) + # Add caching headers for performance response.headers["Cache-Control"] = "public, max-age=3600" # Cache for 1 hour diff --git a/api/controllers/service_api/app/message.py b/api/controllers/service_api/app/message.py index d342f4e661..8981bbd7d5 100644 --- a/api/controllers/service_api/app/message.py +++ b/api/controllers/service_api/app/message.py @@ -1,11 +1,10 @@ -import json import logging from typing import Literal from uuid import UUID from flask import request -from flask_restx import Namespace, Resource, fields -from pydantic import BaseModel, Field +from flask_restx import Resource +from pydantic import BaseModel, Field, TypeAdapter from werkzeug.exceptions import BadRequest, InternalServerError, NotFound import services @@ -14,10 +13,8 @@ from controllers.service_api import service_api_ns from controllers.service_api.app.error import NotChatAppError from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token from core.app.entities.app_invoke_entities import InvokeFrom -from fields.conversation_fields import build_message_file_model -from fields.message_fields import build_agent_thought_model, build_feedback_model -from fields.raws import FilesContainedField -from libs.helper import TimestampField +from fields.conversation_fields import ResultResponse +from fields.message_fields import MessageInfiniteScrollPagination, MessageListItem from models.model import App, AppMode, EndUser from services.errors.message import ( FirstMessageNotExistsError, @@ 
-48,49 +45,6 @@ class FeedbackListQuery(BaseModel): register_schema_models(service_api_ns, MessageListQuery, MessageFeedbackPayload, FeedbackListQuery) -def build_message_model(api_or_ns: Namespace): - """Build the message model for the API or Namespace.""" - # First build the nested models - feedback_model = build_feedback_model(api_or_ns) - agent_thought_model = build_agent_thought_model(api_or_ns) - message_file_model = build_message_file_model(api_or_ns) - - # Then build the message fields with nested models - message_fields = { - "id": fields.String, - "conversation_id": fields.String, - "parent_message_id": fields.String, - "inputs": FilesContainedField, - "query": fields.String, - "answer": fields.String(attribute="re_sign_file_url_answer"), - "message_files": fields.List(fields.Nested(message_file_model)), - "feedback": fields.Nested(feedback_model, attribute="user_feedback", allow_null=True), - "retriever_resources": fields.Raw( - attribute=lambda obj: json.loads(obj.message_metadata).get("retriever_resources", []) - if obj.message_metadata - else [] - ), - "created_at": TimestampField, - "agent_thoughts": fields.List(fields.Nested(agent_thought_model)), - "status": fields.String, - "error": fields.String, - } - return api_or_ns.model("Message", message_fields) - - -def build_message_infinite_scroll_pagination_model(api_or_ns: Namespace): - """Build the message infinite scroll pagination model for the API or Namespace.""" - # Build the nested message model first - message_model = build_message_model(api_or_ns) - - message_infinite_scroll_pagination_fields = { - "limit": fields.Integer, - "has_more": fields.Boolean, - "data": fields.List(fields.Nested(message_model)), - } - return api_or_ns.model("MessageInfiniteScrollPagination", message_infinite_scroll_pagination_fields) - - @service_api_ns.route("/messages") class MessageListApi(Resource): @service_api_ns.expect(service_api_ns.models[MessageListQuery.__name__]) @@ -104,7 +58,6 @@ class MessageListApi(Resource): } ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY)) - @service_api_ns.marshal_with(build_message_infinite_scroll_pagination_model(service_api_ns)) def get(self, app_model: App, end_user: EndUser): """List messages in a conversation. 
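The deleted flask_restx model builders are replaced by the `TypeAdapter(...).validate_python(obj, from_attributes=True)` idiom visible in the next hunk: it reads attributes off an ORM row and returns a validated Pydantic instance. A self-contained sketch with a hypothetical row class:

from pydantic import BaseModel, TypeAdapter


class Row:
    # Stand-in for a SQLAlchemy model instance.
    id = "msg-1"
    query = "hi"


class MessageOut(BaseModel):
    # Hypothetical slim schema; MessageListItem plays this role in the diff.
    id: str
    query: str


item = TypeAdapter(MessageOut).validate_python(Row(), from_attributes=True)
assert item.model_dump() == {"id": "msg-1", "query": "hi"}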
@@ -119,9 +72,16 @@ class MessageListApi(Resource): first_id = str(query_args.first_id) if query_args.first_id else None try: - return MessageService.pagination_by_first_id( + pagination = MessageService.pagination_by_first_id( app_model, end_user, conversation_id, first_id, query_args.limit ) + adapter = TypeAdapter(MessageListItem) + items = [adapter.validate_python(message, from_attributes=True) for message in pagination.data] + return MessageInfiniteScrollPagination( + limit=pagination.limit, + has_more=pagination.has_more, + data=items, + ).model_dump(mode="json") except services.errors.conversation.ConversationNotExistsError: raise NotFound("Conversation Not Exists.") except FirstMessageNotExistsError: @@ -162,7 +122,7 @@ class MessageFeedbackApi(Resource): except MessageNotExistsError: raise NotFound("Message Not Exists.") - return {"result": "success"} + return ResultResponse(result="success").model_dump(mode="json") @service_api_ns.route("/app/feedbacks") diff --git a/api/controllers/service_api/app/site.py b/api/controllers/service_api/app/site.py index 9f8324a84e..8b47a887bb 100644 --- a/api/controllers/service_api/app/site.py +++ b/api/controllers/service_api/app/site.py @@ -1,7 +1,7 @@ from flask_restx import Resource from werkzeug.exceptions import Forbidden -from controllers.common.fields import build_site_model +from controllers.common.fields import Site as SiteResponse from controllers.service_api import service_api_ns from controllers.service_api.wraps import validate_app_token from extensions.ext_database import db @@ -23,7 +23,6 @@ class AppSiteApi(Resource): } ) @validate_app_token - @service_api_ns.marshal_with(build_site_model(service_api_ns)) def get(self, app_model: App): """Retrieve app site info. @@ -38,4 +37,4 @@ class AppSiteApi(Resource): if app_model.tenant.status == TenantStatus.ARCHIVE: raise Forbidden() - return site + return SiteResponse.model_validate(site).model_dump(mode="json") diff --git a/api/controllers/service_api/app/workflow.py b/api/controllers/service_api/app/workflow.py index 4964888fd6..6a549fc926 100644 --- a/api/controllers/service_api/app/workflow.py +++ b/api/controllers/service_api/app/workflow.py @@ -3,7 +3,7 @@ from typing import Any, Literal from dateutil.parser import isoparse from flask import request -from flask_restx import Api, Namespace, Resource, fields +from flask_restx import Namespace, Resource, fields from pydantic import BaseModel, Field from sqlalchemy.orm import Session, sessionmaker from werkzeug.exceptions import BadRequest, InternalServerError, NotFound @@ -78,7 +78,7 @@ workflow_run_fields = { } -def build_workflow_run_model(api_or_ns: Api | Namespace): +def build_workflow_run_model(api_or_ns: Namespace): """Build the workflow run model for the API or Namespace.""" return api_or_ns.model("WorkflowRun", workflow_run_fields) diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index 7692aeed23..94faf8dd42 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -13,7 +13,6 @@ from controllers.service_api.dataset.error import DatasetInUseError, DatasetName from controllers.service_api.wraps import ( DatasetApiResource, cloud_edition_billing_rate_limit_check, - validate_dataset_token, ) from core.model_runtime.entities.model_entities import ModelType from core.provider_manager import ProviderManager @@ -49,7 +48,7 @@ class DatasetUpdatePayload(BaseModel): embedding_model: str | None = None 
embedding_model_provider: str | None = None retrieval_model: RetrievalModel | None = None - partial_member_list: list[str] | None = None + partial_member_list: list[dict[str, str]] | None = None external_retrieval_model: dict[str, Any] | None = None external_knowledge_id: str | None = None external_knowledge_api_id: str | None = None @@ -460,9 +459,8 @@ class DatasetTagsApi(DatasetApiResource): 401: "Unauthorized - invalid API token", } ) - @validate_dataset_token @service_api_ns.marshal_with(build_dataset_tag_fields(service_api_ns)) - def get(self, _, dataset_id): + def get(self, _): """Get all knowledge type tags.""" assert isinstance(current_user, Account) cid = current_user.current_tenant_id @@ -482,8 +480,7 @@ class DatasetTagsApi(DatasetApiResource): } ) @service_api_ns.marshal_with(build_dataset_tag_fields(service_api_ns)) - @validate_dataset_token - def post(self, _, dataset_id): + def post(self, _): """Add a knowledge type tag.""" assert isinstance(current_user, Account) if not (current_user.has_edit_permission or current_user.is_dataset_editor): @@ -506,8 +503,7 @@ class DatasetTagsApi(DatasetApiResource): } ) @service_api_ns.marshal_with(build_dataset_tag_fields(service_api_ns)) - @validate_dataset_token - def patch(self, _, dataset_id): + def patch(self, _): assert isinstance(current_user, Account) if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() @@ -533,9 +529,8 @@ class DatasetTagsApi(DatasetApiResource): 403: "Forbidden - insufficient permissions", } ) - @validate_dataset_token @edit_permission_required - def delete(self, _, dataset_id): + def delete(self, _): """Delete a knowledge type tag.""" payload = TagDeletePayload.model_validate(service_api_ns.payload or {}) TagService.delete_tag(payload.tag_id) @@ -555,8 +550,7 @@ class DatasetTagBindingApi(DatasetApiResource): 403: "Forbidden - insufficient permissions", } ) - @validate_dataset_token - def post(self, _, dataset_id): + def post(self, _): # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator assert isinstance(current_user, Account) if not (current_user.has_edit_permission or current_user.is_dataset_editor): @@ -580,8 +574,7 @@ class DatasetTagUnbindingApi(DatasetApiResource): 403: "Forbidden - insufficient permissions", } ) - @validate_dataset_token - def post(self, _, dataset_id): + def post(self, _): # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator assert isinstance(current_user, Account) if not (current_user.has_edit_permission or current_user.is_dataset_editor): @@ -604,7 +597,6 @@ class DatasetTagsBindingStatusApi(DatasetApiResource): 401: "Unauthorized - invalid API token", } ) - @validate_dataset_token def get(self, _, *args, **kwargs): """Get all knowledge type tags.""" dataset_id = kwargs.get("dataset_id") diff --git a/api/controllers/service_api/dataset/hit_testing.py b/api/controllers/service_api/dataset/hit_testing.py index d81287d56f..8dbb690901 100644 --- a/api/controllers/service_api/dataset/hit_testing.py +++ b/api/controllers/service_api/dataset/hit_testing.py @@ -24,7 +24,7 @@ class HitTestingApi(DatasetApiResource, DatasetsHitTestingBase): dataset_id_str = str(dataset_id) dataset = self.get_and_validate_dataset(dataset_id_str) - args = self.parse_args() + args = self.parse_args(service_api_ns.payload) self.hit_testing_args_check(args) return self.perform_hit_testing(dataset, args) diff --git 
a/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py index 0a2017e2bd..70b5030237 100644 --- a/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py @@ -174,7 +174,7 @@ class PipelineRunApi(DatasetApiResource): pipeline=pipeline, user=current_user, args=payload.model_dump(), - invoke_from=InvokeFrom.PUBLISHED if payload.is_published else InvokeFrom.DEBUGGER, + invoke_from=InvokeFrom.PUBLISHED_PIPELINE if payload.is_published else InvokeFrom.DEBUGGER, streaming=payload.response_mode == "streaming", ) diff --git a/api/controllers/web/app.py b/api/controllers/web/app.py index 60193f5f15..62ea532eac 100644 --- a/api/controllers/web/app.py +++ b/api/controllers/web/app.py @@ -1,14 +1,13 @@ import logging from flask import request -from flask_restx import Resource, marshal_with, reqparse +from flask_restx import Resource +from pydantic import BaseModel, ConfigDict, Field from werkzeug.exceptions import Unauthorized from constants import HEADER_NAME_APP_CODE from controllers.common import fields -from controllers.web import web_ns -from controllers.web.error import AppUnavailableError -from controllers.web.wraps import WebApiResource +from controllers.common.schema import register_schema_models from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict from libs.passport import PassportService from libs.token import extract_webapp_passport @@ -18,9 +17,23 @@ from services.enterprise.enterprise_service import EnterpriseService from services.feature_service import FeatureService from services.webapp_auth_service import WebAppAuthService +from . 
import web_ns +from .error import AppUnavailableError +from .wraps import WebApiResource + logger = logging.getLogger(__name__) +class AppAccessModeQuery(BaseModel): + model_config = ConfigDict(populate_by_name=True) + + app_id: str | None = Field(default=None, alias="appId", description="Application ID") + app_code: str | None = Field(default=None, alias="appCode", description="Application code") + + +register_schema_models(web_ns, AppAccessModeQuery) + + @web_ns.route("/parameters") class AppParameterApi(WebApiResource): """Resource for app variables.""" @@ -37,7 +50,6 @@ class AppParameterApi(WebApiResource): 500: "Internal Server Error", } ) - @marshal_with(fields.parameters_fields) def get(self, app_model: App, end_user): """Retrieve app parameters.""" if app_model.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}: @@ -56,7 +68,8 @@ class AppParameterApi(WebApiResource): user_input_form = features_dict.get("user_input_form", []) - return get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form) + parameters = get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form) + return fields.Parameters.model_validate(parameters).model_dump(mode="json") @web_ns.route("/meta") @@ -96,21 +109,16 @@ class AppAccessMode(Resource): } ) def get(self): - parser = ( - reqparse.RequestParser() - .add_argument("appId", type=str, required=False, location="args") - .add_argument("appCode", type=str, required=False, location="args") - ) - args = parser.parse_args() + raw_args = request.args.to_dict() + args = AppAccessModeQuery.model_validate(raw_args) features = FeatureService.get_system_features() if not features.webapp_auth.enabled: return {"accessMode": "public"} - app_id = args.get("appId") - if args.get("appCode"): - app_code = args["appCode"] - app_id = AppService.get_app_id_by_code(app_code) + app_id = args.app_id + if args.app_code: + app_id = AppService.get_app_id_by_code(args.app_code) if not app_id: raise ValueError("appId or appCode must be provided") diff --git a/api/controllers/web/audio.py b/api/controllers/web/audio.py index b9fef48c4d..15828cc208 100644 --- a/api/controllers/web/audio.py +++ b/api/controllers/web/audio.py @@ -1,7 +1,8 @@ import logging from flask import request -from flask_restx import fields, marshal_with, reqparse +from flask_restx import fields, marshal_with +from pydantic import BaseModel, field_validator from werkzeug.exceptions import InternalServerError import services @@ -20,6 +21,7 @@ from controllers.web.error import ( from controllers.web.wraps import WebApiResource from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError from core.model_runtime.errors.invoke import InvokeError +from libs.helper import uuid_value from models.model import App from services.audio_service import AudioService from services.errors.audio import ( @@ -29,6 +31,25 @@ from services.errors.audio import ( UnsupportedAudioTypeServiceError, ) +from ..common.schema import register_schema_models + + +class TextToAudioPayload(BaseModel): + message_id: str | None = None + voice: str | None = None + text: str | None = None + streaming: bool | None = None + + @field_validator("message_id") + @classmethod + def validate_message_id(cls, value: str | None) -> str | None: + if value is None: + return value + return uuid_value(value) + + +register_schema_models(web_ns, TextToAudioPayload) + logger = logging.getLogger(__name__) @@ -88,6 +109,7 @@ class AudioApi(WebApiResource): 
@web_ns.route("/text-to-audio") class TextApi(WebApiResource): + @web_ns.expect(web_ns.models[TextToAudioPayload.__name__]) @web_ns.doc("Text to Audio") @web_ns.doc(description="Convert text to audio using text-to-speech service.") @web_ns.doc( @@ -102,18 +124,11 @@ class TextApi(WebApiResource): def post(self, app_model: App, end_user): """Convert text to audio""" try: - parser = ( - reqparse.RequestParser() - .add_argument("message_id", type=str, required=False, location="json") - .add_argument("voice", type=str, location="json") - .add_argument("text", type=str, location="json") - .add_argument("streaming", type=bool, location="json") - ) - args = parser.parse_args() + payload = TextToAudioPayload.model_validate(web_ns.payload or {}) - message_id = args.get("message_id", None) - text = args.get("text", None) - voice = args.get("voice", None) + message_id = payload.message_id + text = payload.text + voice = payload.voice response = AudioService.transcript_tts( app_model=app_model, text=text, voice=voice, end_user=end_user.external_user_id, message_id=message_id ) diff --git a/api/controllers/web/completion.py b/api/controllers/web/completion.py index e8a4698375..a97d745471 100644 --- a/api/controllers/web/completion.py +++ b/api/controllers/web/completion.py @@ -1,9 +1,11 @@ import logging +from typing import Any, Literal -from flask_restx import reqparse +from pydantic import BaseModel, Field, field_validator from werkzeug.exceptions import InternalServerError, NotFound import services +from controllers.common.schema import register_schema_models from controllers.web import web_ns from controllers.web.error import ( AppUnavailableError, @@ -34,25 +36,44 @@ from services.errors.llm import InvokeRateLimitError logger = logging.getLogger(__name__) +class CompletionMessagePayload(BaseModel): + inputs: dict[str, Any] = Field(description="Input variables for the completion") + query: str = Field(default="", description="Query text for completion") + files: list[dict[str, Any]] | None = Field(default=None, description="Files to be processed") + response_mode: Literal["blocking", "streaming"] | None = Field( + default=None, description="Response mode: blocking or streaming" + ) + retriever_from: str = Field(default="web_app", description="Source of retriever") + + +class ChatMessagePayload(BaseModel): + inputs: dict[str, Any] = Field(description="Input variables for the chat") + query: str = Field(description="User query/message") + files: list[dict[str, Any]] | None = Field(default=None, description="Files to be processed") + response_mode: Literal["blocking", "streaming"] | None = Field( + default=None, description="Response mode: blocking or streaming" + ) + conversation_id: str | None = Field(default=None, description="Conversation ID") + parent_message_id: str | None = Field(default=None, description="Parent message ID") + retriever_from: str = Field(default="web_app", description="Source of retriever") + + @field_validator("conversation_id", "parent_message_id") + @classmethod + def validate_uuid(cls, value: str | None) -> str | None: + if value is None: + return value + return uuid_value(value) + + +register_schema_models(web_ns, CompletionMessagePayload, ChatMessagePayload) + + # define completion api for user @web_ns.route("/completion-messages") class CompletionApi(WebApiResource): @web_ns.doc("Create Completion Message") @web_ns.doc(description="Create a completion message for text generation applications.") - @web_ns.doc( - params={ - "inputs": {"description": "Input variables for 
the completion", "type": "object", "required": True}, - "query": {"description": "Query text for completion", "type": "string", "required": False}, - "files": {"description": "Files to be processed", "type": "array", "required": False}, - "response_mode": { - "description": "Response mode: blocking or streaming", - "type": "string", - "enum": ["blocking", "streaming"], - "required": False, - }, - "retriever_from": {"description": "Source of retriever", "type": "string", "required": False}, - } - ) + @web_ns.expect(web_ns.models[CompletionMessagePayload.__name__]) @web_ns.doc( responses={ 200: "Success", @@ -67,18 +88,10 @@ class CompletionApi(WebApiResource): if app_model.mode != AppMode.COMPLETION: raise NotCompletionAppError() - parser = ( - reqparse.RequestParser() - .add_argument("inputs", type=dict, required=True, location="json") - .add_argument("query", type=str, location="json", default="") - .add_argument("files", type=list, required=False, location="json") - .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - .add_argument("retriever_from", type=str, required=False, default="web_app", location="json") - ) + payload = CompletionMessagePayload.model_validate(web_ns.payload or {}) + args = payload.model_dump(exclude_none=True) - args = parser.parse_args() - - streaming = args["response_mode"] == "streaming" + streaming = payload.response_mode == "streaming" args["auto_generate_name"] = False try: @@ -142,22 +155,7 @@ class CompletionStopApi(WebApiResource): class ChatApi(WebApiResource): @web_ns.doc("Create Chat Message") @web_ns.doc(description="Create a chat message for conversational applications.") - @web_ns.doc( - params={ - "inputs": {"description": "Input variables for the chat", "type": "object", "required": True}, - "query": {"description": "User query/message", "type": "string", "required": True}, - "files": {"description": "Files to be processed", "type": "array", "required": False}, - "response_mode": { - "description": "Response mode: blocking or streaming", - "type": "string", - "enum": ["blocking", "streaming"], - "required": False, - }, - "conversation_id": {"description": "Conversation UUID", "type": "string", "required": False}, - "parent_message_id": {"description": "Parent message UUID", "type": "string", "required": False}, - "retriever_from": {"description": "Source of retriever", "type": "string", "required": False}, - } - ) + @web_ns.expect(web_ns.models[ChatMessagePayload.__name__]) @web_ns.doc( responses={ 200: "Success", @@ -173,20 +171,10 @@ class ChatApi(WebApiResource): if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = ( - reqparse.RequestParser() - .add_argument("inputs", type=dict, required=True, location="json") - .add_argument("query", type=str, required=True, location="json") - .add_argument("files", type=list, required=False, location="json") - .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - .add_argument("conversation_id", type=uuid_value, location="json") - .add_argument("parent_message_id", type=uuid_value, required=False, location="json") - .add_argument("retriever_from", type=str, required=False, default="web_app", location="json") - ) + payload = ChatMessagePayload.model_validate(web_ns.payload or {}) + args = payload.model_dump(exclude_none=True) - args = parser.parse_args() - - streaming = args["response_mode"] == "streaming" + streaming = payload.response_mode == "streaming" 
args["auto_generate_name"] = False try: diff --git a/api/controllers/web/conversation.py b/api/controllers/web/conversation.py index 86e19423e5..e76649495a 100644 --- a/api/controllers/web/conversation.py +++ b/api/controllers/web/conversation.py @@ -1,14 +1,21 @@ -from flask_restx import fields, marshal_with, reqparse -from flask_restx.inputs import int_range +from typing import Literal + +from flask import request +from pydantic import BaseModel, Field, TypeAdapter, field_validator, model_validator from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound +from controllers.common.schema import register_schema_models from controllers.web import web_ns from controllers.web.error import NotChatAppError from controllers.web.wraps import WebApiResource from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db -from fields.conversation_fields import conversation_infinite_scroll_pagination_fields, simple_conversation_fields +from fields.conversation_fields import ( + ConversationInfiniteScrollPagination, + ResultResponse, + SimpleConversation, +) from libs.helper import uuid_value from models.model import AppMode from services.conversation_service import ConversationService @@ -16,6 +23,35 @@ from services.errors.conversation import ConversationNotExistsError, LastConvers from services.web_conversation_service import WebConversationService +class ConversationListQuery(BaseModel): + last_id: str | None = None + limit: int = Field(default=20, ge=1, le=100) + pinned: bool | None = None + sort_by: Literal["created_at", "-created_at", "updated_at", "-updated_at"] = "-updated_at" + + @field_validator("last_id") + @classmethod + def validate_last_id(cls, value: str | None) -> str | None: + if value is None: + return value + return uuid_value(value) + + +class ConversationRenamePayload(BaseModel): + name: str | None = None + auto_generate: bool = False + + @model_validator(mode="after") + def validate_name_requirement(self): + if not self.auto_generate: + if self.name is None or not self.name.strip(): + raise ValueError("name is required when auto_generate is false") + return self + + +register_schema_models(web_ns, ConversationListQuery, ConversationRenamePayload) + + @web_ns.route("/conversations") class ConversationListApi(WebApiResource): @web_ns.doc("Get Conversation List") @@ -54,54 +90,39 @@ class ConversationListApi(WebApiResource): 500: "Internal Server Error", } ) - @marshal_with(conversation_infinite_scroll_pagination_fields) def get(self, app_model, end_user): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = ( - reqparse.RequestParser() - .add_argument("last_id", type=uuid_value, location="args") - .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - .add_argument("pinned", type=str, choices=["true", "false", None], location="args") - .add_argument( - "sort_by", - type=str, - choices=["created_at", "-created_at", "updated_at", "-updated_at"], - required=False, - default="-updated_at", - location="args", - ) - ) - args = parser.parse_args() - - pinned = None - if "pinned" in args and args["pinned"] is not None: - pinned = args["pinned"] == "true" + raw_args = request.args.to_dict() + query = ConversationListQuery.model_validate(raw_args) try: with Session(db.engine) as session: - return WebConversationService.pagination_by_last_id( + pagination = 
WebConversationService.pagination_by_last_id( session=session, app_model=app_model, user=end_user, - last_id=args["last_id"], - limit=args["limit"], + last_id=query.last_id, + limit=query.limit, invoke_from=InvokeFrom.WEB_APP, - pinned=pinned, - sort_by=args["sort_by"], + pinned=query.pinned, + sort_by=query.sort_by, ) + adapter = TypeAdapter(SimpleConversation) + conversations = [adapter.validate_python(item, from_attributes=True) for item in pagination.data] + return ConversationInfiniteScrollPagination( + limit=pagination.limit, + has_more=pagination.has_more, + data=conversations, + ).model_dump(mode="json") except LastConversationNotExistsError: raise NotFound("Last Conversation Not Exists.") @web_ns.route("/conversations/") class ConversationApi(WebApiResource): - delete_response_fields = { - "result": fields.String, - } - @web_ns.doc("Delete Conversation") @web_ns.doc(description="Delete a specific conversation.") @web_ns.doc(params={"c_id": {"description": "Conversation UUID", "type": "string", "required": True}}) @@ -115,7 +136,6 @@ class ConversationApi(WebApiResource): 500: "Internal Server Error", } ) - @marshal_with(delete_response_fields) def delete(self, app_model, end_user, c_id): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -126,7 +146,7 @@ class ConversationApi(WebApiResource): ConversationService.delete(app_model, conversation_id, end_user) except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") - return {"result": "success"}, 204 + return ResultResponse(result="success").model_dump(mode="json"), 204 @web_ns.route("/conversations//name") @@ -155,7 +175,6 @@ class ConversationRenameApi(WebApiResource): 500: "Internal Server Error", } ) - @marshal_with(simple_conversation_fields) def post(self, app_model, end_user, c_id): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -163,25 +182,23 @@ class ConversationRenameApi(WebApiResource): conversation_id = str(c_id) - parser = ( - reqparse.RequestParser() - .add_argument("name", type=str, required=False, location="json") - .add_argument("auto_generate", type=bool, required=False, default=False, location="json") - ) - args = parser.parse_args() + payload = ConversationRenamePayload.model_validate(web_ns.payload or {}) try: - return ConversationService.rename(app_model, conversation_id, end_user, args["name"], args["auto_generate"]) + conversation = ConversationService.rename( + app_model, conversation_id, end_user, payload.name, payload.auto_generate + ) + return ( + TypeAdapter(SimpleConversation) + .validate_python(conversation, from_attributes=True) + .model_dump(mode="json") + ) except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") @web_ns.route("/conversations//pin") class ConversationPinApi(WebApiResource): - pin_response_fields = { - "result": fields.String, - } - @web_ns.doc("Pin Conversation") @web_ns.doc(description="Pin a specific conversation to keep it at the top of the list.") @web_ns.doc(params={"c_id": {"description": "Conversation UUID", "type": "string", "required": True}}) @@ -195,7 +212,6 @@ class ConversationPinApi(WebApiResource): 500: "Internal Server Error", } ) - @marshal_with(pin_response_fields) def patch(self, app_model, end_user, c_id): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -208,15 +224,11 @@ class 
ConversationPinApi(WebApiResource): except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") - return {"result": "success"} + return ResultResponse(result="success").model_dump(mode="json") @web_ns.route("/conversations//unpin") class ConversationUnPinApi(WebApiResource): - unpin_response_fields = { - "result": fields.String, - } - @web_ns.doc("Unpin Conversation") @web_ns.doc(description="Unpin a specific conversation to remove it from the top of the list.") @web_ns.doc(params={"c_id": {"description": "Conversation UUID", "type": "string", "required": True}}) @@ -230,7 +242,6 @@ class ConversationUnPinApi(WebApiResource): 500: "Internal Server Error", } ) - @marshal_with(unpin_response_fields) def patch(self, app_model, end_user, c_id): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -239,4 +250,4 @@ class ConversationUnPinApi(WebApiResource): conversation_id = str(c_id) WebConversationService.unpin(app_model, conversation_id, end_user) - return {"result": "success"} + return ResultResponse(result="success").model_dump(mode="json") diff --git a/api/controllers/web/files.py b/api/controllers/web/files.py index 80ad61e549..0036c90800 100644 --- a/api/controllers/web/files.py +++ b/api/controllers/web/files.py @@ -1,5 +1,4 @@ from flask import request -from flask_restx import marshal_with import services from controllers.common.errors import ( @@ -9,12 +8,15 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) +from controllers.common.schema import register_schema_models from controllers.web import web_ns from controllers.web.wraps import WebApiResource from extensions.ext_database import db -from fields.file_fields import build_file_model +from fields.file_fields import FileResponse from services.file_service import FileService +register_schema_models(web_ns, FileResponse) + @web_ns.route("/files/upload") class FileApi(WebApiResource): @@ -28,7 +30,7 @@ class FileApi(WebApiResource): 415: "Unsupported file type", } ) - @marshal_with(build_file_model(web_ns)) + @web_ns.response(201, "File uploaded successfully", web_ns.models[FileResponse.__name__]) def post(self, app_model, end_user): """Upload a file for use in web applications. 
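Files throughout this diff call `register_schema_models(ns, ...)` and then reference the result as `ns.models[Model.__name__]` in `@expect`/`@response` decorators, as `FileApi` above does with `FileResponse`. The real helper lives in `controllers/common/schema.py`; a plausible minimal sketch, assuming it simply registers each model's JSON Schema under its class name via flask_restx's `schema_model`:

```python
from flask_restx import Namespace
from pydantic import BaseModel

def register_models(ns: Namespace, *models: type[BaseModel]) -> None:
    """Hypothetical stand-in for register_schema_models; the real
    implementation may handle $refs, reuse, and naming differently."""
    for model_cls in models:
        # schema_model stores a raw JSON Schema in ns.models under the class
        # name, which is what makes ns.models[ModelCls.__name__] resolvable.
        ns.schema_model(model_cls.__name__, model_cls.model_json_schema())
```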
@@ -81,4 +83,5 @@ class FileApi(WebApiResource): except services.errors.file.UnsupportedFileTypeError: raise UnsupportedFileTypeError() - return upload_file, 201 + response = FileResponse.model_validate(upload_file, from_attributes=True) + return response.model_dump(mode="json"), 201 diff --git a/api/controllers/web/forgot_password.py b/api/controllers/web/forgot_password.py index b9e391e049..690b76655f 100644 --- a/api/controllers/web/forgot_password.py +++ b/api/controllers/web/forgot_password.py @@ -2,10 +2,12 @@ import base64 import secrets from flask import request -from flask_restx import Resource, reqparse +from flask_restx import Resource +from pydantic import BaseModel, Field, field_validator from sqlalchemy import select from sqlalchemy.orm import Session +from controllers.common.schema import register_schema_models from controllers.console.auth.error import ( AuthenticationFailedError, EmailCodeError, @@ -18,14 +20,40 @@ from controllers.console.error import EmailSendIpLimitError from controllers.console.wraps import email_password_login_enabled, only_edition_enterprise, setup_required from controllers.web import web_ns from extensions.ext_database import db -from libs.helper import email, extract_remote_ip +from libs.helper import EmailStr, extract_remote_ip from libs.password import hash_password, valid_password from models import Account from services.account_service import AccountService +class ForgotPasswordSendPayload(BaseModel): + email: EmailStr + language: str | None = None + + +class ForgotPasswordCheckPayload(BaseModel): + email: EmailStr + code: str + token: str = Field(min_length=1) + + +class ForgotPasswordResetPayload(BaseModel): + token: str = Field(min_length=1) + new_password: str + password_confirm: str + + @field_validator("new_password", "password_confirm") + @classmethod + def validate_password(cls, value: str) -> str: + return valid_password(value) + + +register_schema_models(web_ns, ForgotPasswordSendPayload, ForgotPasswordCheckPayload, ForgotPasswordResetPayload) + + @web_ns.route("/forgot-password") class ForgotPasswordSendEmailApi(Resource): + @web_ns.expect(web_ns.models[ForgotPasswordSendPayload.__name__]) @only_edition_enterprise @setup_required @email_password_login_enabled @@ -40,35 +68,31 @@ class ForgotPasswordSendEmailApi(Resource): } ) def post(self): - parser = ( - reqparse.RequestParser() - .add_argument("email", type=email, required=True, location="json") - .add_argument("language", type=str, required=False, location="json") - ) - args = parser.parse_args() + payload = ForgotPasswordSendPayload.model_validate(web_ns.payload or {}) ip_address = extract_remote_ip(request) if AccountService.is_email_send_ip_limit(ip_address): raise EmailSendIpLimitError() - if args["language"] is not None and args["language"] == "zh-Hans": + if payload.language == "zh-Hans": language = "zh-Hans" else: language = "en-US" with Session(db.engine) as session: - account = session.execute(select(Account).filter_by(email=args["email"])).scalar_one_or_none() + account = session.execute(select(Account).filter_by(email=payload.email)).scalar_one_or_none() token = None if account is None: raise AuthenticationFailedError() else: - token = AccountService.send_reset_password_email(account=account, email=args["email"], language=language) + token = AccountService.send_reset_password_email(account=account, email=payload.email, language=language) return {"result": "success", "data": token} @web_ns.route("/forgot-password/validity") class ForgotPasswordCheckApi(Resource): + 
@web_ns.expect(web_ns.models[ForgotPasswordCheckPayload.__name__]) @only_edition_enterprise @setup_required @email_password_login_enabled @@ -78,45 +102,40 @@ class ForgotPasswordCheckApi(Resource): responses={200: "Token is valid", 400: "Bad request - invalid token format", 401: "Invalid or expired token"} ) def post(self): - parser = ( - reqparse.RequestParser() - .add_argument("email", type=str, required=True, location="json") - .add_argument("code", type=str, required=True, location="json") - .add_argument("token", type=str, required=True, nullable=False, location="json") - ) - args = parser.parse_args() + payload = ForgotPasswordCheckPayload.model_validate(web_ns.payload or {}) - user_email = args["email"] + user_email = payload.email - is_forgot_password_error_rate_limit = AccountService.is_forgot_password_error_rate_limit(args["email"]) + is_forgot_password_error_rate_limit = AccountService.is_forgot_password_error_rate_limit(payload.email) if is_forgot_password_error_rate_limit: raise EmailPasswordResetLimitError() - token_data = AccountService.get_reset_password_data(args["token"]) + token_data = AccountService.get_reset_password_data(payload.token) if token_data is None: raise InvalidTokenError() if user_email != token_data.get("email"): raise InvalidEmailError() - if args["code"] != token_data.get("code"): - AccountService.add_forgot_password_error_rate_limit(args["email"]) + if payload.code != token_data.get("code"): + AccountService.add_forgot_password_error_rate_limit(payload.email) raise EmailCodeError() # Verified, revoke the first token - AccountService.revoke_reset_password_token(args["token"]) + AccountService.revoke_reset_password_token(payload.token) # Refresh token data by generating a new token _, new_token = AccountService.generate_reset_password_token( - user_email, code=args["code"], additional_data={"phase": "reset"} + user_email, code=payload.code, additional_data={"phase": "reset"} ) - AccountService.reset_forgot_password_error_rate_limit(args["email"]) + AccountService.reset_forgot_password_error_rate_limit(payload.email) return {"is_valid": True, "email": token_data.get("email"), "token": new_token} @web_ns.route("/forgot-password/resets") class ForgotPasswordResetApi(Resource): + @web_ns.expect(web_ns.models[ForgotPasswordResetPayload.__name__]) @only_edition_enterprise @setup_required @email_password_login_enabled @@ -131,20 +150,14 @@ class ForgotPasswordResetApi(Resource): } ) def post(self): - parser = ( - reqparse.RequestParser() - .add_argument("token", type=str, required=True, nullable=False, location="json") - .add_argument("new_password", type=valid_password, required=True, nullable=False, location="json") - .add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json") - ) - args = parser.parse_args() + payload = ForgotPasswordResetPayload.model_validate(web_ns.payload or {}) # Validate passwords match - if args["new_password"] != args["password_confirm"]: + if payload.new_password != payload.password_confirm: raise PasswordMismatchError() # Validate token and get reset data - reset_data = AccountService.get_reset_password_data(args["token"]) + reset_data = AccountService.get_reset_password_data(payload.token) if not reset_data: raise InvalidTokenError() # Must use token in reset phase @@ -152,11 +165,11 @@ class ForgotPasswordResetApi(Resource): raise InvalidTokenError() # Revoke token to prevent reuse - AccountService.revoke_reset_password_token(args["token"]) + 
AccountService.revoke_reset_password_token(payload.token) # Generate secure salt and hash password salt = secrets.token_bytes(16) - password_hashed = hash_password(args["new_password"], salt) + password_hashed = hash_password(payload.new_password, salt) email = reset_data.get("email", "") @@ -170,7 +183,7 @@ class ForgotPasswordResetApi(Resource): return {"result": "success"} - def _update_existing_account(self, account, password_hashed, salt, session): + def _update_existing_account(self, account: Account, password_hashed, salt, session): # Update existing account credentials account.password = base64.b64encode(password_hashed).decode() account.password_salt = base64.b64encode(salt).decode() diff --git a/api/controllers/web/login.py b/api/controllers/web/login.py index 538d0c44be..5847f4ae3a 100644 --- a/api/controllers/web/login.py +++ b/api/controllers/web/login.py @@ -10,7 +10,12 @@ from controllers.console.auth.error import ( InvalidEmailError, ) from controllers.console.error import AccountBannedError -from controllers.console.wraps import only_edition_enterprise, setup_required +from controllers.console.wraps import ( + decrypt_code_field, + decrypt_password_field, + only_edition_enterprise, + setup_required, +) from controllers.web import web_ns from controllers.web.wraps import decode_jwt_token from libs.helper import email @@ -42,6 +47,7 @@ class LoginApi(Resource): 404: "Account not found", } ) + @decrypt_password_field def post(self): """Authenticate user and login.""" parser = ( @@ -181,6 +187,7 @@ class EmailCodeLoginApi(Resource): 404: "Account not found", } ) + @decrypt_code_field def post(self): parser = ( reqparse.RequestParser() diff --git a/api/controllers/web/message.py b/api/controllers/web/message.py index 9f9aa4838c..80035ba818 100644 --- a/api/controllers/web/message.py +++ b/api/controllers/web/message.py @@ -1,9 +1,11 @@ import logging +from typing import Literal -from flask_restx import fields, marshal_with, reqparse -from flask_restx.inputs import int_range +from flask import request +from pydantic import BaseModel, Field, TypeAdapter, field_validator from werkzeug.exceptions import InternalServerError, NotFound +from controllers.common.schema import register_schema_models from controllers.web import web_ns from controllers.web.error import ( AppMoreLikeThisDisabledError, @@ -19,11 +21,10 @@ from controllers.web.wraps import WebApiResource from core.app.entities.app_invoke_entities import InvokeFrom from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError from core.model_runtime.errors.invoke import InvokeError -from fields.conversation_fields import message_file_fields -from fields.message_fields import agent_thought_fields, feedback_fields, retriever_resource_fields -from fields.raws import FilesContainedField +from fields.conversation_fields import ResultResponse +from fields.message_fields import SuggestedQuestionsResponse, WebMessageInfiniteScrollPagination, WebMessageListItem from libs import helper -from libs.helper import TimestampField, uuid_value +from libs.helper import uuid_value from models.model import AppMode from services.app_generate_service import AppGenerateService from services.errors.app import MoreLikeThisDisabledError @@ -38,37 +39,45 @@ from services.message_service import MessageService logger = logging.getLogger(__name__) +class MessageListQuery(BaseModel): + conversation_id: str = Field(description="Conversation UUID") + first_id: str | None = Field(default=None, description="First 
message ID for pagination") + limit: int = Field(default=20, ge=1, le=100, description="Number of messages to return (1-100)") + + @field_validator("conversation_id", "first_id") + @classmethod + def validate_uuid(cls, value: str | None) -> str | None: + if value is None: + return value + return uuid_value(value) + + +class MessageFeedbackPayload(BaseModel): + rating: Literal["like", "dislike"] | None = Field(default=None, description="Feedback rating") + content: str | None = Field(default=None, description="Feedback content") + + +class MessageMoreLikeThisQuery(BaseModel): + response_mode: Literal["blocking", "streaming"] = Field( + description="Response mode", + ) + + +register_schema_models(web_ns, MessageListQuery, MessageFeedbackPayload, MessageMoreLikeThisQuery) + + @web_ns.route("/messages") class MessageListApi(WebApiResource): - message_fields = { - "id": fields.String, - "conversation_id": fields.String, - "parent_message_id": fields.String, - "inputs": FilesContainedField, - "query": fields.String, - "answer": fields.String(attribute="re_sign_file_url_answer"), - "message_files": fields.List(fields.Nested(message_file_fields)), - "feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True), - "retriever_resources": fields.List(fields.Nested(retriever_resource_fields)), - "created_at": TimestampField, - "agent_thoughts": fields.List(fields.Nested(agent_thought_fields)), - "metadata": fields.Raw(attribute="message_metadata_dict"), - "status": fields.String, - "error": fields.String, - } - - message_infinite_scroll_pagination_fields = { - "limit": fields.Integer, - "has_more": fields.Boolean, - "data": fields.List(fields.Nested(message_fields)), - } - @web_ns.doc("Get Message List") @web_ns.doc(description="Retrieve paginated list of messages from a conversation in a chat application.") @web_ns.doc( params={ "conversation_id": {"description": "Conversation UUID", "type": "string", "required": True}, - "first_id": {"description": "First message ID for pagination", "type": "string", "required": False}, + "first_id": { + "description": "First message ID for pagination", + "type": "string", + "required": False, + }, "limit": { "description": "Number of messages to return (1-100)", "type": "integer", @@ -87,24 +96,25 @@ class MessageListApi(WebApiResource): 500: "Internal Server Error", } ) - @marshal_with(message_infinite_scroll_pagination_fields) def get(self, app_model, end_user): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = ( - reqparse.RequestParser() - .add_argument("conversation_id", required=True, type=uuid_value, location="args") - .add_argument("first_id", type=uuid_value, location="args") - .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - ) - args = parser.parse_args() + raw_args = request.args.to_dict() + query = MessageListQuery.model_validate(raw_args) try: - return MessageService.pagination_by_first_id( - app_model, end_user, args["conversation_id"], args["first_id"], args["limit"] + pagination = MessageService.pagination_by_first_id( + app_model, end_user, query.conversation_id, query.first_id, query.limit ) + adapter = TypeAdapter(WebMessageListItem) + items = [adapter.validate_python(message, from_attributes=True) for message in pagination.data] + return WebMessageInfiniteScrollPagination( + limit=pagination.limit, + has_more=pagination.has_more, + data=items, + ).model_dump(mode="json") 
except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") except FirstMessageNotExistsError: @@ -113,10 +123,6 @@ class MessageListApi(WebApiResource): @web_ns.route("/messages//feedbacks") class MessageFeedbackApi(WebApiResource): - feedback_response_fields = { - "result": fields.String, - } - @web_ns.doc("Create Message Feedback") @web_ns.doc(description="Submit feedback (like/dislike) for a specific message.") @web_ns.doc(params={"message_id": {"description": "Message UUID", "type": "string", "required": True}}) @@ -128,7 +134,7 @@ class MessageFeedbackApi(WebApiResource): "enum": ["like", "dislike"], "required": False, }, - "content": {"description": "Feedback content/comment", "type": "string", "required": False}, + "content": {"description": "Feedback content", "type": "string", "required": False}, } ) @web_ns.doc( @@ -141,46 +147,30 @@ class MessageFeedbackApi(WebApiResource): 500: "Internal Server Error", } ) - @marshal_with(feedback_response_fields) def post(self, app_model, end_user, message_id): message_id = str(message_id) - parser = ( - reqparse.RequestParser() - .add_argument("rating", type=str, choices=["like", "dislike", None], location="json") - .add_argument("content", type=str, location="json", default=None) - ) - args = parser.parse_args() + payload = MessageFeedbackPayload.model_validate(web_ns.payload or {}) try: MessageService.create_feedback( app_model=app_model, message_id=message_id, user=end_user, - rating=args.get("rating"), - content=args.get("content"), + rating=payload.rating, + content=payload.content, ) except MessageNotExistsError: raise NotFound("Message Not Exists.") - return {"result": "success"} + return ResultResponse(result="success").model_dump(mode="json") @web_ns.route("/messages//more-like-this") class MessageMoreLikeThisApi(WebApiResource): @web_ns.doc("Generate More Like This") @web_ns.doc(description="Generate a new completion similar to an existing message (completion apps only).") - @web_ns.doc( - params={ - "message_id": {"description": "Message UUID", "type": "string", "required": True}, - "response_mode": { - "description": "Response mode", - "type": "string", - "enum": ["blocking", "streaming"], - "required": True, - }, - } - ) + @web_ns.expect(web_ns.models[MessageMoreLikeThisQuery.__name__]) @web_ns.doc( responses={ 200: "Success", @@ -197,12 +187,10 @@ class MessageMoreLikeThisApi(WebApiResource): message_id = str(message_id) - parser = reqparse.RequestParser().add_argument( - "response_mode", type=str, required=True, choices=["blocking", "streaming"], location="args" - ) - args = parser.parse_args() + raw_args = request.args.to_dict() + query = MessageMoreLikeThisQuery.model_validate(raw_args) - streaming = args["response_mode"] == "streaming" + streaming = query.response_mode == "streaming" try: response = AppGenerateService.generate_more_like_this( @@ -235,10 +223,6 @@ class MessageMoreLikeThisApi(WebApiResource): @web_ns.route("/messages//suggested-questions") class MessageSuggestedQuestionApi(WebApiResource): - suggested_questions_response_fields = { - "data": fields.List(fields.String), - } - @web_ns.doc("Get Suggested Questions") @web_ns.doc(description="Get suggested follow-up questions after a message (chat apps only).") @web_ns.doc(params={"message_id": {"description": "Message UUID", "type": "string", "required": True}}) @@ -252,7 +236,6 @@ class MessageSuggestedQuestionApi(WebApiResource): 500: "Internal Server Error", } ) - @marshal_with(suggested_questions_response_fields) def get(self, app_model, 
end_user, message_id): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -265,7 +248,6 @@ class MessageSuggestedQuestionApi(WebApiResource): app_model=app_model, user=end_user, message_id=message_id, invoke_from=InvokeFrom.WEB_APP ) # questions is a list of strings, not a list of Message objects - # so we can directly return it except MessageNotExistsError: raise NotFound("Message not found") except ConversationNotExistsError: @@ -284,4 +266,4 @@ class MessageSuggestedQuestionApi(WebApiResource): logger.exception("internal server error.") raise InternalServerError() - return {"data": questions} + return SuggestedQuestionsResponse(data=questions).model_dump(mode="json") diff --git a/api/controllers/web/remote_files.py b/api/controllers/web/remote_files.py index dac4b3da94..b08b3fe858 100644 --- a/api/controllers/web/remote_files.py +++ b/api/controllers/web/remote_files.py @@ -1,7 +1,7 @@ import urllib.parse import httpx -from flask_restx import marshal_with, reqparse +from pydantic import BaseModel, Field, HttpUrl import services from controllers.common import helpers @@ -10,14 +10,23 @@ from controllers.common.errors import ( RemoteFileUploadError, UnsupportedFileTypeError, ) -from controllers.web import web_ns -from controllers.web.wraps import WebApiResource from core.file import helpers as file_helpers from core.helper import ssrf_proxy from extensions.ext_database import db -from fields.file_fields import build_file_with_signed_url_model, build_remote_file_info_model +from fields.file_fields import FileWithSignedUrl, RemoteFileInfo from services.file_service import FileService +from ..common.schema import register_schema_models +from . import web_ns +from .wraps import WebApiResource + + +class RemoteFileUploadPayload(BaseModel): + url: HttpUrl = Field(description="Remote file URL") + + +register_schema_models(web_ns, RemoteFileUploadPayload, RemoteFileInfo, FileWithSignedUrl) + @web_ns.route("/remote-files/") class RemoteFileInfoApi(WebApiResource): @@ -31,7 +40,7 @@ class RemoteFileInfoApi(WebApiResource): 500: "Failed to fetch remote file", } ) - @marshal_with(build_remote_file_info_model(web_ns)) + @web_ns.response(200, "Remote file info", web_ns.models[RemoteFileInfo.__name__]) def get(self, app_model, end_user, url): """Get information about a remote file. @@ -55,10 +64,11 @@ class RemoteFileInfoApi(WebApiResource): # failed back to get method resp = ssrf_proxy.get(decoded_url, timeout=3) resp.raise_for_status() - return { - "file_type": resp.headers.get("Content-Type", "application/octet-stream"), - "file_length": int(resp.headers.get("Content-Length", -1)), - } + info = RemoteFileInfo( + file_type=resp.headers.get("Content-Type", "application/octet-stream"), + file_length=int(resp.headers.get("Content-Length", -1)), + ) + return info.model_dump(mode="json") @web_ns.route("/remote-files/upload") @@ -74,7 +84,7 @@ class RemoteFileUploadApi(WebApiResource): 500: "Failed to fetch remote file", } ) - @marshal_with(build_file_with_signed_url_model(web_ns)) + @web_ns.response(201, "Remote file uploaded", web_ns.models[FileWithSignedUrl.__name__]) def post(self, app_model, end_user): """Upload a file from a remote URL. 
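The upload handler below validates its body against `RemoteFileUploadPayload` (defined above), whose `url: HttpUrl` field rejects malformed URLs before any network fetch; since `HttpUrl` is a dedicated type in Pydantic v2 rather than a `str` subclass, the handler casts with `str(payload.url)` before use. A small sketch of that behavior:

```python
from pydantic import BaseModel, Field, HttpUrl, ValidationError

class UploadPayload(BaseModel):  # mirrors RemoteFileUploadPayload
    url: HttpUrl = Field(description="Remote file URL")

ok = UploadPayload.model_validate({"url": "https://example.com/a.png"})
assert str(ok.url) == "https://example.com/a.png"  # cast back to plain str

try:
    UploadPayload.model_validate({"url": "not a url"})
except ValidationError:
    print("rejected before any request is made")
```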
@@ -97,10 +107,8 @@ class RemoteFileUploadApi(WebApiResource): FileTooLargeError: File exceeds size limit UnsupportedFileTypeError: File type not supported """ - parser = reqparse.RequestParser().add_argument("url", type=str, required=True, help="URL is required") - args = parser.parse_args() - - url = args["url"] + payload = RemoteFileUploadPayload.model_validate(web_ns.payload or {}) + url = str(payload.url) try: resp = ssrf_proxy.head(url=url) @@ -131,13 +139,14 @@ class RemoteFileUploadApi(WebApiResource): except services.errors.file.UnsupportedFileTypeError: raise UnsupportedFileTypeError - return { - "id": upload_file.id, - "name": upload_file.name, - "size": upload_file.size, - "extension": upload_file.extension, - "url": file_helpers.get_signed_file_url(upload_file_id=upload_file.id), - "mime_type": upload_file.mime_type, - "created_by": upload_file.created_by, - "created_at": upload_file.created_at, - }, 201 + signed_file_response = FileWithSignedUrl( + id=upload_file.id, + name=upload_file.name, + size=upload_file.size, + extension=upload_file.extension, + url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id), + mime_type=upload_file.mime_type, + created_by=upload_file.created_by, + created_at=int(upload_file.created_at.timestamp()), + ) + return signed_file_response.model_dump(mode="json"), 201 diff --git a/api/controllers/web/saved_message.py b/api/controllers/web/saved_message.py index 865f3610a7..29993100f6 100644 --- a/api/controllers/web/saved_message.py +++ b/api/controllers/web/saved_message.py @@ -1,40 +1,32 @@ -from flask_restx import fields, marshal_with, reqparse -from flask_restx.inputs import int_range +from flask import request +from pydantic import BaseModel, Field, TypeAdapter from werkzeug.exceptions import NotFound +from controllers.common.schema import register_schema_models from controllers.web import web_ns from controllers.web.error import NotCompletionAppError from controllers.web.wraps import WebApiResource -from fields.conversation_fields import message_file_fields -from libs.helper import TimestampField, uuid_value +from fields.conversation_fields import ResultResponse +from fields.message_fields import SavedMessageInfiniteScrollPagination, SavedMessageItem +from libs.helper import UUIDStrOrEmpty from services.errors.message import MessageNotExistsError from services.saved_message_service import SavedMessageService -feedback_fields = {"rating": fields.String} -message_fields = { - "id": fields.String, - "inputs": fields.Raw, - "query": fields.String, - "answer": fields.String, - "message_files": fields.List(fields.Nested(message_file_fields)), - "feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True), - "created_at": TimestampField, -} +class SavedMessageListQuery(BaseModel): + last_id: UUIDStrOrEmpty | None = None + limit: int = Field(default=20, ge=1, le=100) + + +class SavedMessageCreatePayload(BaseModel): + message_id: UUIDStrOrEmpty + + +register_schema_models(web_ns, SavedMessageListQuery, SavedMessageCreatePayload) @web_ns.route("/saved-messages") class SavedMessageListApi(WebApiResource): - saved_message_infinite_scroll_pagination_fields = { - "limit": fields.Integer, - "has_more": fields.Boolean, - "data": fields.List(fields.Nested(message_fields)), - } - - post_response_fields = { - "result": fields.String, - } - @web_ns.doc("Get Saved Messages") @web_ns.doc(description="Retrieve paginated list of saved messages for a completion application.") @web_ns.doc( @@ -58,19 +50,21 @@ class SavedMessageListApi(WebApiResource): 500:
"Internal Server Error", } ) - @marshal_with(saved_message_infinite_scroll_pagination_fields) def get(self, app_model, end_user): if app_model.mode != "completion": raise NotCompletionAppError() - parser = ( - reqparse.RequestParser() - .add_argument("last_id", type=uuid_value, location="args") - .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - ) - args = parser.parse_args() + raw_args = request.args.to_dict() + query = SavedMessageListQuery.model_validate(raw_args) - return SavedMessageService.pagination_by_last_id(app_model, end_user, args["last_id"], args["limit"]) + pagination = SavedMessageService.pagination_by_last_id(app_model, end_user, query.last_id, query.limit) + adapter = TypeAdapter(SavedMessageItem) + items = [adapter.validate_python(message, from_attributes=True) for message in pagination.data] + return SavedMessageInfiniteScrollPagination( + limit=pagination.limit, + has_more=pagination.has_more, + data=items, + ).model_dump(mode="json") @web_ns.doc("Save Message") @web_ns.doc(description="Save a specific message for later reference.") @@ -89,28 +83,22 @@ class SavedMessageListApi(WebApiResource): 500: "Internal Server Error", } ) - @marshal_with(post_response_fields) def post(self, app_model, end_user): if app_model.mode != "completion": raise NotCompletionAppError() - parser = reqparse.RequestParser().add_argument("message_id", type=uuid_value, required=True, location="json") - args = parser.parse_args() + payload = SavedMessageCreatePayload.model_validate(web_ns.payload or {}) try: - SavedMessageService.save(app_model, end_user, args["message_id"]) + SavedMessageService.save(app_model, end_user, payload.message_id) except MessageNotExistsError: raise NotFound("Message Not Exists.") - return {"result": "success"} + return ResultResponse(result="success").model_dump(mode="json") @web_ns.route("/saved-messages/") class SavedMessageApi(WebApiResource): - delete_response_fields = { - "result": fields.String, - } - @web_ns.doc("Delete Saved Message") @web_ns.doc(description="Remove a message from saved messages.") @web_ns.doc(params={"message_id": {"description": "Message UUID to delete", "type": "string", "required": True}}) @@ -124,7 +112,6 @@ class SavedMessageApi(WebApiResource): 500: "Internal Server Error", } ) - @marshal_with(delete_response_fields) def delete(self, app_model, end_user, message_id): message_id = str(message_id) @@ -133,4 +120,4 @@ class SavedMessageApi(WebApiResource): SavedMessageService.delete(app_model, end_user, message_id) - return {"result": "success"}, 204 + return ResultResponse(result="success").model_dump(mode="json"), 204 diff --git a/api/core/agent/agent_app_runner.py b/api/core/agent/agent_app_runner.py new file mode 100644 index 0000000000..2ee0a23aab --- /dev/null +++ b/api/core/agent/agent_app_runner.py @@ -0,0 +1,380 @@ +import logging +from collections.abc import Generator +from copy import deepcopy +from typing import Any + +from core.agent.base_agent_runner import BaseAgentRunner +from core.agent.entities import AgentEntity, AgentLog, AgentResult +from core.agent.patterns.strategy_factory import StrategyFactory +from core.app.apps.base_app_queue_manager import PublishFrom +from core.app.entities.queue_entities import QueueAgentThoughtEvent, QueueMessageEndEvent, QueueMessageFileEvent +from core.file import file_manager +from core.model_runtime.entities import ( + AssistantPromptMessage, + LLMResult, + LLMResultChunk, + LLMUsage, + PromptMessage, + PromptMessageContentType, + 
SystemPromptMessage, + TextPromptMessageContent, + UserPromptMessage, +) +from core.model_runtime.entities.message_entities import ImagePromptMessageContent, PromptMessageContentUnionTypes +from core.prompt.agent_history_prompt_transform import AgentHistoryPromptTransform +from core.tools.__base.tool import Tool +from core.tools.entities.tool_entities import ToolInvokeMeta +from core.tools.tool_engine import ToolEngine +from models.model import Message + +logger = logging.getLogger(__name__) + + +class AgentAppRunner(BaseAgentRunner): + def _create_tool_invoke_hook(self, message: Message): + """ + Create a tool invoke hook that uses ToolEngine.agent_invoke. + This hook handles file creation and returns proper meta information. + """ + # Get trace manager from app generate entity + trace_manager = self.application_generate_entity.trace_manager + + def tool_invoke_hook( + tool: Tool, tool_args: dict[str, Any], tool_name: str + ) -> tuple[str, list[str], ToolInvokeMeta]: + """Hook that uses agent_invoke for proper file and meta handling.""" + tool_invoke_response, message_files, tool_invoke_meta = ToolEngine.agent_invoke( + tool=tool, + tool_parameters=tool_args, + user_id=self.user_id, + tenant_id=self.tenant_id, + message=message, + invoke_from=self.application_generate_entity.invoke_from, + agent_tool_callback=self.agent_callback, + trace_manager=trace_manager, + app_id=self.application_generate_entity.app_config.app_id, + message_id=message.id, + conversation_id=self.conversation.id, + ) + + # Publish files and track IDs + for message_file_id in message_files: + self.queue_manager.publish( + QueueMessageFileEvent(message_file_id=message_file_id), + PublishFrom.APPLICATION_MANAGER, + ) + self._current_message_file_ids.append(message_file_id) + + return tool_invoke_response, message_files, tool_invoke_meta + + return tool_invoke_hook + + def run(self, message: Message, query: str, **kwargs: Any) -> Generator[LLMResultChunk, None, None]: + """ + Run Agent application + """ + self.query = query + app_generate_entity = self.application_generate_entity + + app_config = self.app_config + assert app_config is not None, "app_config is required" + assert app_config.agent is not None, "app_config.agent is required" + + # convert tools into ModelRuntime Tool format + tool_instances, _ = self._init_prompt_tools() + + assert app_config.agent + + # Create tool invoke hook for agent_invoke + tool_invoke_hook = self._create_tool_invoke_hook(message) + + # Get instruction for ReAct strategy + instruction = self.app_config.prompt_template.simple_prompt_template or "" + + # Use factory to create appropriate strategy + strategy = StrategyFactory.create_strategy( + model_features=self.model_features, + model_instance=self.model_instance, + tools=list(tool_instances.values()), + files=list(self.files), + max_iterations=app_config.agent.max_iteration, + context=self.build_execution_context(), + agent_strategy=self.config.strategy, + tool_invoke_hook=tool_invoke_hook, + instruction=instruction, + ) + + # Initialize state variables + current_agent_thought_id = None + has_published_thought = False + current_tool_name: str | None = None + self._current_message_file_ids: list[str] = [] + + # organize prompt messages + prompt_messages = self._organize_prompt_messages() + + # Run strategy + generator = strategy.run( + prompt_messages=prompt_messages, + model_parameters=app_generate_entity.model_conf.parameters, + stop=app_generate_entity.model_conf.stop, + stream=True, + ) + + # Consume generator and collect result + 
result: AgentResult | None = None + try: + while True: + try: + output = next(generator) + except StopIteration as e: + # Generator finished, get the return value + result = e.value + break + + if isinstance(output, LLMResultChunk): + # Handle LLM chunk + if current_agent_thought_id and not has_published_thought: + self.queue_manager.publish( + QueueAgentThoughtEvent(agent_thought_id=current_agent_thought_id), + PublishFrom.APPLICATION_MANAGER, + ) + has_published_thought = True + + yield output + + elif isinstance(output, AgentLog): + # Handle Agent Log using log_type for type-safe dispatch + if output.status == AgentLog.LogStatus.START: + if output.log_type == AgentLog.LogType.ROUND: + # Start of a new round + message_file_ids: list[str] = [] + current_agent_thought_id = self.create_agent_thought( + message_id=message.id, + message="", + tool_name="", + tool_input="", + messages_ids=message_file_ids, + ) + has_published_thought = False + + elif output.log_type == AgentLog.LogType.TOOL_CALL: + if current_agent_thought_id is None: + continue + + # Tool call start - extract data from structured fields + current_tool_name = output.data.get("tool_name", "") + tool_input = output.data.get("tool_args", {}) + + self.save_agent_thought( + agent_thought_id=current_agent_thought_id, + tool_name=current_tool_name, + tool_input=tool_input, + thought=None, + observation=None, + tool_invoke_meta=None, + answer=None, + messages_ids=[], + ) + self.queue_manager.publish( + QueueAgentThoughtEvent(agent_thought_id=current_agent_thought_id), + PublishFrom.APPLICATION_MANAGER, + ) + + elif output.status == AgentLog.LogStatus.SUCCESS: + if output.log_type == AgentLog.LogType.THOUGHT: + if current_agent_thought_id is None: + continue + + thought_text = output.data.get("thought") + self.save_agent_thought( + agent_thought_id=current_agent_thought_id, + tool_name=None, + tool_input=None, + thought=thought_text, + observation=None, + tool_invoke_meta=None, + answer=None, + messages_ids=[], + ) + self.queue_manager.publish( + QueueAgentThoughtEvent(agent_thought_id=current_agent_thought_id), + PublishFrom.APPLICATION_MANAGER, + ) + + elif output.log_type == AgentLog.LogType.TOOL_CALL: + if current_agent_thought_id is None: + continue + + # Tool call finished + tool_output = output.data.get("output") + # Get meta from strategy output (now properly populated) + tool_meta = output.data.get("meta") + + # Wrap tool_meta with tool_name as key (required by agent_service) + if tool_meta and current_tool_name: + tool_meta = {current_tool_name: tool_meta} + + self.save_agent_thought( + agent_thought_id=current_agent_thought_id, + tool_name=None, + tool_input=None, + thought=None, + observation=tool_output, + tool_invoke_meta=tool_meta, + answer=None, + messages_ids=self._current_message_file_ids, + ) + # Clear message file ids after saving + self._current_message_file_ids = [] + current_tool_name = None + + self.queue_manager.publish( + QueueAgentThoughtEvent(agent_thought_id=current_agent_thought_id), + PublishFrom.APPLICATION_MANAGER, + ) + + elif output.log_type == AgentLog.LogType.ROUND: + if current_agent_thought_id is None: + continue + + # Round finished - save LLM usage and answer + llm_usage = output.metadata.get(AgentLog.LogMetadata.LLM_USAGE) + llm_result = output.data.get("llm_result") + final_answer = output.data.get("final_answer") + + self.save_agent_thought( + agent_thought_id=current_agent_thought_id, + tool_name=None, + tool_input=None, + thought=llm_result, + observation=None, + tool_invoke_meta=None, + 
answer=final_answer, + messages_ids=[], + llm_usage=llm_usage, + ) + self.queue_manager.publish( + QueueAgentThoughtEvent(agent_thought_id=current_agent_thought_id), + PublishFrom.APPLICATION_MANAGER, + ) + + except Exception: + # Re-raise any other exceptions + raise + + # Process final result + if isinstance(result, AgentResult): + final_answer = result.text + usage = result.usage or LLMUsage.empty_usage() + + # Publish end event + self.queue_manager.publish( + QueueMessageEndEvent( + llm_result=LLMResult( + model=self.model_instance.model, + prompt_messages=prompt_messages, + message=AssistantPromptMessage(content=final_answer), + usage=usage, + system_fingerprint="", + ) + ), + PublishFrom.APPLICATION_MANAGER, + ) + + def _init_system_message(self, prompt_template: str, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: + """ + Initialize system message + """ + if not prompt_template: + return prompt_messages or [] + + prompt_messages = prompt_messages or [] + + if prompt_messages and isinstance(prompt_messages[0], SystemPromptMessage): + prompt_messages[0] = SystemPromptMessage(content=prompt_template) + return prompt_messages + + if not prompt_messages: + return [SystemPromptMessage(content=prompt_template)] + + prompt_messages.insert(0, SystemPromptMessage(content=prompt_template)) + return prompt_messages + + def _organize_user_query(self, query: str, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: + """ + Organize user query + """ + if self.files: + # get image detail config + image_detail_config = ( + self.application_generate_entity.file_upload_config.image_config.detail + if ( + self.application_generate_entity.file_upload_config + and self.application_generate_entity.file_upload_config.image_config + ) + else None + ) + image_detail_config = image_detail_config or ImagePromptMessageContent.DETAIL.LOW + + prompt_message_contents: list[PromptMessageContentUnionTypes] = [] + for file in self.files: + prompt_message_contents.append( + file_manager.to_prompt_message_content( + file, + image_detail_config=image_detail_config, + ) + ) + prompt_message_contents.append(TextPromptMessageContent(data=query)) + + prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) + else: + prompt_messages.append(UserPromptMessage(content=query)) + + return prompt_messages + + def _clear_user_prompt_image_messages(self, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: + """ + As for now, gpt supports both fc and vision at the first iteration. + We need to remove the image messages from the prompt messages at the first iteration. 
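+        Only the text parts are kept; "[image]" and "[file]" placeholders stand
+        in for the removed contents, so later iterations avoid resending large
+        multimodal payloads.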
+ """ + prompt_messages = deepcopy(prompt_messages) + + for prompt_message in prompt_messages: + if isinstance(prompt_message, UserPromptMessage): + if isinstance(prompt_message.content, list): + prompt_message.content = "\n".join( + [ + content.data + if content.type == PromptMessageContentType.TEXT + else "[image]" + if content.type == PromptMessageContentType.IMAGE + else "[file]" + for content in prompt_message.content + ] + ) + + return prompt_messages + + def _organize_prompt_messages(self): + # For ReAct strategy, use the agent prompt template + if self.config.strategy == AgentEntity.Strategy.CHAIN_OF_THOUGHT and self.config.prompt: + prompt_template = self.config.prompt.first_prompt + else: + prompt_template = self.app_config.prompt_template.simple_prompt_template or "" + + self.history_prompt_messages = self._init_system_message(prompt_template, self.history_prompt_messages) + query_prompt_messages = self._organize_user_query(self.query or "", []) + + self.history_prompt_messages = AgentHistoryPromptTransform( + model_config=self.model_config, + prompt_messages=[*query_prompt_messages, *self._current_thoughts], + history_messages=self.history_prompt_messages, + memory=self.memory, + ).get_prompt() + + prompt_messages = [*self.history_prompt_messages, *query_prompt_messages, *self._current_thoughts] + if len(self._current_thoughts) != 0: + # clear messages after the first iteration + prompt_messages = self._clear_user_prompt_image_messages(prompt_messages) + return prompt_messages diff --git a/api/core/agent/base_agent_runner.py b/api/core/agent/base_agent_runner.py index c196dbbdf1..b5459611b1 100644 --- a/api/core/agent/base_agent_runner.py +++ b/api/core/agent/base_agent_runner.py @@ -1,11 +1,12 @@ import json import logging import uuid +from decimal import Decimal from typing import Union, cast from sqlalchemy import select -from core.agent.entities import AgentEntity, AgentToolEntity +from core.agent.entities import AgentEntity, AgentToolEntity, ExecutionContext from core.app.app_config.features.file_upload.manager import FileUploadConfigManager from core.app.apps.agent_chat.app_config_manager import AgentChatAppConfig from core.app.apps.base_app_queue_manager import AppQueueManager @@ -41,6 +42,7 @@ from core.tools.tool_manager import ToolManager from core.tools.utils.dataset_retriever_tool import DatasetRetrieverTool from extensions.ext_database import db from factories import file_factory +from models.enums import CreatorUserRole from models.model import Conversation, Message, MessageAgentThought, MessageFile logger = logging.getLogger(__name__) @@ -114,9 +116,20 @@ class BaseAgentRunner(AppRunner): features = model_schema.features if model_schema and model_schema.features else [] self.stream_tool_call = ModelFeature.STREAM_TOOL_CALL in features self.files = application_generate_entity.files if ModelFeature.VISION in features else [] + self.model_features = features self.query: str | None = "" self._current_thoughts: list[PromptMessage] = [] + def build_execution_context(self) -> ExecutionContext: + """Build execution context.""" + return ExecutionContext( + user_id=self.user_id, + app_id=self.app_config.app_id, + conversation_id=self.conversation.id, + message_id=self.message.id, + tenant_id=self.tenant_id, + ) + def _repack_app_generate_entity( self, app_generate_entity: AgentChatAppGenerateEntity ) -> AgentChatAppGenerateEntity: @@ -289,6 +302,7 @@ class BaseAgentRunner(AppRunner): thought = MessageAgentThought( message_id=message_id, message_chain_id=None, + 
tool_process_data=None, thought="", tool=tool_name, tool_labels_str="{}", @@ -296,20 +310,20 @@ class BaseAgentRunner(AppRunner): tool_input=tool_input, message=message, message_token=0, - message_unit_price=0, - message_price_unit=0, + message_unit_price=Decimal(0), + message_price_unit=Decimal("0.001"), message_files=json.dumps(messages_ids) if messages_ids else "", answer="", observation="", answer_token=0, - answer_unit_price=0, - answer_price_unit=0, + answer_unit_price=Decimal(0), + answer_price_unit=Decimal("0.001"), tokens=0, - total_price=0, + total_price=Decimal(0), position=self.agent_thought_count + 1, currency="USD", latency=0, - created_by_role="account", + created_by_role=CreatorUserRole.ACCOUNT, created_by=self.user_id, ) @@ -342,7 +356,8 @@ class BaseAgentRunner(AppRunner): raise ValueError("agent thought not found") if thought: - agent_thought.thought += thought + existing_thought = agent_thought.thought or "" + agent_thought.thought = f"{existing_thought}{thought}" if tool_name: agent_thought.tool = tool_name @@ -440,21 +455,30 @@ class BaseAgentRunner(AppRunner): agent_thoughts: list[MessageAgentThought] = message.agent_thoughts if agent_thoughts: for agent_thought in agent_thoughts: - tools = agent_thought.tool - if tools: - tools = tools.split(";") + tool_names_raw = agent_thought.tool + if tool_names_raw: + tool_names = tool_names_raw.split(";") tool_calls: list[AssistantPromptMessage.ToolCall] = [] tool_call_response: list[ToolPromptMessage] = [] - try: - tool_inputs = json.loads(agent_thought.tool_input) - except Exception: - tool_inputs = {tool: {} for tool in tools} - try: - tool_responses = json.loads(agent_thought.observation) - except Exception: - tool_responses = dict.fromkeys(tools, agent_thought.observation) + tool_input_payload = agent_thought.tool_input + if tool_input_payload: + try: + tool_inputs = json.loads(tool_input_payload) + except Exception: + tool_inputs = {tool: {} for tool in tool_names} + else: + tool_inputs = {tool: {} for tool in tool_names} - for tool in tools: + observation_payload = agent_thought.observation + if observation_payload: + try: + tool_responses = json.loads(observation_payload) + except Exception: + tool_responses = dict.fromkeys(tool_names, observation_payload) + else: + tool_responses = dict.fromkeys(tool_names, observation_payload) + + for tool in tool_names: # generate a uuid for tool call tool_call_id = str(uuid.uuid4()) tool_calls.append( @@ -484,7 +508,7 @@ class BaseAgentRunner(AppRunner): *tool_call_response, ] ) - if not tools: + if not tool_names_raw: result.append(AssistantPromptMessage(content=agent_thought.thought)) else: if message.answer: diff --git a/api/core/agent/cot_agent_runner.py b/api/core/agent/cot_agent_runner.py deleted file mode 100644 index b32e35d0ca..0000000000 --- a/api/core/agent/cot_agent_runner.py +++ /dev/null @@ -1,431 +0,0 @@ -import json -import logging -from abc import ABC, abstractmethod -from collections.abc import Generator, Mapping, Sequence -from typing import Any - -from core.agent.base_agent_runner import BaseAgentRunner -from core.agent.entities import AgentScratchpadUnit -from core.agent.output_parser.cot_output_parser import CotAgentOutputParser -from core.app.apps.base_app_queue_manager import PublishFrom -from core.app.entities.queue_entities import QueueAgentThoughtEvent, QueueMessageEndEvent, QueueMessageFileEvent -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage -from core.model_runtime.entities.message_entities 
import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageTool, - ToolPromptMessage, - UserPromptMessage, -) -from core.ops.ops_trace_manager import TraceQueueManager -from core.prompt.agent_history_prompt_transform import AgentHistoryPromptTransform -from core.tools.__base.tool import Tool -from core.tools.entities.tool_entities import ToolInvokeMeta -from core.tools.tool_engine import ToolEngine -from models.model import Message - -logger = logging.getLogger(__name__) - - -class CotAgentRunner(BaseAgentRunner, ABC): - _is_first_iteration = True - _ignore_observation_providers = ["wenxin"] - _historic_prompt_messages: list[PromptMessage] - _agent_scratchpad: list[AgentScratchpadUnit] - _instruction: str - _query: str - _prompt_messages_tools: Sequence[PromptMessageTool] - - def run( - self, - message: Message, - query: str, - inputs: Mapping[str, str], - ) -> Generator: - """ - Run Cot agent application - """ - - app_generate_entity = self.application_generate_entity - self._repack_app_generate_entity(app_generate_entity) - self._init_react_state(query) - - trace_manager = app_generate_entity.trace_manager - - # check model mode - if "Observation" not in app_generate_entity.model_conf.stop: - if app_generate_entity.model_conf.provider not in self._ignore_observation_providers: - app_generate_entity.model_conf.stop.append("Observation") - - app_config = self.app_config - assert app_config.agent - - # init instruction - inputs = inputs or {} - instruction = app_config.prompt_template.simple_prompt_template or "" - self._instruction = self._fill_in_inputs_from_external_data_tools(instruction, inputs) - - iteration_step = 1 - max_iteration_steps = min(app_config.agent.max_iteration, 99) + 1 - - # convert tools into ModelRuntime Tool format - tool_instances, prompt_messages_tools = self._init_prompt_tools() - self._prompt_messages_tools = prompt_messages_tools - - function_call_state = True - llm_usage: dict[str, LLMUsage | None] = {"usage": None} - final_answer = "" - prompt_messages: list = [] # Initialize prompt_messages - agent_thought_id = "" # Initialize agent_thought_id - - def increase_usage(final_llm_usage_dict: dict[str, LLMUsage | None], usage: LLMUsage): - if not final_llm_usage_dict["usage"]: - final_llm_usage_dict["usage"] = usage - else: - llm_usage = final_llm_usage_dict["usage"] - llm_usage.prompt_tokens += usage.prompt_tokens - llm_usage.completion_tokens += usage.completion_tokens - llm_usage.total_tokens += usage.total_tokens - llm_usage.prompt_price += usage.prompt_price - llm_usage.completion_price += usage.completion_price - llm_usage.total_price += usage.total_price - - model_instance = self.model_instance - - while function_call_state and iteration_step <= max_iteration_steps: - # continue to run until there is not any tool call - function_call_state = False - - if iteration_step == max_iteration_steps: - # the last iteration, remove all tools - self._prompt_messages_tools = [] - - message_file_ids: list[str] = [] - - agent_thought_id = self.create_agent_thought( - message_id=message.id, message="", tool_name="", tool_input="", messages_ids=message_file_ids - ) - - if iteration_step > 1: - self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER - ) - - # recalc llm max tokens - prompt_messages = self._organize_prompt_messages() - self.recalc_llm_max_tokens(self.model_config, prompt_messages) - # invoke model - chunks = model_instance.invoke_llm( - prompt_messages=prompt_messages, - 
model_parameters=app_generate_entity.model_conf.parameters, - tools=[], - stop=app_generate_entity.model_conf.stop, - stream=True, - user=self.user_id, - callbacks=[], - ) - - usage_dict: dict[str, LLMUsage | None] = {} - react_chunks = CotAgentOutputParser.handle_react_stream_output(chunks, usage_dict) - scratchpad = AgentScratchpadUnit( - agent_response="", - thought="", - action_str="", - observation="", - action=None, - ) - - # publish agent thought if it's first iteration - if iteration_step == 1: - self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER - ) - - for chunk in react_chunks: - if isinstance(chunk, AgentScratchpadUnit.Action): - action = chunk - # detect action - assert scratchpad.agent_response is not None - scratchpad.agent_response += json.dumps(chunk.model_dump()) - scratchpad.action_str = json.dumps(chunk.model_dump()) - scratchpad.action = action - else: - assert scratchpad.agent_response is not None - scratchpad.agent_response += chunk - assert scratchpad.thought is not None - scratchpad.thought += chunk - yield LLMResultChunk( - model=self.model_config.model, - prompt_messages=prompt_messages, - system_fingerprint="", - delta=LLMResultChunkDelta(index=0, message=AssistantPromptMessage(content=chunk), usage=None), - ) - - assert scratchpad.thought is not None - scratchpad.thought = scratchpad.thought.strip() or "I am thinking about how to help you" - self._agent_scratchpad.append(scratchpad) - - # get llm usage - if "usage" in usage_dict: - if usage_dict["usage"] is not None: - increase_usage(llm_usage, usage_dict["usage"]) - else: - usage_dict["usage"] = LLMUsage.empty_usage() - - self.save_agent_thought( - agent_thought_id=agent_thought_id, - tool_name=(scratchpad.action.action_name if scratchpad.action and not scratchpad.is_final() else ""), - tool_input={scratchpad.action.action_name: scratchpad.action.action_input} if scratchpad.action else {}, - tool_invoke_meta={}, - thought=scratchpad.thought or "", - observation="", - answer=scratchpad.agent_response or "", - messages_ids=[], - llm_usage=usage_dict["usage"], - ) - - if not scratchpad.is_final(): - self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER - ) - - if not scratchpad.action: - # failed to extract action, return final answer directly - final_answer = "" - else: - if scratchpad.action.action_name.lower() == "final answer": - # action is final answer, return final answer directly - try: - if isinstance(scratchpad.action.action_input, dict): - final_answer = json.dumps(scratchpad.action.action_input, ensure_ascii=False) - elif isinstance(scratchpad.action.action_input, str): - final_answer = scratchpad.action.action_input - else: - final_answer = f"{scratchpad.action.action_input}" - except TypeError: - final_answer = f"{scratchpad.action.action_input}" - else: - function_call_state = True - # action is tool call, invoke tool - tool_invoke_response, tool_invoke_meta = self._handle_invoke_action( - action=scratchpad.action, - tool_instances=tool_instances, - message_file_ids=message_file_ids, - trace_manager=trace_manager, - ) - scratchpad.observation = tool_invoke_response - scratchpad.agent_response = tool_invoke_response - - self.save_agent_thought( - agent_thought_id=agent_thought_id, - tool_name=scratchpad.action.action_name, - tool_input={scratchpad.action.action_name: scratchpad.action.action_input}, - thought=scratchpad.thought or "", - 
observation={scratchpad.action.action_name: tool_invoke_response}, - tool_invoke_meta={scratchpad.action.action_name: tool_invoke_meta.to_dict()}, - answer=scratchpad.agent_response, - messages_ids=message_file_ids, - llm_usage=usage_dict["usage"], - ) - - self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER - ) - - # update prompt tool message - for prompt_tool in self._prompt_messages_tools: - self.update_prompt_message_tool(tool_instances[prompt_tool.name], prompt_tool) - - iteration_step += 1 - - yield LLMResultChunk( - model=model_instance.model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, message=AssistantPromptMessage(content=final_answer), usage=llm_usage["usage"] - ), - system_fingerprint="", - ) - - # save agent thought - self.save_agent_thought( - agent_thought_id=agent_thought_id, - tool_name="", - tool_input={}, - tool_invoke_meta={}, - thought=final_answer, - observation={}, - answer=final_answer, - messages_ids=[], - ) - # publish end event - self.queue_manager.publish( - QueueMessageEndEvent( - llm_result=LLMResult( - model=model_instance.model, - prompt_messages=prompt_messages, - message=AssistantPromptMessage(content=final_answer), - usage=llm_usage["usage"] or LLMUsage.empty_usage(), - system_fingerprint="", - ) - ), - PublishFrom.APPLICATION_MANAGER, - ) - - def _handle_invoke_action( - self, - action: AgentScratchpadUnit.Action, - tool_instances: Mapping[str, Tool], - message_file_ids: list[str], - trace_manager: TraceQueueManager | None = None, - ) -> tuple[str, ToolInvokeMeta]: - """ - handle invoke action - :param action: action - :param tool_instances: tool instances - :param message_file_ids: message file ids - :param trace_manager: trace manager - :return: observation, meta - """ - # action is tool call, invoke tool - tool_call_name = action.action_name - tool_call_args = action.action_input - tool_instance = tool_instances.get(tool_call_name) - - if not tool_instance: - answer = f"there is not a tool named {tool_call_name}" - return answer, ToolInvokeMeta.error_instance(answer) - - if isinstance(tool_call_args, str): - try: - tool_call_args = json.loads(tool_call_args) - except json.JSONDecodeError: - pass - - # invoke tool - tool_invoke_response, message_files, tool_invoke_meta = ToolEngine.agent_invoke( - tool=tool_instance, - tool_parameters=tool_call_args, - user_id=self.user_id, - tenant_id=self.tenant_id, - message=self.message, - invoke_from=self.application_generate_entity.invoke_from, - agent_tool_callback=self.agent_callback, - trace_manager=trace_manager, - ) - - # publish files - for message_file_id in message_files: - # publish message file - self.queue_manager.publish( - QueueMessageFileEvent(message_file_id=message_file_id), PublishFrom.APPLICATION_MANAGER - ) - # add message file ids - message_file_ids.append(message_file_id) - - return tool_invoke_response, tool_invoke_meta - - def _convert_dict_to_action(self, action: dict) -> AgentScratchpadUnit.Action: - """ - convert dict to action - """ - return AgentScratchpadUnit.Action(action_name=action["action"], action_input=action["action_input"]) - - def _fill_in_inputs_from_external_data_tools(self, instruction: str, inputs: Mapping[str, Any]) -> str: - """ - fill in inputs from external data tools - """ - for key, value in inputs.items(): - try: - instruction = instruction.replace(f"{{{{{key}}}}}", str(value)) - except Exception: - continue - - return instruction - - def _init_react_state(self, 
query): - """ - init agent scratchpad - """ - self._query = query - self._agent_scratchpad = [] - self._historic_prompt_messages = self._organize_historic_prompt_messages() - - @abstractmethod - def _organize_prompt_messages(self) -> list[PromptMessage]: - """ - organize prompt messages - """ - - def _format_assistant_message(self, agent_scratchpad: list[AgentScratchpadUnit]) -> str: - """ - format assistant message - """ - message = "" - for scratchpad in agent_scratchpad: - if scratchpad.is_final(): - message += f"Final Answer: {scratchpad.agent_response}" - else: - message += f"Thought: {scratchpad.thought}\n\n" - if scratchpad.action_str: - message += f"Action: {scratchpad.action_str}\n\n" - if scratchpad.observation: - message += f"Observation: {scratchpad.observation}\n\n" - - return message - - def _organize_historic_prompt_messages( - self, current_session_messages: list[PromptMessage] | None = None - ) -> list[PromptMessage]: - """ - organize historic prompt messages - """ - result: list[PromptMessage] = [] - scratchpads: list[AgentScratchpadUnit] = [] - current_scratchpad: AgentScratchpadUnit | None = None - - for message in self.history_prompt_messages: - if isinstance(message, AssistantPromptMessage): - if not current_scratchpad: - assert isinstance(message.content, str) - current_scratchpad = AgentScratchpadUnit( - agent_response=message.content, - thought=message.content or "I am thinking about how to help you", - action_str="", - action=None, - observation=None, - ) - scratchpads.append(current_scratchpad) - if message.tool_calls: - try: - current_scratchpad.action = AgentScratchpadUnit.Action( - action_name=message.tool_calls[0].function.name, - action_input=json.loads(message.tool_calls[0].function.arguments), - ) - current_scratchpad.action_str = json.dumps(current_scratchpad.action.to_dict()) - except Exception: - logger.exception("Failed to parse tool call from assistant message") - elif isinstance(message, ToolPromptMessage): - if current_scratchpad: - assert isinstance(message.content, str) - current_scratchpad.observation = message.content - else: - raise NotImplementedError("expected str type") - elif isinstance(message, UserPromptMessage): - if scratchpads: - result.append(AssistantPromptMessage(content=self._format_assistant_message(scratchpads))) - scratchpads = [] - current_scratchpad = None - - result.append(message) - - if scratchpads: - result.append(AssistantPromptMessage(content=self._format_assistant_message(scratchpads))) - - historic_prompts = AgentHistoryPromptTransform( - model_config=self.model_config, - prompt_messages=current_session_messages or [], - history_messages=result, - memory=self.memory, - ).get_prompt() - return historic_prompts diff --git a/api/core/agent/cot_chat_agent_runner.py b/api/core/agent/cot_chat_agent_runner.py deleted file mode 100644 index 4d1d94eadc..0000000000 --- a/api/core/agent/cot_chat_agent_runner.py +++ /dev/null @@ -1,118 +0,0 @@ -import json - -from core.agent.cot_agent_runner import CotAgentRunner -from core.file import file_manager -from core.model_runtime.entities import ( - AssistantPromptMessage, - PromptMessage, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.entities.message_entities import ImagePromptMessageContent, PromptMessageContentUnionTypes -from core.model_runtime.utils.encoders import jsonable_encoder - - -class CotChatAgentRunner(CotAgentRunner): - def _organize_system_prompt(self) -> SystemPromptMessage: - """ - Organize system prompt - """ - 
assert self.app_config.agent - assert self.app_config.agent.prompt - - prompt_entity = self.app_config.agent.prompt - if not prompt_entity: - raise ValueError("Agent prompt configuration is not set") - first_prompt = prompt_entity.first_prompt - - system_prompt = ( - first_prompt.replace("{{instruction}}", self._instruction) - .replace("{{tools}}", json.dumps(jsonable_encoder(self._prompt_messages_tools))) - .replace("{{tool_names}}", ", ".join([tool.name for tool in self._prompt_messages_tools])) - ) - - return SystemPromptMessage(content=system_prompt) - - def _organize_user_query(self, query, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: - """ - Organize user query - """ - if self.files: - # get image detail config - image_detail_config = ( - self.application_generate_entity.file_upload_config.image_config.detail - if ( - self.application_generate_entity.file_upload_config - and self.application_generate_entity.file_upload_config.image_config - ) - else None - ) - image_detail_config = image_detail_config or ImagePromptMessageContent.DETAIL.LOW - - prompt_message_contents: list[PromptMessageContentUnionTypes] = [] - for file in self.files: - prompt_message_contents.append( - file_manager.to_prompt_message_content( - file, - image_detail_config=image_detail_config, - ) - ) - prompt_message_contents.append(TextPromptMessageContent(data=query)) - - prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) - else: - prompt_messages.append(UserPromptMessage(content=query)) - - return prompt_messages - - def _organize_prompt_messages(self) -> list[PromptMessage]: - """ - Organize - """ - # organize system prompt - system_message = self._organize_system_prompt() - - # organize current assistant messages - agent_scratchpad = self._agent_scratchpad - if not agent_scratchpad: - assistant_messages = [] - else: - assistant_message = AssistantPromptMessage(content="") - assistant_message.content = "" # FIXME: type check tell mypy that assistant_message.content is str - for unit in agent_scratchpad: - if unit.is_final(): - assert isinstance(assistant_message.content, str) - assistant_message.content += f"Final Answer: {unit.agent_response}" - else: - assert isinstance(assistant_message.content, str) - assistant_message.content += f"Thought: {unit.thought}\n\n" - if unit.action_str: - assistant_message.content += f"Action: {unit.action_str}\n\n" - if unit.observation: - assistant_message.content += f"Observation: {unit.observation}\n\n" - - assistant_messages = [assistant_message] - - # query messages - query_messages = self._organize_user_query(self._query, []) - - if assistant_messages: - # organize historic prompt messages - historic_messages = self._organize_historic_prompt_messages( - [system_message, *query_messages, *assistant_messages, UserPromptMessage(content="continue")] - ) - messages = [ - system_message, - *historic_messages, - *query_messages, - *assistant_messages, - UserPromptMessage(content="continue"), - ] - else: - # organize historic prompt messages - historic_messages = self._organize_historic_prompt_messages([system_message, *query_messages]) - messages = [system_message, *historic_messages, *query_messages] - - # join all messages - return messages diff --git a/api/core/agent/cot_completion_agent_runner.py b/api/core/agent/cot_completion_agent_runner.py deleted file mode 100644 index da9a001d84..0000000000 --- a/api/core/agent/cot_completion_agent_runner.py +++ /dev/null @@ -1,87 +0,0 @@ -import json - -from core.agent.cot_agent_runner import 
CotAgentRunner -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.utils.encoders import jsonable_encoder - - -class CotCompletionAgentRunner(CotAgentRunner): - def _organize_instruction_prompt(self) -> str: - """ - Organize instruction prompt - """ - if self.app_config.agent is None: - raise ValueError("Agent configuration is not set") - prompt_entity = self.app_config.agent.prompt - if prompt_entity is None: - raise ValueError("prompt entity is not set") - first_prompt = prompt_entity.first_prompt - - system_prompt = ( - first_prompt.replace("{{instruction}}", self._instruction) - .replace("{{tools}}", json.dumps(jsonable_encoder(self._prompt_messages_tools))) - .replace("{{tool_names}}", ", ".join([tool.name for tool in self._prompt_messages_tools])) - ) - - return system_prompt - - def _organize_historic_prompt(self, current_session_messages: list[PromptMessage] | None = None) -> str: - """ - Organize historic prompt - """ - historic_prompt_messages = self._organize_historic_prompt_messages(current_session_messages) - historic_prompt = "" - - for message in historic_prompt_messages: - if isinstance(message, UserPromptMessage): - historic_prompt += f"Question: {message.content}\n\n" - elif isinstance(message, AssistantPromptMessage): - if isinstance(message.content, str): - historic_prompt += message.content + "\n\n" - elif isinstance(message.content, list): - for content in message.content: - if not isinstance(content, TextPromptMessageContent): - continue - historic_prompt += content.data - - return historic_prompt - - def _organize_prompt_messages(self) -> list[PromptMessage]: - """ - Organize prompt messages - """ - # organize system prompt - system_prompt = self._organize_instruction_prompt() - - # organize historic prompt messages - historic_prompt = self._organize_historic_prompt() - - # organize current assistant messages - agent_scratchpad = self._agent_scratchpad - assistant_prompt = "" - for unit in agent_scratchpad or []: - if unit.is_final(): - assistant_prompt += f"Final Answer: {unit.agent_response}" - else: - assistant_prompt += f"Thought: {unit.thought}\n\n" - if unit.action_str: - assistant_prompt += f"Action: {unit.action_str}\n\n" - if unit.observation: - assistant_prompt += f"Observation: {unit.observation}\n\n" - - # query messages - query_prompt = f"Question: {self._query}" - - # join all messages - prompt = ( - system_prompt.replace("{{historic_messages}}", historic_prompt) - .replace("{{agent_scratchpad}}", assistant_prompt) - .replace("{{query}}", query_prompt) - ) - - return [UserPromptMessage(content=prompt)] diff --git a/api/core/agent/entities.py b/api/core/agent/entities.py index 220feced1d..46af4d2d72 100644 --- a/api/core/agent/entities.py +++ b/api/core/agent/entities.py @@ -1,3 +1,5 @@ +import uuid +from collections.abc import Mapping from enum import StrEnum from typing import Any, Union @@ -92,3 +94,96 @@ class AgentInvokeMessage(ToolInvokeMessage): """ pass + + +class ExecutionContext(BaseModel): + """Execution context containing trace and audit information. + + This context carries all the IDs and metadata that are not part of + the core business logic but needed for tracing, auditing, and + correlation purposes. 
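+
+    A minimal construction sketch (the IDs shown are illustrative):
+
+        ctx = ExecutionContext.create_minimal(user_id="user-1")
+        ctx = ctx.with_updates(app_id="app-1", conversation_id="conv-1")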
+ """ + + user_id: str | None = None + app_id: str | None = None + conversation_id: str | None = None + message_id: str | None = None + tenant_id: str | None = None + + @classmethod + def create_minimal(cls, user_id: str | None = None) -> "ExecutionContext": + """Create a minimal context with only essential fields.""" + return cls(user_id=user_id) + + def to_dict(self) -> dict[str, Any]: + """Convert to dictionary for passing to legacy code.""" + return { + "user_id": self.user_id, + "app_id": self.app_id, + "conversation_id": self.conversation_id, + "message_id": self.message_id, + "tenant_id": self.tenant_id, + } + + def with_updates(self, **kwargs) -> "ExecutionContext": + """Create a new context with updated fields.""" + data = self.to_dict() + data.update(kwargs) + + return ExecutionContext( + user_id=data.get("user_id"), + app_id=data.get("app_id"), + conversation_id=data.get("conversation_id"), + message_id=data.get("message_id"), + tenant_id=data.get("tenant_id"), + ) + + +class AgentLog(BaseModel): + """ + Agent Log. + """ + + class LogType(StrEnum): + """Type of agent log entry.""" + + ROUND = "round" # A complete iteration round + THOUGHT = "thought" # LLM thinking/reasoning + TOOL_CALL = "tool_call" # Tool invocation + + class LogMetadata(StrEnum): + STARTED_AT = "started_at" + FINISHED_AT = "finished_at" + ELAPSED_TIME = "elapsed_time" + TOTAL_PRICE = "total_price" + TOTAL_TOKENS = "total_tokens" + PROVIDER = "provider" + CURRENCY = "currency" + LLM_USAGE = "llm_usage" + ICON = "icon" + ICON_DARK = "icon_dark" + + class LogStatus(StrEnum): + START = "start" + ERROR = "error" + SUCCESS = "success" + + id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="The id of the log") + label: str = Field(..., description="The label of the log") + log_type: LogType = Field(..., description="The type of the log") + parent_id: str | None = Field(default=None, description="Leave empty for root log") + error: str | None = Field(default=None, description="The error message") + status: LogStatus = Field(..., description="The status of the log") + data: Mapping[str, Any] = Field(..., description="Detailed log data") + metadata: Mapping[LogMetadata, Any] = Field(default={}, description="The metadata of the log") + + +class AgentResult(BaseModel): + """ + Agent execution result. 
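+
+    Strategies return this as the final value of their ``run`` generator, e.g.
+    ``AgentResult(text="final answer", finish_reason="stop")`` (values are
+    illustrative).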
+ """ + + text: str = Field(default="", description="The generated text") + files: list[Any] = Field(default_factory=list, description="Files produced during execution") + usage: Any | None = Field(default=None, description="LLM usage statistics") + finish_reason: str | None = Field(default=None, description="Reason for completion") diff --git a/api/core/agent/fc_agent_runner.py b/api/core/agent/fc_agent_runner.py deleted file mode 100644 index dcc1326b33..0000000000 --- a/api/core/agent/fc_agent_runner.py +++ /dev/null @@ -1,465 +0,0 @@ -import json -import logging -from collections.abc import Generator -from copy import deepcopy -from typing import Any, Union - -from core.agent.base_agent_runner import BaseAgentRunner -from core.app.apps.base_app_queue_manager import PublishFrom -from core.app.entities.queue_entities import QueueAgentThoughtEvent, QueueMessageEndEvent, QueueMessageFileEvent -from core.file import file_manager -from core.model_runtime.entities import ( - AssistantPromptMessage, - LLMResult, - LLMResultChunk, - LLMResultChunkDelta, - LLMUsage, - PromptMessage, - PromptMessageContentType, - SystemPromptMessage, - TextPromptMessageContent, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.message_entities import ImagePromptMessageContent, PromptMessageContentUnionTypes -from core.prompt.agent_history_prompt_transform import AgentHistoryPromptTransform -from core.tools.entities.tool_entities import ToolInvokeMeta -from core.tools.tool_engine import ToolEngine -from models.model import Message - -logger = logging.getLogger(__name__) - - -class FunctionCallAgentRunner(BaseAgentRunner): - def run(self, message: Message, query: str, **kwargs: Any) -> Generator[LLMResultChunk, None, None]: - """ - Run FunctionCall agent application - """ - self.query = query - app_generate_entity = self.application_generate_entity - - app_config = self.app_config - assert app_config is not None, "app_config is required" - assert app_config.agent is not None, "app_config.agent is required" - - # convert tools into ModelRuntime Tool format - tool_instances, prompt_messages_tools = self._init_prompt_tools() - - assert app_config.agent - - iteration_step = 1 - max_iteration_steps = min(app_config.agent.max_iteration, 99) + 1 - - # continue to run until there is not any tool call - function_call_state = True - llm_usage: dict[str, LLMUsage | None] = {"usage": None} - final_answer = "" - prompt_messages: list = [] # Initialize prompt_messages - - # get tracing instance - trace_manager = app_generate_entity.trace_manager - - def increase_usage(final_llm_usage_dict: dict[str, LLMUsage | None], usage: LLMUsage): - if not final_llm_usage_dict["usage"]: - final_llm_usage_dict["usage"] = usage - else: - llm_usage = final_llm_usage_dict["usage"] - llm_usage.prompt_tokens += usage.prompt_tokens - llm_usage.completion_tokens += usage.completion_tokens - llm_usage.total_tokens += usage.total_tokens - llm_usage.prompt_price += usage.prompt_price - llm_usage.completion_price += usage.completion_price - llm_usage.total_price += usage.total_price - - model_instance = self.model_instance - - while function_call_state and iteration_step <= max_iteration_steps: - function_call_state = False - - if iteration_step == max_iteration_steps: - # the last iteration, remove all tools - prompt_messages_tools = [] - - message_file_ids: list[str] = [] - agent_thought_id = self.create_agent_thought( - message_id=message.id, message="", tool_name="", tool_input="", messages_ids=message_file_ids - ) - - # 
recalc llm max tokens - prompt_messages = self._organize_prompt_messages() - self.recalc_llm_max_tokens(self.model_config, prompt_messages) - # invoke model - chunks: Union[Generator[LLMResultChunk, None, None], LLMResult] = model_instance.invoke_llm( - prompt_messages=prompt_messages, - model_parameters=app_generate_entity.model_conf.parameters, - tools=prompt_messages_tools, - stop=app_generate_entity.model_conf.stop, - stream=self.stream_tool_call, - user=self.user_id, - callbacks=[], - ) - - tool_calls: list[tuple[str, str, dict[str, Any]]] = [] - - # save full response - response = "" - - # save tool call names and inputs - tool_call_names = "" - tool_call_inputs = "" - - current_llm_usage = None - - if isinstance(chunks, Generator): - is_first_chunk = True - for chunk in chunks: - if is_first_chunk: - self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER - ) - is_first_chunk = False - # check if there is any tool call - if self.check_tool_calls(chunk): - function_call_state = True - tool_calls.extend(self.extract_tool_calls(chunk) or []) - tool_call_names = ";".join([tool_call[1] for tool_call in tool_calls]) - try: - tool_call_inputs = json.dumps( - {tool_call[1]: tool_call[2] for tool_call in tool_calls}, ensure_ascii=False - ) - except TypeError: - # fallback: force ASCII to handle non-serializable objects - tool_call_inputs = json.dumps({tool_call[1]: tool_call[2] for tool_call in tool_calls}) - - if chunk.delta.message and chunk.delta.message.content: - if isinstance(chunk.delta.message.content, list): - for content in chunk.delta.message.content: - response += content.data - else: - response += str(chunk.delta.message.content) - - if chunk.delta.usage: - increase_usage(llm_usage, chunk.delta.usage) - current_llm_usage = chunk.delta.usage - - yield chunk - else: - result = chunks - # check if there is any tool call - if self.check_blocking_tool_calls(result): - function_call_state = True - tool_calls.extend(self.extract_blocking_tool_calls(result) or []) - tool_call_names = ";".join([tool_call[1] for tool_call in tool_calls]) - try: - tool_call_inputs = json.dumps( - {tool_call[1]: tool_call[2] for tool_call in tool_calls}, ensure_ascii=False - ) - except TypeError: - # fallback: force ASCII to handle non-serializable objects - tool_call_inputs = json.dumps({tool_call[1]: tool_call[2] for tool_call in tool_calls}) - - if result.usage: - increase_usage(llm_usage, result.usage) - current_llm_usage = result.usage - - if result.message and result.message.content: - if isinstance(result.message.content, list): - for content in result.message.content: - response += content.data - else: - response += str(result.message.content) - - if not result.message.content: - result.message.content = "" - - self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER - ) - - yield LLMResultChunk( - model=model_instance.model, - prompt_messages=result.prompt_messages, - system_fingerprint=result.system_fingerprint, - delta=LLMResultChunkDelta( - index=0, - message=result.message, - usage=result.usage, - ), - ) - - assistant_message = AssistantPromptMessage(content="", tool_calls=[]) - if tool_calls: - assistant_message.tool_calls = [ - AssistantPromptMessage.ToolCall( - id=tool_call[0], - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=tool_call[1], arguments=json.dumps(tool_call[2], ensure_ascii=False) - ), - ) - for tool_call in 
tool_calls - ] - else: - assistant_message.content = response - - self._current_thoughts.append(assistant_message) - - # save thought - self.save_agent_thought( - agent_thought_id=agent_thought_id, - tool_name=tool_call_names, - tool_input=tool_call_inputs, - thought=response, - tool_invoke_meta=None, - observation=None, - answer=response, - messages_ids=[], - llm_usage=current_llm_usage, - ) - self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER - ) - - final_answer += response + "\n" - - # call tools - tool_responses = [] - for tool_call_id, tool_call_name, tool_call_args in tool_calls: - tool_instance = tool_instances.get(tool_call_name) - if not tool_instance: - tool_response = { - "tool_call_id": tool_call_id, - "tool_call_name": tool_call_name, - "tool_response": f"there is not a tool named {tool_call_name}", - "meta": ToolInvokeMeta.error_instance(f"there is not a tool named {tool_call_name}").to_dict(), - } - else: - # invoke tool - tool_invoke_response, message_files, tool_invoke_meta = ToolEngine.agent_invoke( - tool=tool_instance, - tool_parameters=tool_call_args, - user_id=self.user_id, - tenant_id=self.tenant_id, - message=self.message, - invoke_from=self.application_generate_entity.invoke_from, - agent_tool_callback=self.agent_callback, - trace_manager=trace_manager, - app_id=self.application_generate_entity.app_config.app_id, - message_id=self.message.id, - conversation_id=self.conversation.id, - ) - # publish files - for message_file_id in message_files: - # publish message file - self.queue_manager.publish( - QueueMessageFileEvent(message_file_id=message_file_id), PublishFrom.APPLICATION_MANAGER - ) - # add message file ids - message_file_ids.append(message_file_id) - - tool_response = { - "tool_call_id": tool_call_id, - "tool_call_name": tool_call_name, - "tool_response": tool_invoke_response, - "meta": tool_invoke_meta.to_dict(), - } - - tool_responses.append(tool_response) - if tool_response["tool_response"] is not None: - self._current_thoughts.append( - ToolPromptMessage( - content=str(tool_response["tool_response"]), - tool_call_id=tool_call_id, - name=tool_call_name, - ) - ) - - if len(tool_responses) > 0: - # save agent thought - self.save_agent_thought( - agent_thought_id=agent_thought_id, - tool_name="", - tool_input="", - thought="", - tool_invoke_meta={ - tool_response["tool_call_name"]: tool_response["meta"] for tool_response in tool_responses - }, - observation={ - tool_response["tool_call_name"]: tool_response["tool_response"] - for tool_response in tool_responses - }, - answer="", - messages_ids=message_file_ids, - ) - self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER - ) - - # update prompt tool - for prompt_tool in prompt_messages_tools: - self.update_prompt_message_tool(tool_instances[prompt_tool.name], prompt_tool) - - iteration_step += 1 - - # publish end event - self.queue_manager.publish( - QueueMessageEndEvent( - llm_result=LLMResult( - model=model_instance.model, - prompt_messages=prompt_messages, - message=AssistantPromptMessage(content=final_answer), - usage=llm_usage["usage"] or LLMUsage.empty_usage(), - system_fingerprint="", - ) - ), - PublishFrom.APPLICATION_MANAGER, - ) - - def check_tool_calls(self, llm_result_chunk: LLMResultChunk) -> bool: - """ - Check if there is any tool call in llm result chunk - """ - if llm_result_chunk.delta.message.tool_calls: - return True - return False - - def 
check_blocking_tool_calls(self, llm_result: LLMResult) -> bool: - """ - Check if there is any blocking tool call in llm result - """ - if llm_result.message.tool_calls: - return True - return False - - def extract_tool_calls(self, llm_result_chunk: LLMResultChunk) -> list[tuple[str, str, dict[str, Any]]]: - """ - Extract tool calls from llm result chunk - - Returns: - List[Tuple[str, str, Dict[str, Any]]]: [(tool_call_id, tool_call_name, tool_call_args)] - """ - tool_calls = [] - for prompt_message in llm_result_chunk.delta.message.tool_calls: - args = {} - if prompt_message.function.arguments != "": - args = json.loads(prompt_message.function.arguments) - - tool_calls.append( - ( - prompt_message.id, - prompt_message.function.name, - args, - ) - ) - - return tool_calls - - def extract_blocking_tool_calls(self, llm_result: LLMResult) -> list[tuple[str, str, dict[str, Any]]]: - """ - Extract blocking tool calls from llm result - - Returns: - List[Tuple[str, str, Dict[str, Any]]]: [(tool_call_id, tool_call_name, tool_call_args)] - """ - tool_calls = [] - for prompt_message in llm_result.message.tool_calls: - args = {} - if prompt_message.function.arguments != "": - args = json.loads(prompt_message.function.arguments) - - tool_calls.append( - ( - prompt_message.id, - prompt_message.function.name, - args, - ) - ) - - return tool_calls - - def _init_system_message(self, prompt_template: str, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: - """ - Initialize system message - """ - if not prompt_messages and prompt_template: - return [ - SystemPromptMessage(content=prompt_template), - ] - - if prompt_messages and not isinstance(prompt_messages[0], SystemPromptMessage) and prompt_template: - prompt_messages.insert(0, SystemPromptMessage(content=prompt_template)) - - return prompt_messages or [] - - def _organize_user_query(self, query: str, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: - """ - Organize user query - """ - if self.files: - # get image detail config - image_detail_config = ( - self.application_generate_entity.file_upload_config.image_config.detail - if ( - self.application_generate_entity.file_upload_config - and self.application_generate_entity.file_upload_config.image_config - ) - else None - ) - image_detail_config = image_detail_config or ImagePromptMessageContent.DETAIL.LOW - - prompt_message_contents: list[PromptMessageContentUnionTypes] = [] - for file in self.files: - prompt_message_contents.append( - file_manager.to_prompt_message_content( - file, - image_detail_config=image_detail_config, - ) - ) - prompt_message_contents.append(TextPromptMessageContent(data=query)) - - prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) - else: - prompt_messages.append(UserPromptMessage(content=query)) - - return prompt_messages - - def _clear_user_prompt_image_messages(self, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: - """ - As for now, gpt supports both fc and vision at the first iteration. - We need to remove the image messages from the prompt messages at the first iteration. 
-        """
-        prompt_messages = deepcopy(prompt_messages)
-
-        for prompt_message in prompt_messages:
-            if isinstance(prompt_message, UserPromptMessage):
-                if isinstance(prompt_message.content, list):
-                    prompt_message.content = "\n".join(
-                        [
-                            content.data
-                            if content.type == PromptMessageContentType.TEXT
-                            else "[image]"
-                            if content.type == PromptMessageContentType.IMAGE
-                            else "[file]"
-                            for content in prompt_message.content
-                        ]
-                    )
-
-        return prompt_messages
-
-    def _organize_prompt_messages(self):
-        prompt_template = self.app_config.prompt_template.simple_prompt_template or ""
-        self.history_prompt_messages = self._init_system_message(prompt_template, self.history_prompt_messages)
-        query_prompt_messages = self._organize_user_query(self.query or "", [])
-
-        self.history_prompt_messages = AgentHistoryPromptTransform(
-            model_config=self.model_config,
-            prompt_messages=[*query_prompt_messages, *self._current_thoughts],
-            history_messages=self.history_prompt_messages,
-            memory=self.memory,
-        ).get_prompt()
-
-        prompt_messages = [*self.history_prompt_messages, *query_prompt_messages, *self._current_thoughts]
-        if len(self._current_thoughts) != 0:
-            # clear messages after the first iteration
-            prompt_messages = self._clear_user_prompt_image_messages(prompt_messages)
-        return prompt_messages
diff --git a/api/core/agent/patterns/README.md b/api/core/agent/patterns/README.md
new file mode 100644
index 0000000000..95b1bf87fa
--- /dev/null
+++ b/api/core/agent/patterns/README.md
@@ -0,0 +1,55 @@
+# Agent Patterns
+
+A unified agent pattern module that powers both Agent V2 workflow nodes and agent applications. Strategies share a common execution contract while adapting to model capabilities and tool availability.
+
+## Overview
+
+The module applies a strategy pattern around LLM/tool orchestration. `StrategyFactory` auto-selects the best implementation based on model features or an explicit agent strategy, and each strategy streams logs and usage consistently.
+
+## Key Features
+
+- **Dual strategies**
+  - `FunctionCallStrategy`: uses native LLM function/tool calling when the model exposes `TOOL_CALL`, `MULTI_TOOL_CALL`, or `STREAM_TOOL_CALL`.
+  - `ReActStrategy`: ReAct (reasoning + acting) flow driven by `CotAgentOutputParser`, used when function calling is unavailable or explicitly requested.
+- **Explicit or auto selection**
+  - `StrategyFactory.create_strategy` prefers an explicit `AgentEntity.Strategy` (FUNCTION_CALLING or CHAIN_OF_THOUGHT).
+  - Otherwise it falls back to function calling when tool-call features exist, or ReAct when they do not.
+- **Unified execution contract**
+  - `AgentPattern.run` yields streaming `AgentLog` entries and `LLMResultChunk` data, returning an `AgentResult` with text, files, usage, and `finish_reason`.
+  - Iterations are configurable and hard-capped at 99 rounds; the last round forces a final answer by withholding tools.
+- **Tool handling and hooks**
+  - Tools convert to `PromptMessageTool` objects before invocation.
+  - Optional `tool_invoke_hook` lets callers override tool execution (e.g., agent apps) while workflow runs use `ToolEngine.generic_invoke`.
+  - Tool outputs support text, links, JSON, variables, blobs, retriever resources, and file attachments; `target=="self"` files are reloaded into model context, others are returned as outputs.
+- **File-aware arguments**
+  - Tool args accept `[File: file_id]` or `[Files: file_id1, file_id2, ...]` placeholders that resolve to `File` objects before invocation, enabling models to reference uploaded files safely; see the sketch below.
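+  - A minimal sketch of the substitution, assuming hypothetical file IDs:
+
+    ```python
+    tool_args = {"document": "[File: 3f2a9c]", "sources": "[Files: 3f2a9c, 77b1e0]"}
+    # After resolution, each placeholder becomes a File object:
+    # {"document": File(...), "sources": [File(...), File(...)]}
+    ```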
+- **ReAct prompt shaping** + - System prompts replace `{{instruction}}`, `{{tools}}`, and `{{tool_names}}` placeholders. + - Adds `Observation` to stop sequences and appends scratchpad text so the model sees prior Thought/Action/Observation history. +- **Observability and accounting** + - Standardized `AgentLog` entries for rounds, model thoughts, and tool calls, including usage aggregation (`LLMUsage`) across streaming and non-streaming paths. + +## Architecture + +``` +agent/patterns/ +├── base.py # Shared utilities: logging, usage, tool invocation, file handling +├── function_call.py # Native function-calling loop with tool execution +├── react.py # ReAct loop with CoT parsing and scratchpad wiring +└── strategy_factory.py # Strategy selection by model features or explicit override +``` + +## Usage + +- For auto-selection: + - Call `StrategyFactory.create_strategy(model_features, model_instance, context, tools, files, ...)` and run the returned strategy with prompt messages and model params. +- For explicit behavior: + - Pass `agent_strategy=AgentEntity.Strategy.FUNCTION_CALLING` to force native calls (falls back to ReAct if unsupported), or `CHAIN_OF_THOUGHT` to force ReAct. +- Both strategies stream chunks and logs; collect the generator output until it returns an `AgentResult`. + +## Integration Points + +- **Model runtime**: delegates to `ModelInstance.invoke_llm` for both streaming and non-streaming calls. +- **Tool system**: defaults to `ToolEngine.generic_invoke`, with `tool_invoke_hook` for custom callers. +- **Files**: flows through `File` objects for tool inputs/outputs and model-context attachments. +- **Execution context**: `ExecutionContext` fields (user/app/conversation/message) propagate to tool invocations and logging. diff --git a/api/core/agent/patterns/__init__.py b/api/core/agent/patterns/__init__.py new file mode 100644 index 0000000000..8a3b125533 --- /dev/null +++ b/api/core/agent/patterns/__init__.py @@ -0,0 +1,19 @@ +"""Agent patterns module. 
+ +This module provides different strategies for agent execution: +- FunctionCallStrategy: Uses native function/tool calling +- ReActStrategy: Uses ReAct (Reasoning + Acting) approach +- StrategyFactory: Factory for creating strategies based on model features +""" + +from .base import AgentPattern +from .function_call import FunctionCallStrategy +from .react import ReActStrategy +from .strategy_factory import StrategyFactory + +__all__ = [ + "AgentPattern", + "FunctionCallStrategy", + "ReActStrategy", + "StrategyFactory", +] diff --git a/api/core/agent/patterns/base.py b/api/core/agent/patterns/base.py new file mode 100644 index 0000000000..d98fa005a3 --- /dev/null +++ b/api/core/agent/patterns/base.py @@ -0,0 +1,474 @@ +"""Base class for agent strategies.""" + +from __future__ import annotations + +import json +import re +import time +from abc import ABC, abstractmethod +from collections.abc import Callable, Generator +from typing import TYPE_CHECKING, Any + +from core.agent.entities import AgentLog, AgentResult, ExecutionContext +from core.file import File +from core.model_manager import ModelInstance +from core.model_runtime.entities import ( + AssistantPromptMessage, + LLMResult, + LLMResultChunk, + LLMResultChunkDelta, + PromptMessage, + PromptMessageTool, +) +from core.model_runtime.entities.llm_entities import LLMUsage +from core.model_runtime.entities.message_entities import TextPromptMessageContent +from core.tools.entities.tool_entities import ToolInvokeMessage, ToolInvokeMeta + +if TYPE_CHECKING: + from core.tools.__base.tool import Tool + +# Type alias for tool invoke hook +# Returns: (response_content, message_file_ids, tool_invoke_meta) +ToolInvokeHook = Callable[["Tool", dict[str, Any], str], tuple[str, list[str], ToolInvokeMeta]] + + +class AgentPattern(ABC): + """Base class for agent execution strategies.""" + + def __init__( + self, + model_instance: ModelInstance, + tools: list[Tool], + context: ExecutionContext, + max_iterations: int = 10, + workflow_call_depth: int = 0, + files: list[File] = [], + tool_invoke_hook: ToolInvokeHook | None = None, + ): + """Initialize the agent strategy.""" + self.model_instance = model_instance + self.tools = tools + self.context = context + self.max_iterations = min(max_iterations, 99) # Cap at 99 iterations + self.workflow_call_depth = workflow_call_depth + self.files: list[File] = files + self.tool_invoke_hook = tool_invoke_hook + + @abstractmethod + def run( + self, + prompt_messages: list[PromptMessage], + model_parameters: dict[str, Any], + stop: list[str] = [], + stream: bool = True, + ) -> Generator[LLMResultChunk | AgentLog, None, AgentResult]: + """Execute the agent strategy.""" + pass + + def _accumulate_usage(self, total_usage: dict[str, Any], delta_usage: LLMUsage) -> None: + """Accumulate LLM usage statistics.""" + if not total_usage.get("usage"): + # Create a copy to avoid modifying the original + total_usage["usage"] = LLMUsage( + prompt_tokens=delta_usage.prompt_tokens, + prompt_unit_price=delta_usage.prompt_unit_price, + prompt_price_unit=delta_usage.prompt_price_unit, + prompt_price=delta_usage.prompt_price, + completion_tokens=delta_usage.completion_tokens, + completion_unit_price=delta_usage.completion_unit_price, + completion_price_unit=delta_usage.completion_price_unit, + completion_price=delta_usage.completion_price, + total_tokens=delta_usage.total_tokens, + total_price=delta_usage.total_price, + currency=delta_usage.currency, + latency=delta_usage.latency, + ) + else: + current: LLMUsage = total_usage["usage"] + 
current.prompt_tokens += delta_usage.prompt_tokens + current.completion_tokens += delta_usage.completion_tokens + current.total_tokens += delta_usage.total_tokens + current.prompt_price += delta_usage.prompt_price + current.completion_price += delta_usage.completion_price + current.total_price += delta_usage.total_price + + def _extract_content(self, content: Any) -> str: + """Extract text content from message content.""" + if isinstance(content, list): + # Content items are PromptMessageContentUnionTypes + text_parts = [] + for c in content: + # Check if it's a TextPromptMessageContent (which has data attribute) + if isinstance(c, TextPromptMessageContent): + text_parts.append(c.data) + return "".join(text_parts) + return str(content) + + def _has_tool_calls(self, chunk: LLMResultChunk) -> bool: + """Check if chunk contains tool calls.""" + # LLMResultChunk always has delta attribute + return bool(chunk.delta.message and chunk.delta.message.tool_calls) + + def _has_tool_calls_result(self, result: LLMResult) -> bool: + """Check if result contains tool calls (non-streaming).""" + # LLMResult always has message attribute + return bool(result.message and result.message.tool_calls) + + def _extract_tool_calls(self, chunk: LLMResultChunk) -> list[tuple[str, str, dict[str, Any]]]: + """Extract tool calls from streaming chunk.""" + tool_calls: list[tuple[str, str, dict[str, Any]]] = [] + if chunk.delta.message and chunk.delta.message.tool_calls: + for tool_call in chunk.delta.message.tool_calls: + if tool_call.function: + try: + args = json.loads(tool_call.function.arguments) if tool_call.function.arguments else {} + except json.JSONDecodeError: + args = {} + tool_calls.append((tool_call.id or "", tool_call.function.name, args)) + return tool_calls + + def _extract_tool_calls_result(self, result: LLMResult) -> list[tuple[str, str, dict[str, Any]]]: + """Extract tool calls from non-streaming result.""" + tool_calls = [] + if result.message and result.message.tool_calls: + for tool_call in result.message.tool_calls: + if tool_call.function: + try: + args = json.loads(tool_call.function.arguments) if tool_call.function.arguments else {} + except json.JSONDecodeError: + args = {} + tool_calls.append((tool_call.id or "", tool_call.function.name, args)) + return tool_calls + + def _extract_text_from_message(self, message: PromptMessage) -> str: + """Extract text content from a prompt message.""" + # PromptMessage always has content attribute + content = message.content + if isinstance(content, str): + return content + elif isinstance(content, list): + # Extract text from content list + text_parts = [] + for item in content: + if isinstance(item, TextPromptMessageContent): + text_parts.append(item.data) + return " ".join(text_parts) + return "" + + def _get_tool_metadata(self, tool_instance: Tool) -> dict[AgentLog.LogMetadata, Any]: + """Get metadata for a tool including provider and icon info.""" + from core.tools.tool_manager import ToolManager + + metadata: dict[AgentLog.LogMetadata, Any] = {} + if tool_instance.entity and tool_instance.entity.identity: + identity = tool_instance.entity.identity + if identity.provider: + metadata[AgentLog.LogMetadata.PROVIDER] = identity.provider + + # Get icon using ToolManager for proper URL generation + tenant_id = self.context.tenant_id + if tenant_id and identity.provider: + try: + provider_type = tool_instance.tool_provider_type() + icon = ToolManager.get_tool_icon(tenant_id, provider_type, identity.provider) + if isinstance(icon, str): + 
metadata[AgentLog.LogMetadata.ICON] = icon + elif isinstance(icon, dict): + # Handle icon dict with background/content or light/dark variants + metadata[AgentLog.LogMetadata.ICON] = icon + except Exception: + # Fallback to identity.icon if ToolManager fails + if identity.icon: + metadata[AgentLog.LogMetadata.ICON] = identity.icon + elif identity.icon: + metadata[AgentLog.LogMetadata.ICON] = identity.icon + return metadata + + def _create_log( + self, + label: str, + log_type: AgentLog.LogType, + status: AgentLog.LogStatus, + data: dict[str, Any] | None = None, + parent_id: str | None = None, + extra_metadata: dict[AgentLog.LogMetadata, Any] | None = None, + ) -> AgentLog: + """Create a new AgentLog with standard metadata.""" + metadata: dict[AgentLog.LogMetadata, Any] = { + AgentLog.LogMetadata.STARTED_AT: time.perf_counter(), + } + if extra_metadata: + metadata.update(extra_metadata) + + return AgentLog( + label=label, + log_type=log_type, + status=status, + data=data or {}, + parent_id=parent_id, + metadata=metadata, + ) + + def _finish_log( + self, + log: AgentLog, + data: dict[str, Any] | None = None, + usage: LLMUsage | None = None, + ) -> AgentLog: + """Finish an AgentLog by updating its status and metadata.""" + log.status = AgentLog.LogStatus.SUCCESS + + if data is not None: + log.data = data + + # Calculate elapsed time + started_at = log.metadata.get(AgentLog.LogMetadata.STARTED_AT, time.perf_counter()) + finished_at = time.perf_counter() + + # Update metadata + log.metadata = { + **log.metadata, + AgentLog.LogMetadata.FINISHED_AT: finished_at, + # Calculate elapsed time in seconds + AgentLog.LogMetadata.ELAPSED_TIME: round(finished_at - started_at, 4), + } + + # Add usage information if provided + if usage: + log.metadata.update( + { + AgentLog.LogMetadata.TOTAL_PRICE: usage.total_price, + AgentLog.LogMetadata.CURRENCY: usage.currency, + AgentLog.LogMetadata.TOTAL_TOKENS: usage.total_tokens, + AgentLog.LogMetadata.LLM_USAGE: usage, + } + ) + + return log + + def _replace_file_references(self, tool_args: dict[str, Any]) -> dict[str, Any]: + """ + Replace file references in tool arguments with actual File objects. + + Args: + tool_args: Dictionary of tool arguments + + Returns: + Updated tool arguments with file references replaced + """ + # Process each argument in the dictionary + processed_args: dict[str, Any] = {} + for key, value in tool_args.items(): + processed_args[key] = self._process_file_reference(value) + return processed_args + + def _process_file_reference(self, data: Any) -> Any: + """ + Recursively process data to replace file references. + Supports both single file [File: file_id] and multiple files [Files: file_id1, file_id2, ...]. 
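+
+        Example (hypothetical IDs, sketching the rules implemented below):
+            "[File: f1]"       -> the File in self.files whose id is "f1"
+            "[Files: f1, f2]"  -> [<File f1>, <File f2>]
+        Unmatched references are returned unchanged.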
+ + Args: + data: The data to process (can be dict, list, str, or other types) + + Returns: + Processed data with file references replaced + """ + single_file_pattern = re.compile(r"^\[File:\s*([^\]]+)\]$") + multiple_files_pattern = re.compile(r"^\[Files:\s*([^\]]+)\]$") + + if isinstance(data, dict): + # Process dictionary recursively + return {key: self._process_file_reference(value) for key, value in data.items()} + elif isinstance(data, list): + # Process list recursively + return [self._process_file_reference(item) for item in data] + elif isinstance(data, str): + # Check for single file pattern [File: file_id] + single_match = single_file_pattern.match(data.strip()) + if single_match: + file_id = single_match.group(1).strip() + # Find the file in self.files + for file in self.files: + if file.id and str(file.id) == file_id: + return file + # If file not found, return original value + return data + + # Check for multiple files pattern [Files: file_id1, file_id2, ...] + multiple_match = multiple_files_pattern.match(data.strip()) + if multiple_match: + file_ids_str = multiple_match.group(1).strip() + # Split by comma and strip whitespace + file_ids = [fid.strip() for fid in file_ids_str.split(",")] + + # Find all matching files + matched_files: list[File] = [] + for file_id in file_ids: + for file in self.files: + if file.id and str(file.id) == file_id: + matched_files.append(file) + break + + # Return list of files if any were found, otherwise return original + return matched_files or data + + return data + else: + # Return other types as-is + return data + + def _create_text_chunk(self, text: str, prompt_messages: list[PromptMessage]) -> LLMResultChunk: + """Create a text chunk for streaming.""" + return LLMResultChunk( + model=self.model_instance.model, + prompt_messages=prompt_messages, + delta=LLMResultChunkDelta( + index=0, + message=AssistantPromptMessage(content=text), + usage=None, + ), + system_fingerprint="", + ) + + def _invoke_tool( + self, + tool_instance: Tool, + tool_args: dict[str, Any], + tool_name: str, + ) -> tuple[str, list[File], ToolInvokeMeta | None]: + """ + Invoke a tool and collect its response. 
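+
+        Dispatch: when a tool_invoke_hook is configured (agent app scenario),
+        the hook performs the invocation and file publishing is left to the
+        caller; otherwise ToolEngine.generic_invoke is used (workflow
+        scenario) and its message stream is flattened into text below.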
+ + Args: + tool_instance: The tool instance to invoke + tool_args: Tool arguments + tool_name: Name of the tool + + Returns: + Tuple of (response_content, tool_files, tool_invoke_meta) + """ + # Process tool_args to replace file references with actual File objects + tool_args = self._replace_file_references(tool_args) + + # If a tool invoke hook is set, use it instead of generic_invoke + if self.tool_invoke_hook: + response_content, _, tool_invoke_meta = self.tool_invoke_hook(tool_instance, tool_args, tool_name) + # Note: message_file_ids are stored in DB, we don't convert them to File objects here + # The caller (AgentAppRunner) handles file publishing + return response_content, [], tool_invoke_meta + + # Default: use generic_invoke for workflow scenarios + # Import here to avoid circular import + from core.tools.tool_engine import DifyWorkflowCallbackHandler, ToolEngine + + tool_response = ToolEngine().generic_invoke( + tool=tool_instance, + tool_parameters=tool_args, + user_id=self.context.user_id or "", + workflow_tool_callback=DifyWorkflowCallbackHandler(), + workflow_call_depth=self.workflow_call_depth, + app_id=self.context.app_id, + conversation_id=self.context.conversation_id, + message_id=self.context.message_id, + ) + + # Collect response and files + response_content = "" + tool_files: list[File] = [] + + for response in tool_response: + if response.type == ToolInvokeMessage.MessageType.TEXT: + assert isinstance(response.message, ToolInvokeMessage.TextMessage) + response_content += response.message.text + + elif response.type == ToolInvokeMessage.MessageType.LINK: + # Handle link messages + if isinstance(response.message, ToolInvokeMessage.TextMessage): + response_content += f"[Link: {response.message.text}]" + + elif response.type == ToolInvokeMessage.MessageType.IMAGE: + # Handle image URL messages + if isinstance(response.message, ToolInvokeMessage.TextMessage): + response_content += f"[Image: {response.message.text}]" + + elif response.type == ToolInvokeMessage.MessageType.IMAGE_LINK: + # Handle image link messages + if isinstance(response.message, ToolInvokeMessage.TextMessage): + response_content += f"[Image: {response.message.text}]" + + elif response.type == ToolInvokeMessage.MessageType.BINARY_LINK: + # Handle binary file link messages + if isinstance(response.message, ToolInvokeMessage.TextMessage): + filename = response.meta.get("filename", "file") if response.meta else "file" + response_content += f"[File: {filename} - {response.message.text}]" + + elif response.type == ToolInvokeMessage.MessageType.JSON: + # Handle JSON messages + if isinstance(response.message, ToolInvokeMessage.JsonMessage): + response_content += json.dumps(response.message.json_object, ensure_ascii=False, indent=2) + + elif response.type == ToolInvokeMessage.MessageType.BLOB: + # Handle blob messages - convert to text representation + if isinstance(response.message, ToolInvokeMessage.BlobMessage): + mime_type = ( + response.meta.get("mime_type", "application/octet-stream") + if response.meta + else "application/octet-stream" + ) + size = len(response.message.blob) + response_content += f"[Binary data: {mime_type}, size: {size} bytes]" + + elif response.type == ToolInvokeMessage.MessageType.VARIABLE: + # Handle variable messages + if isinstance(response.message, ToolInvokeMessage.VariableMessage): + var_name = response.message.variable_name + var_value = response.message.variable_value + if isinstance(var_value, str): + response_content += var_value + else: + response_content += f"[Variable 
{var_name}: {json.dumps(var_value, ensure_ascii=False)}]" + + elif response.type == ToolInvokeMessage.MessageType.BLOB_CHUNK: + # Handle blob chunk messages - these are parts of a larger blob + if isinstance(response.message, ToolInvokeMessage.BlobChunkMessage): + response_content += f"[Blob chunk {response.message.sequence}: {len(response.message.blob)} bytes]" + + elif response.type == ToolInvokeMessage.MessageType.RETRIEVER_RESOURCES: + # Handle retriever resources messages + if isinstance(response.message, ToolInvokeMessage.RetrieverResourceMessage): + response_content += response.message.context + + elif response.type == ToolInvokeMessage.MessageType.FILE: + # Extract file from meta + if response.meta and "file" in response.meta: + file = response.meta["file"] + if isinstance(file, File): + # Check if file is for model or tool output + if response.meta.get("target") == "self": + # File is for model - add to files for next prompt + self.files.append(file) + response_content += f"File '{file.filename}' has been loaded into your context." + else: + # File is tool output + tool_files.append(file) + + return response_content, tool_files, None + + def _find_tool_by_name(self, tool_name: str) -> Tool | None: + """Find a tool instance by its name.""" + for tool in self.tools: + if tool.entity.identity.name == tool_name: + return tool + return None + + def _convert_tools_to_prompt_format(self) -> list[PromptMessageTool]: + """Convert tools to prompt message format.""" + prompt_tools: list[PromptMessageTool] = [] + for tool in self.tools: + prompt_tools.append(tool.to_prompt_message_tool()) + return prompt_tools + + def _update_usage_with_empty(self, llm_usage: dict[str, Any]) -> None: + """Initialize usage tracking with empty usage if not set.""" + if "usage" not in llm_usage or llm_usage["usage"] is None: + llm_usage["usage"] = LLMUsage.empty_usage() diff --git a/api/core/agent/patterns/function_call.py b/api/core/agent/patterns/function_call.py new file mode 100644 index 0000000000..a0f0d30ab3 --- /dev/null +++ b/api/core/agent/patterns/function_call.py @@ -0,0 +1,299 @@ +"""Function Call strategy implementation.""" + +import json +from collections.abc import Generator +from typing import Any, Union + +from core.agent.entities import AgentLog, AgentResult +from core.file import File +from core.model_runtime.entities import ( + AssistantPromptMessage, + LLMResult, + LLMResultChunk, + LLMResultChunkDelta, + LLMUsage, + PromptMessage, + PromptMessageTool, + ToolPromptMessage, +) +from core.tools.entities.tool_entities import ToolInvokeMeta + +from .base import AgentPattern + + +class FunctionCallStrategy(AgentPattern): + """Function Call strategy using model's native tool calling capability.""" + + def run( + self, + prompt_messages: list[PromptMessage], + model_parameters: dict[str, Any], + stop: list[str] = [], + stream: bool = True, + ) -> Generator[LLMResultChunk | AgentLog, None, AgentResult]: + """Execute the function call agent strategy.""" + # Convert tools to prompt format + prompt_tools: list[PromptMessageTool] = self._convert_tools_to_prompt_format() + + # Initialize tracking + iteration_step: int = 1 + max_iterations: int = self.max_iterations + 1 + function_call_state: bool = True + total_usage: dict[str, LLMUsage | None] = {"usage": None} + messages: list[PromptMessage] = list(prompt_messages) # Create mutable copy + final_text: str = "" + finish_reason: str | None = None + output_files: list[File] = [] # Track files produced by tools + + while function_call_state and 
iteration_step <= max_iterations: + function_call_state = False + round_log = self._create_log( + label=f"ROUND {iteration_step}", + log_type=AgentLog.LogType.ROUND, + status=AgentLog.LogStatus.START, + data={}, + ) + yield round_log + # On last iteration, remove tools to force final answer + current_tools: list[PromptMessageTool] = [] if iteration_step == max_iterations else prompt_tools + model_log = self._create_log( + label=f"{self.model_instance.model} Thought", + log_type=AgentLog.LogType.THOUGHT, + status=AgentLog.LogStatus.START, + data={}, + parent_id=round_log.id, + extra_metadata={ + AgentLog.LogMetadata.PROVIDER: self.model_instance.provider, + }, + ) + yield model_log + + # Track usage for this round only + round_usage: dict[str, LLMUsage | None] = {"usage": None} + + # Invoke model + chunks: Union[Generator[LLMResultChunk, None, None], LLMResult] = self.model_instance.invoke_llm( + prompt_messages=messages, + model_parameters=model_parameters, + tools=current_tools, + stop=stop, + stream=stream, + user=self.context.user_id, + callbacks=[], + ) + + # Process response + tool_calls, response_content, chunk_finish_reason = yield from self._handle_chunks( + chunks, round_usage, model_log + ) + messages.append(self._create_assistant_message(response_content, tool_calls)) + + # Accumulate to total usage + round_usage_value = round_usage.get("usage") + if round_usage_value: + self._accumulate_usage(total_usage, round_usage_value) + + # Update final text if no tool calls (this is likely the final answer) + if not tool_calls: + final_text = response_content + + # Update finish reason + if chunk_finish_reason: + finish_reason = chunk_finish_reason + + # Process tool calls + tool_outputs: dict[str, str] = {} + if tool_calls: + function_call_state = True + # Execute tools + for tool_call_id, tool_name, tool_args in tool_calls: + tool_response, tool_files, _ = yield from self._handle_tool_call( + tool_name, tool_args, tool_call_id, messages, round_log + ) + tool_outputs[tool_name] = tool_response + # Track files produced by tools + output_files.extend(tool_files) + yield self._finish_log( + round_log, + data={ + "llm_result": response_content, + "tool_calls": [ + {"name": tc[1], "args": tc[2], "output": tool_outputs.get(tc[1], "")} for tc in tool_calls + ] + if tool_calls + else [], + "final_answer": final_text if not function_call_state else None, + }, + usage=round_usage.get("usage"), + ) + iteration_step += 1 + + # Return final result + from core.agent.entities import AgentResult + + return AgentResult( + text=final_text, + files=output_files, + usage=total_usage.get("usage") or LLMUsage.empty_usage(), + finish_reason=finish_reason, + ) + + def _handle_chunks( + self, + chunks: Union[Generator[LLMResultChunk, None, None], LLMResult], + llm_usage: dict[str, LLMUsage | None], + start_log: AgentLog, + ) -> Generator[ + LLMResultChunk | AgentLog, + None, + tuple[list[tuple[str, str, dict[str, Any]]], str, str | None], + ]: + """Handle LLM response chunks and extract tool calls and content. + + Returns a tuple of (tool_calls, response_content, finish_reason). 
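+
+        Handles both streaming (a Generator of LLMResultChunk) and
+        non-streaming (LLMResult) responses; a non-streaming result is
+        re-emitted as a single LLMResultChunk so callers only ever see the
+        streaming shape.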
+ """ + tool_calls: list[tuple[str, str, dict[str, Any]]] = [] + response_content: str = "" + finish_reason: str | None = None + if isinstance(chunks, Generator): + # Streaming response + for chunk in chunks: + # Extract tool calls + if self._has_tool_calls(chunk): + tool_calls.extend(self._extract_tool_calls(chunk)) + + # Extract content + if chunk.delta.message and chunk.delta.message.content: + response_content += self._extract_content(chunk.delta.message.content) + + # Track usage + if chunk.delta.usage: + self._accumulate_usage(llm_usage, chunk.delta.usage) + + # Capture finish reason + if chunk.delta.finish_reason: + finish_reason = chunk.delta.finish_reason + + yield chunk + else: + # Non-streaming response + result: LLMResult = chunks + + if self._has_tool_calls_result(result): + tool_calls.extend(self._extract_tool_calls_result(result)) + + if result.message and result.message.content: + response_content += self._extract_content(result.message.content) + + if result.usage: + self._accumulate_usage(llm_usage, result.usage) + + # Convert to streaming format + yield LLMResultChunk( + model=result.model, + prompt_messages=result.prompt_messages, + delta=LLMResultChunkDelta(index=0, message=result.message, usage=result.usage), + ) + yield self._finish_log( + start_log, + data={ + "result": response_content, + }, + usage=llm_usage.get("usage"), + ) + return tool_calls, response_content, finish_reason + + def _create_assistant_message( + self, content: str, tool_calls: list[tuple[str, str, dict[str, Any]]] | None = None + ) -> AssistantPromptMessage: + """Create assistant message with tool calls.""" + if tool_calls is None: + return AssistantPromptMessage(content=content) + return AssistantPromptMessage( + content=content or "", + tool_calls=[ + AssistantPromptMessage.ToolCall( + id=tc[0], + type="function", + function=AssistantPromptMessage.ToolCall.ToolCallFunction(name=tc[1], arguments=json.dumps(tc[2])), + ) + for tc in tool_calls + ], + ) + + def _handle_tool_call( + self, + tool_name: str, + tool_args: dict[str, Any], + tool_call_id: str, + messages: list[PromptMessage], + round_log: AgentLog, + ) -> Generator[AgentLog, None, tuple[str, list[File], ToolInvokeMeta | None]]: + """Handle a single tool call and return response with files and meta.""" + # Find tool + tool_instance = self._find_tool_by_name(tool_name) + if not tool_instance: + raise ValueError(f"Tool {tool_name} not found") + + # Get tool metadata (provider, icon, etc.) 
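+        # The metadata gathered here (provider, icon) is attached to the
+        # tool-call log via extra_metadata so log consumers can show which
+        # provider served the call.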
+ tool_metadata = self._get_tool_metadata(tool_instance) + + # Create tool call log + tool_call_log = self._create_log( + label=f"CALL {tool_name}", + log_type=AgentLog.LogType.TOOL_CALL, + status=AgentLog.LogStatus.START, + data={ + "tool_call_id": tool_call_id, + "tool_name": tool_name, + "tool_args": tool_args, + }, + parent_id=round_log.id, + extra_metadata=tool_metadata, + ) + yield tool_call_log + + # Invoke tool using base class method with error handling + try: + response_content, tool_files, tool_invoke_meta = self._invoke_tool(tool_instance, tool_args, tool_name) + + yield self._finish_log( + tool_call_log, + data={ + **tool_call_log.data, + "output": response_content, + "files": len(tool_files), + "meta": tool_invoke_meta.to_dict() if tool_invoke_meta else None, + }, + ) + final_content = response_content or "Tool executed successfully" + # Add tool response to messages + messages.append( + ToolPromptMessage( + content=final_content, + tool_call_id=tool_call_id, + name=tool_name, + ) + ) + return response_content, tool_files, tool_invoke_meta + except Exception as e: + # Tool invocation failed, yield error log + error_message = str(e) + tool_call_log.status = AgentLog.LogStatus.ERROR + tool_call_log.error = error_message + tool_call_log.data = { + **tool_call_log.data, + "error": error_message, + } + yield tool_call_log + + # Add error message to conversation + error_content = f"Tool execution failed: {error_message}" + messages.append( + ToolPromptMessage( + content=error_content, + tool_call_id=tool_call_id, + name=tool_name, + ) + ) + return error_content, [], None diff --git a/api/core/agent/patterns/react.py b/api/core/agent/patterns/react.py new file mode 100644 index 0000000000..87a9fa9b65 --- /dev/null +++ b/api/core/agent/patterns/react.py @@ -0,0 +1,418 @@ +"""ReAct strategy implementation.""" + +from __future__ import annotations + +import json +from collections.abc import Generator +from typing import TYPE_CHECKING, Any, Union + +from core.agent.entities import AgentLog, AgentResult, AgentScratchpadUnit, ExecutionContext +from core.agent.output_parser.cot_output_parser import CotAgentOutputParser +from core.file import File +from core.model_manager import ModelInstance +from core.model_runtime.entities import ( + AssistantPromptMessage, + LLMResult, + LLMResultChunk, + LLMResultChunkDelta, + PromptMessage, + SystemPromptMessage, +) + +from .base import AgentPattern, ToolInvokeHook + +if TYPE_CHECKING: + from core.tools.__base.tool import Tool + + +class ReActStrategy(AgentPattern): + """ReAct strategy using reasoning and acting approach.""" + + def __init__( + self, + model_instance: ModelInstance, + tools: list[Tool], + context: ExecutionContext, + max_iterations: int = 10, + workflow_call_depth: int = 0, + files: list[File] = [], + tool_invoke_hook: ToolInvokeHook | None = None, + instruction: str = "", + ): + """Initialize the ReAct strategy with instruction support.""" + super().__init__( + model_instance=model_instance, + tools=tools, + context=context, + max_iterations=max_iterations, + workflow_call_depth=workflow_call_depth, + files=files, + tool_invoke_hook=tool_invoke_hook, + ) + self.instruction = instruction + + def run( + self, + prompt_messages: list[PromptMessage], + model_parameters: dict[str, Any], + stop: list[str] = [], + stream: bool = True, + ) -> Generator[LLMResultChunk | AgentLog, None, AgentResult]: + """Execute the ReAct agent strategy.""" + # Initialize tracking + agent_scratchpad: list[AgentScratchpadUnit] = [] + iteration_step: int = 1 + 
max_iterations: int = self.max_iterations + 1 + react_state: bool = True + total_usage: dict[str, Any] = {"usage": None} + output_files: list[File] = [] # Track files produced by tools + final_text: str = "" + finish_reason: str | None = None + + # Add "Observation" to stop sequences + if "Observation" not in stop: + stop = stop.copy() + stop.append("Observation") + + while react_state and iteration_step <= max_iterations: + react_state = False + round_log = self._create_log( + label=f"ROUND {iteration_step}", + log_type=AgentLog.LogType.ROUND, + status=AgentLog.LogStatus.START, + data={}, + ) + yield round_log + + # Build prompt with/without tools based on iteration + include_tools = iteration_step < max_iterations + current_messages = self._build_prompt_with_react_format( + prompt_messages, agent_scratchpad, include_tools, self.instruction + ) + + model_log = self._create_log( + label=f"{self.model_instance.model} Thought", + log_type=AgentLog.LogType.THOUGHT, + status=AgentLog.LogStatus.START, + data={}, + parent_id=round_log.id, + extra_metadata={ + AgentLog.LogMetadata.PROVIDER: self.model_instance.provider, + }, + ) + yield model_log + + # Track usage for this round only + round_usage: dict[str, Any] = {"usage": None} + + # Use current messages directly (files are handled by base class if needed) + messages_to_use = current_messages + + # Invoke model + chunks: Union[Generator[LLMResultChunk, None, None], LLMResult] = self.model_instance.invoke_llm( + prompt_messages=messages_to_use, + model_parameters=model_parameters, + stop=stop, + stream=stream, + user=self.context.user_id or "", + callbacks=[], + ) + + # Process response + scratchpad, chunk_finish_reason = yield from self._handle_chunks( + chunks, round_usage, model_log, current_messages + ) + agent_scratchpad.append(scratchpad) + + # Accumulate to total usage + round_usage_value = round_usage.get("usage") + if round_usage_value: + self._accumulate_usage(total_usage, round_usage_value) + + # Update finish reason + if chunk_finish_reason: + finish_reason = chunk_finish_reason + + # Check if we have an action to execute + if scratchpad.action and scratchpad.action.action_name.lower() != "final answer": + react_state = True + # Execute tool + observation, tool_files = yield from self._handle_tool_call( + scratchpad.action, current_messages, round_log + ) + scratchpad.observation = observation + # Track files produced by tools + output_files.extend(tool_files) + + # Add observation to scratchpad for display + yield self._create_text_chunk(f"\nObservation: {observation}\n", current_messages) + else: + # Extract final answer + if scratchpad.action and scratchpad.action.action_input: + final_answer = scratchpad.action.action_input + if isinstance(final_answer, dict): + final_answer = json.dumps(final_answer, ensure_ascii=False) + final_text = str(final_answer) + elif scratchpad.thought: + # If no action but we have thought, use thought as final answer + final_text = scratchpad.thought + + yield self._finish_log( + round_log, + data={ + "thought": scratchpad.thought, + "action": scratchpad.action_str if scratchpad.action else None, + "observation": scratchpad.observation or None, + "final_answer": final_text if not react_state else None, + }, + usage=round_usage.get("usage"), + ) + iteration_step += 1 + + # Return final result + + from core.agent.entities import AgentResult + + return AgentResult( + text=final_text, files=output_files, usage=total_usage.get("usage"), finish_reason=finish_reason + ) + + def 
_build_prompt_with_react_format( + self, + original_messages: list[PromptMessage], + agent_scratchpad: list[AgentScratchpadUnit], + include_tools: bool = True, + instruction: str = "", + ) -> list[PromptMessage]: + """Build prompt messages with ReAct format.""" + # Copy messages to avoid modifying original + messages = list(original_messages) + + # Find and update the system prompt that should already exist + system_prompt_found = False + for i, msg in enumerate(messages): + if isinstance(msg, SystemPromptMessage): + system_prompt_found = True + # The system prompt from frontend already has the template, just replace placeholders + + # Format tools + tools_str = "" + tool_names = [] + if include_tools and self.tools: + # Convert tools to prompt message tools format + prompt_tools = [tool.to_prompt_message_tool() for tool in self.tools] + tool_names = [tool.name for tool in prompt_tools] + + # Format tools as JSON for comprehensive information + from core.model_runtime.utils.encoders import jsonable_encoder + + tools_str = json.dumps(jsonable_encoder(prompt_tools), indent=2) + tool_names_str = ", ".join(f'"{name}"' for name in tool_names) + else: + tools_str = "No tools available" + tool_names_str = "" + + # Replace placeholders in the existing system prompt + updated_content = msg.content + assert isinstance(updated_content, str) + updated_content = updated_content.replace("{{instruction}}", instruction) + updated_content = updated_content.replace("{{tools}}", tools_str) + updated_content = updated_content.replace("{{tool_names}}", tool_names_str) + + # Create new SystemPromptMessage with updated content + messages[i] = SystemPromptMessage(content=updated_content) + break + + # If no system prompt found, that's unexpected but add scratchpad anyway + if not system_prompt_found: + # This shouldn't happen if frontend is working correctly + pass + + # Format agent scratchpad + scratchpad_str = "" + if agent_scratchpad: + scratchpad_parts: list[str] = [] + for unit in agent_scratchpad: + if unit.thought: + scratchpad_parts.append(f"Thought: {unit.thought}") + if unit.action_str: + scratchpad_parts.append(f"Action:\n```\n{unit.action_str}\n```") + if unit.observation: + scratchpad_parts.append(f"Observation: {unit.observation}") + scratchpad_str = "\n".join(scratchpad_parts) + + # If there's a scratchpad, append it to the last message + if scratchpad_str: + messages.append(AssistantPromptMessage(content=scratchpad_str)) + + return messages + + def _handle_chunks( + self, + chunks: Union[Generator[LLMResultChunk, None, None], LLMResult], + llm_usage: dict[str, Any], + model_log: AgentLog, + current_messages: list[PromptMessage], + ) -> Generator[ + LLMResultChunk | AgentLog, + None, + tuple[AgentScratchpadUnit, str | None], + ]: + """Handle LLM response chunks and extract action/thought. + + Returns a tuple of (scratchpad_unit, finish_reason). 
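+
+        The raw model stream is parsed by CotAgentOutputParser, which yields
+        plain-text thought chunks interleaved with AgentScratchpadUnit.Action
+        objects; both are accumulated into a single scratchpad unit here.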
+ """ + usage_dict: dict[str, Any] = {} + + # Convert non-streaming to streaming format if needed + if isinstance(chunks, LLMResult): + # Create a generator from the LLMResult + def result_to_chunks() -> Generator[LLMResultChunk, None, None]: + yield LLMResultChunk( + model=chunks.model, + prompt_messages=chunks.prompt_messages, + delta=LLMResultChunkDelta( + index=0, + message=chunks.message, + usage=chunks.usage, + finish_reason=None, # LLMResult doesn't have finish_reason, only streaming chunks do + ), + system_fingerprint=chunks.system_fingerprint or "", + ) + + streaming_chunks = result_to_chunks() + else: + streaming_chunks = chunks + + react_chunks = CotAgentOutputParser.handle_react_stream_output(streaming_chunks, usage_dict) + + # Initialize scratchpad unit + scratchpad = AgentScratchpadUnit( + agent_response="", + thought="", + action_str="", + observation="", + action=None, + ) + + finish_reason: str | None = None + + # Process chunks + for chunk in react_chunks: + if isinstance(chunk, AgentScratchpadUnit.Action): + # Action detected + action_str = json.dumps(chunk.model_dump()) + scratchpad.agent_response = (scratchpad.agent_response or "") + action_str + scratchpad.action_str = action_str + scratchpad.action = chunk + + yield self._create_text_chunk(json.dumps(chunk.model_dump()), current_messages) + else: + # Text chunk + chunk_text = str(chunk) + scratchpad.agent_response = (scratchpad.agent_response or "") + chunk_text + scratchpad.thought = (scratchpad.thought or "") + chunk_text + + yield self._create_text_chunk(chunk_text, current_messages) + + # Update usage + if usage_dict.get("usage"): + if llm_usage.get("usage"): + self._accumulate_usage(llm_usage, usage_dict["usage"]) + else: + llm_usage["usage"] = usage_dict["usage"] + + # Clean up thought + scratchpad.thought = (scratchpad.thought or "").strip() or "I am thinking about how to help you" + + # Finish model log + yield self._finish_log( + model_log, + data={ + "thought": scratchpad.thought, + "action": scratchpad.action_str if scratchpad.action else None, + }, + usage=llm_usage.get("usage"), + ) + + return scratchpad, finish_reason + + def _handle_tool_call( + self, + action: AgentScratchpadUnit.Action, + prompt_messages: list[PromptMessage], + round_log: AgentLog, + ) -> Generator[AgentLog, None, tuple[str, list[File]]]: + """Handle tool call and return observation with files.""" + tool_name = action.action_name + tool_args: dict[str, Any] | str = action.action_input + + # Find tool instance first to get metadata + tool_instance = self._find_tool_by_name(tool_name) + tool_metadata = self._get_tool_metadata(tool_instance) if tool_instance else {} + + # Start tool log with tool metadata + tool_log = self._create_log( + label=f"CALL {tool_name}", + log_type=AgentLog.LogType.TOOL_CALL, + status=AgentLog.LogStatus.START, + data={ + "tool_name": tool_name, + "tool_args": tool_args, + }, + parent_id=round_log.id, + extra_metadata=tool_metadata, + ) + yield tool_log + + if not tool_instance: + # Finish tool log with error + yield self._finish_log( + tool_log, + data={ + **tool_log.data, + "error": f"Tool {tool_name} not found", + }, + ) + return f"Tool {tool_name} not found", [] + + # Ensure tool_args is a dict + tool_args_dict: dict[str, Any] + if isinstance(tool_args, str): + try: + tool_args_dict = json.loads(tool_args) + except json.JSONDecodeError: + tool_args_dict = {"input": tool_args} + elif not isinstance(tool_args, dict): + tool_args_dict = {"input": str(tool_args)} + else: + tool_args_dict = tool_args + + # 
Invoke tool using base class method with error handling + try: + response_content, tool_files, tool_invoke_meta = self._invoke_tool(tool_instance, tool_args_dict, tool_name) + + # Finish tool log + yield self._finish_log( + tool_log, + data={ + **tool_log.data, + "output": response_content, + "files": len(tool_files), + "meta": tool_invoke_meta.to_dict() if tool_invoke_meta else None, + }, + ) + + return response_content or "Tool executed successfully", tool_files + except Exception as e: + # Tool invocation failed, yield error log + error_message = str(e) + tool_log.status = AgentLog.LogStatus.ERROR + tool_log.error = error_message + tool_log.data = { + **tool_log.data, + "error": error_message, + } + yield tool_log + + return f"Tool execution failed: {error_message}", [] diff --git a/api/core/agent/patterns/strategy_factory.py b/api/core/agent/patterns/strategy_factory.py new file mode 100644 index 0000000000..ad26075291 --- /dev/null +++ b/api/core/agent/patterns/strategy_factory.py @@ -0,0 +1,107 @@ +"""Strategy factory for creating agent strategies.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from core.agent.entities import AgentEntity, ExecutionContext +from core.file.models import File +from core.model_manager import ModelInstance +from core.model_runtime.entities.model_entities import ModelFeature + +from .base import AgentPattern, ToolInvokeHook +from .function_call import FunctionCallStrategy +from .react import ReActStrategy + +if TYPE_CHECKING: + from core.tools.__base.tool import Tool + + +class StrategyFactory: + """Factory for creating agent strategies based on model features.""" + + # Tool calling related features + TOOL_CALL_FEATURES = {ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL, ModelFeature.STREAM_TOOL_CALL} + + @staticmethod + def create_strategy( + model_features: list[ModelFeature], + model_instance: ModelInstance, + context: ExecutionContext, + tools: list[Tool], + files: list[File], + max_iterations: int = 10, + workflow_call_depth: int = 0, + agent_strategy: AgentEntity.Strategy | None = None, + tool_invoke_hook: ToolInvokeHook | None = None, + instruction: str = "", + ) -> AgentPattern: + """ + Create an appropriate strategy based on model features. 
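+
+        Selection order, as implemented below: an explicit FUNCTION_CALLING
+        request is honored only when the model advertises a tool-call
+        feature (otherwise it falls back to ReAct); an explicit
+        CHAIN_OF_THOUGHT request always yields ReActStrategy; with no
+        override, tool-call-capable models get FunctionCallStrategy and all
+        others get ReActStrategy.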
+ + Args: + model_features: List of model features/capabilities + model_instance: Model instance to use + context: Execution context containing trace/audit information + tools: Available tools + files: Available files + max_iterations: Maximum iterations for the strategy + workflow_call_depth: Depth of workflow calls + agent_strategy: Optional explicit strategy override + tool_invoke_hook: Optional hook for custom tool invocation (e.g., agent_invoke) + instruction: Optional instruction for ReAct strategy + + Returns: + AgentStrategy instance + """ + # If explicit strategy is provided and it's Function Calling, try to use it if supported + if agent_strategy == AgentEntity.Strategy.FUNCTION_CALLING: + if set(model_features) & StrategyFactory.TOOL_CALL_FEATURES: + return FunctionCallStrategy( + model_instance=model_instance, + context=context, + tools=tools, + files=files, + max_iterations=max_iterations, + workflow_call_depth=workflow_call_depth, + tool_invoke_hook=tool_invoke_hook, + ) + # Fallback to ReAct if FC is requested but not supported + + # If explicit strategy is Chain of Thought (ReAct) + if agent_strategy == AgentEntity.Strategy.CHAIN_OF_THOUGHT: + return ReActStrategy( + model_instance=model_instance, + context=context, + tools=tools, + files=files, + max_iterations=max_iterations, + workflow_call_depth=workflow_call_depth, + tool_invoke_hook=tool_invoke_hook, + instruction=instruction, + ) + + # Default auto-selection logic + if set(model_features) & StrategyFactory.TOOL_CALL_FEATURES: + # Model supports native function calling + return FunctionCallStrategy( + model_instance=model_instance, + context=context, + tools=tools, + files=files, + max_iterations=max_iterations, + workflow_call_depth=workflow_call_depth, + tool_invoke_hook=tool_invoke_hook, + ) + else: + # Use ReAct strategy for models without function calling + return ReActStrategy( + model_instance=model_instance, + context=context, + tools=tools, + files=files, + max_iterations=max_iterations, + workflow_call_depth=workflow_call_depth, + tool_invoke_hook=tool_invoke_hook, + instruction=instruction, + ) diff --git a/api/core/app/app_config/entities.py b/api/core/app/app_config/entities.py index 93f2742599..13c51529cc 100644 --- a/api/core/app/app_config/entities.py +++ b/api/core/app/app_config/entities.py @@ -120,7 +120,7 @@ class VariableEntity(BaseModel): allowed_file_types: Sequence[FileType] | None = Field(default_factory=list) allowed_file_extensions: Sequence[str] | None = Field(default_factory=list) allowed_file_upload_methods: Sequence[FileTransferMethod] | None = Field(default_factory=list) - json_schema: dict[str, Any] | None = Field(default=None) + json_schema: dict | None = Field(default=None) @field_validator("description", mode="before") @classmethod @@ -134,7 +134,7 @@ class VariableEntity(BaseModel): @field_validator("json_schema") @classmethod - def validate_json_schema(cls, schema: dict[str, Any] | None) -> dict[str, Any] | None: + def validate_json_schema(cls, schema: dict | None) -> dict | None: if schema is None: return None try: diff --git a/api/core/app/apps/advanced_chat/app_config_manager.py b/api/core/app/apps/advanced_chat/app_config_manager.py index e4b308a6f6..c21c494efe 100644 --- a/api/core/app/apps/advanced_chat/app_config_manager.py +++ b/api/core/app/apps/advanced_chat/app_config_manager.py @@ -26,7 +26,6 @@ class AdvancedChatAppConfigManager(BaseAppConfigManager): @classmethod def get_app_config(cls, app_model: App, workflow: Workflow) -> AdvancedChatAppConfig: features_dict = 
workflow.features_dict - app_mode = AppMode.value_of(app_model.mode) app_config = AdvancedChatAppConfig( tenant_id=app_model.tenant_id, diff --git a/api/core/app/apps/advanced_chat/app_runner.py b/api/core/app/apps/advanced_chat/app_runner.py index ee092e55c5..d636548f2b 100644 --- a/api/core/app/apps/advanced_chat/app_runner.py +++ b/api/core/app/apps/advanced_chat/app_runner.py @@ -20,6 +20,8 @@ from core.app.entities.queue_entities import ( QueueTextChunkEvent, ) from core.app.features.annotation_reply.annotation_reply import AnnotationReplyFeature +from core.app.layers.conversation_variable_persist_layer import ConversationVariablePersistenceLayer +from core.db.session_factory import session_factory from core.moderation.base import ModerationError from core.moderation.input_moderation import InputModeration from core.variables.variables import VariableUnion @@ -37,9 +39,9 @@ from extensions.ext_database import db from extensions.ext_redis import redis_client from extensions.otel import WorkflowAppRunnerHandler, trace_span from models import Workflow -from models.enums import UserFrom from models.model import App, Conversation, Message, MessageAnnotation from models.workflow import ConversationVariable +from services.conversation_variable_updater import ConversationVariableUpdater logger = logging.getLogger(__name__) @@ -103,6 +105,11 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): if not app_record: raise ValueError("App not found") + invoke_from = self.application_generate_entity.invoke_from + if self.application_generate_entity.single_iteration_run or self.application_generate_entity.single_loop_run: + invoke_from = InvokeFrom.DEBUGGER + user_from = self._resolve_user_from(invoke_from) + if self.application_generate_entity.single_iteration_run or self.application_generate_entity.single_loop_run: # Handle single iteration or single loop run graph, variable_pool, graph_runtime_state = self._prepare_single_node_execution( @@ -155,6 +162,8 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): workflow_id=self._workflow.id, tenant_id=self._workflow.tenant_id, user_id=self.application_generate_entity.user_id, + user_from=user_from, + invoke_from=invoke_from, ) db.session.close() @@ -172,12 +181,8 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): graph=graph, graph_config=self._workflow.graph_dict, user_id=self.application_generate_entity.user_id, - user_from=( - UserFrom.ACCOUNT - if self.application_generate_entity.invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} - else UserFrom.END_USER - ), - invoke_from=self.application_generate_entity.invoke_from, + user_from=user_from, + invoke_from=invoke_from, call_depth=self.application_generate_entity.call_depth, variable_pool=variable_pool, graph_runtime_state=graph_runtime_state, @@ -200,6 +205,10 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): ) workflow_entry.graph_engine.layer(persistence_layer) + conversation_variable_layer = ConversationVariablePersistenceLayer( + ConversationVariableUpdater(session_factory.get_session_maker()) + ) + workflow_entry.graph_engine.layer(conversation_variable_layer) for layer in self._graph_engine_layers: workflow_entry.graph_engine.layer(layer) diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index da1e9f19b6..c4d89b8b2f 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -4,6 +4,7 @@ import re import time from 
collections.abc import Callable, Generator, Mapping from contextlib import contextmanager +from dataclasses import dataclass, field from threading import Thread from typing import Any, Union @@ -19,6 +20,7 @@ from core.app.entities.app_invoke_entities import ( InvokeFrom, ) from core.app.entities.queue_entities import ( + ChunkType, MessageQueueMessage, QueueAdvancedChatMessageEndEvent, QueueAgentLogEvent, @@ -70,13 +72,122 @@ from core.workflow.runtime import GraphRuntimeState from core.workflow.system_variable import SystemVariable from extensions.ext_database import db from libs.datetime_utils import naive_utc_now -from models import Account, Conversation, EndUser, Message, MessageFile +from models import Account, Conversation, EndUser, LLMGenerationDetail, Message, MessageFile from models.enums import CreatorUserRole from models.workflow import Workflow logger = logging.getLogger(__name__) +@dataclass +class StreamEventBuffer: + """ + Buffer for recording stream events in order to reconstruct the generation sequence. + Records the exact order of text chunks, thoughts, and tool calls as they stream. + """ + + # Accumulated reasoning content (each thought block is a separate element) + reasoning_content: list[str] = field(default_factory=list) + # Current reasoning buffer (accumulates until we see a different event type) + _current_reasoning: str = "" + # Tool calls with their details + tool_calls: list[dict] = field(default_factory=list) + # Tool call ID to index mapping for updating results + _tool_call_id_map: dict[str, int] = field(default_factory=dict) + # Sequence of events in stream order + sequence: list[dict] = field(default_factory=list) + # Current position in answer text + _content_position: int = 0 + # Track last event type to detect transitions + _last_event_type: str | None = None + + def _flush_current_reasoning(self) -> None: + """Flush accumulated reasoning to the list and add to sequence.""" + if self._current_reasoning.strip(): + self.reasoning_content.append(self._current_reasoning.strip()) + self.sequence.append({"type": "reasoning", "index": len(self.reasoning_content) - 1}) + self._current_reasoning = "" + + def record_text_chunk(self, text: str) -> None: + """Record a text chunk event.""" + if not text: + return + + # Flush any pending reasoning first + if self._last_event_type == "thought": + self._flush_current_reasoning() + + text_len = len(text) + start_pos = self._content_position + + # If last event was also content, extend it; otherwise create new + if self.sequence and self.sequence[-1].get("type") == "content": + self.sequence[-1]["end"] = start_pos + text_len + else: + self.sequence.append({"type": "content", "start": start_pos, "end": start_pos + text_len}) + + self._content_position += text_len + self._last_event_type = "content" + + def record_thought_chunk(self, text: str) -> None: + """Record a thought/reasoning chunk event.""" + if not text: + return + + # Accumulate thought content + self._current_reasoning += text + self._last_event_type = "thought" + + def record_tool_call(self, tool_call_id: str, tool_name: str, tool_arguments: str) -> None: + """Record a tool call event.""" + if not tool_call_id: + return + + # Flush any pending reasoning first + if self._last_event_type == "thought": + self._flush_current_reasoning() + + # Check if this tool call already exists (we might get multiple chunks) + if tool_call_id in self._tool_call_id_map: + idx = self._tool_call_id_map[tool_call_id] + # Update arguments if provided + if tool_arguments: + 
self.tool_calls[idx]["arguments"] = tool_arguments + else: + # New tool call + tool_call = { + "id": tool_call_id or "", + "name": tool_name or "", + "arguments": tool_arguments or "", + "result": "", + "elapsed_time": None, + } + self.tool_calls.append(tool_call) + idx = len(self.tool_calls) - 1 + self._tool_call_id_map[tool_call_id] = idx + self.sequence.append({"type": "tool_call", "index": idx}) + + self._last_event_type = "tool_call" + + def record_tool_result(self, tool_call_id: str, result: str, tool_elapsed_time: float | None = None) -> None: + """Record a tool result event (update existing tool call).""" + if not tool_call_id: + return + if tool_call_id in self._tool_call_id_map: + idx = self._tool_call_id_map[tool_call_id] + self.tool_calls[idx]["result"] = result + self.tool_calls[idx]["elapsed_time"] = tool_elapsed_time + + def finalize(self) -> None: + """Finalize the buffer, flushing any pending data.""" + if self._last_event_type == "thought": + self._flush_current_reasoning() + + def has_data(self) -> bool: + """Check if there's any meaningful data recorded.""" + return bool(self.reasoning_content or self.tool_calls or self.sequence) + + class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport): """ AdvancedChatAppGenerateTaskPipeline is a class that generate stream output and state management for Application. @@ -144,6 +255,8 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport): self._workflow_run_id: str = "" self._draft_var_saver_factory = draft_var_saver_factory self._graph_runtime_state: GraphRuntimeState | None = None + # Stream event buffer for recording generation sequence + self._stream_buffer = StreamEventBuffer() self._seed_graph_runtime_state_from_queue_manager() def process(self) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]: @@ -383,7 +496,7 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport): queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: - """Handle text chunk events.""" + """Handle text chunk events and record to stream buffer for sequence reconstruction.""" delta_text = event.text if delta_text is None: return @@ -405,9 +518,52 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport): if tts_publisher and queue_message: tts_publisher.publish(queue_message) - self._task_state.answer += delta_text + tool_call = event.tool_call + tool_result = event.tool_result + tool_payload = tool_call or tool_result + tool_call_id = tool_payload.id if tool_payload and tool_payload.id else "" + tool_name = tool_payload.name if tool_payload and tool_payload.name else "" + tool_arguments = tool_call.arguments if tool_call and tool_call.arguments else "" + tool_files = tool_result.files if tool_result else [] + tool_elapsed_time = tool_result.elapsed_time if tool_result else None + tool_icon = tool_payload.icon if tool_payload else None + tool_icon_dark = tool_payload.icon_dark if tool_payload else None + # Record stream event based on chunk type + chunk_type = event.chunk_type or ChunkType.TEXT + match chunk_type: + case ChunkType.TEXT: + self._stream_buffer.record_text_chunk(delta_text) + self._task_state.answer += delta_text + case ChunkType.THOUGHT: + # Reasoning should not be part of final answer text + self._stream_buffer.record_thought_chunk(delta_text) + case ChunkType.TOOL_CALL: + self._stream_buffer.record_tool_call( + tool_call_id=tool_call_id, + tool_name=tool_name, + 
tool_arguments=tool_arguments,
+                )
+            case ChunkType.TOOL_RESULT:
+                self._stream_buffer.record_tool_result(
+                    tool_call_id=tool_call_id,
+                    result=delta_text,
+                    tool_elapsed_time=tool_elapsed_time,
+                )
+                self._task_state.answer += delta_text
+            case _:
+                pass
         yield self._message_cycle_manager.message_to_stream_response(
-            answer=delta_text, message_id=self._message_id, from_variable_selector=event.from_variable_selector
+            answer=delta_text,
+            message_id=self._message_id,
+            from_variable_selector=event.from_variable_selector,
+            chunk_type=event.chunk_type.value if event.chunk_type else None,
+            tool_call_id=tool_call_id or None,
+            tool_name=tool_name or None,
+            tool_arguments=tool_arguments or None,
+            tool_files=tool_files,
+            tool_elapsed_time=tool_elapsed_time,
+            tool_icon=tool_icon,
+            tool_icon_dark=tool_icon_dark,
         )
 
     def _handle_iteration_start_event(
@@ -775,6 +931,7 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport):
 
         # If there are assistant files, remove markdown image links from answer
         answer_text = self._task_state.answer
+        answer_text = self._strip_think_blocks(answer_text)
         if self._recorded_files:
             # Remove markdown image links since we're storing files separately
             answer_text = re.sub(r"!\[.*?\]\(.*?\)", "", answer_text).strip()
@@ -826,6 +983,54 @@
             ]
             session.add_all(message_files)
 
+        # Save generation detail (reasoning/tool calls/sequence) from stream buffer
+        self._save_generation_detail(session=session, message=message)
+
+    @staticmethod
+    def _strip_think_blocks(text: str) -> str:
+        """Remove <think>...</think> blocks (including their content) from text."""
+        if not text or "<think" not in text.lower():
+            return text
+        clean_text = re.sub(r"<think[^>]*>.*?</think>", "", text, flags=re.IGNORECASE | re.DOTALL)
+        clean_text = re.sub(r"\n\s*\n", "\n\n", clean_text).strip()
+        return clean_text
+
+    def _save_generation_detail(self, *, session: Session, message: Message) -> None:
+        """
+        Save LLM generation detail for Chatflow using stream event buffer.
+        The buffer records the exact order of events as they streamed,
+        allowing accurate reconstruction of the generation sequence.
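+
+        Sequence entries (see StreamEventBuffer) reference content by type:
+        {"type": "content", "start": s, "end": e} slices the answer text,
+        while {"type": "reasoning"} and {"type": "tool_call"} carry an
+        "index" into reasoning_content / tool_calls respectively.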
+ """ + # Finalize the stream buffer to flush any pending data + self._stream_buffer.finalize() + + # Only save if there's meaningful data + if not self._stream_buffer.has_data(): + return + + reasoning_content = self._stream_buffer.reasoning_content + tool_calls = self._stream_buffer.tool_calls + sequence = self._stream_buffer.sequence + + # Check if generation detail already exists for this message + existing = session.query(LLMGenerationDetail).filter_by(message_id=message.id).first() + + if existing: + existing.reasoning_content = json.dumps(reasoning_content) if reasoning_content else None + existing.tool_calls = json.dumps(tool_calls) if tool_calls else None + existing.sequence = json.dumps(sequence) if sequence else None + else: + generation_detail = LLMGenerationDetail( + tenant_id=self._application_generate_entity.app_config.tenant_id, + app_id=self._application_generate_entity.app_config.app_id, + message_id=message.id, + reasoning_content=json.dumps(reasoning_content) if reasoning_content else None, + tool_calls=json.dumps(tool_calls) if tool_calls else None, + sequence=json.dumps(sequence) if sequence else None, + ) + session.add(generation_detail) + def _seed_graph_runtime_state_from_queue_manager(self) -> None: """Bootstrap the cached runtime state from the queue manager when present.""" candidate = self._base_task_pipeline.queue_manager.graph_runtime_state diff --git a/api/core/app/apps/agent_chat/app_runner.py b/api/core/app/apps/agent_chat/app_runner.py index 2760466a3b..f5cf7a2c56 100644 --- a/api/core/app/apps/agent_chat/app_runner.py +++ b/api/core/app/apps/agent_chat/app_runner.py @@ -3,10 +3,8 @@ from typing import cast from sqlalchemy import select -from core.agent.cot_chat_agent_runner import CotChatAgentRunner -from core.agent.cot_completion_agent_runner import CotCompletionAgentRunner +from core.agent.agent_app_runner import AgentAppRunner from core.agent.entities import AgentEntity -from core.agent.fc_agent_runner import FunctionCallAgentRunner from core.app.apps.agent_chat.app_config_manager import AgentChatAppConfig from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom from core.app.apps.base_app_runner import AppRunner @@ -14,8 +12,7 @@ from core.app.entities.app_invoke_entities import AgentChatAppGenerateEntity from core.app.entities.queue_entities import QueueAnnotationReplyEvent from core.memory.token_buffer_memory import TokenBufferMemory from core.model_manager import ModelInstance -from core.model_runtime.entities.llm_entities import LLMMode -from core.model_runtime.entities.model_entities import ModelFeature, ModelPropertyKey +from core.model_runtime.entities.model_entities import ModelFeature from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.moderation.base import ModerationError from extensions.ext_database import db @@ -194,22 +191,7 @@ class AgentChatAppRunner(AppRunner): raise ValueError("Message not found") db.session.close() - runner_cls: type[FunctionCallAgentRunner] | type[CotChatAgentRunner] | type[CotCompletionAgentRunner] - # start agent runner - if agent_entity.strategy == AgentEntity.Strategy.CHAIN_OF_THOUGHT: - # check LLM mode - if model_schema.model_properties.get(ModelPropertyKey.MODE) == LLMMode.CHAT: - runner_cls = CotChatAgentRunner - elif model_schema.model_properties.get(ModelPropertyKey.MODE) == LLMMode.COMPLETION: - runner_cls = CotCompletionAgentRunner - else: - raise ValueError(f"Invalid LLM mode: 
{model_schema.model_properties.get(ModelPropertyKey.MODE)}") - elif agent_entity.strategy == AgentEntity.Strategy.FUNCTION_CALLING: - runner_cls = FunctionCallAgentRunner - else: - raise ValueError(f"Invalid agent strategy: {agent_entity.strategy}") - - runner = runner_cls( + runner = AgentAppRunner( tenant_id=app_config.tenant_id, application_generate_entity=application_generate_entity, conversation=conversation_result, diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py index 1b0474142e..e4486e892c 100644 --- a/api/core/app/apps/base_app_generator.py +++ b/api/core/app/apps/base_app_generator.py @@ -75,12 +75,24 @@ class BaseAppGenerator: user_inputs = {**user_inputs, **files_inputs, **file_list_inputs} # Check if all files are converted to File - if any(filter(lambda v: isinstance(v, dict), user_inputs.values())): - raise ValueError("Invalid input type") - if any( - filter(lambda v: isinstance(v, dict), filter(lambda item: isinstance(item, list), user_inputs.values())) - ): - raise ValueError("Invalid input type") + invalid_dict_keys = [ + k + for k, v in user_inputs.items() + if isinstance(v, dict) + and entity_dictionary[k].type not in {VariableEntityType.FILE, VariableEntityType.JSON_OBJECT} + ] + if invalid_dict_keys: + raise ValueError(f"Invalid input type for {invalid_dict_keys}") + + invalid_list_dict_keys = [ + k + for k, v in user_inputs.items() + if isinstance(v, list) + and any(isinstance(item, dict) for item in v) + and entity_dictionary[k].type != VariableEntityType.FILE_LIST + ] + if invalid_list_dict_keys: + raise ValueError(f"Invalid input type for {invalid_list_dict_keys}") return user_inputs @@ -104,8 +116,9 @@ class BaseAppGenerator: variable_entity.type in {VariableEntityType.FILE, VariableEntityType.FILE_LIST} and not variable_entity.required ): - # Treat empty string (frontend default) or empty list as unset - if not value and isinstance(value, (str, list)): + # Treat empty string (frontend default) as unset + # For FILE_LIST, allow empty list [] to pass through + if isinstance(value, str) and not value: return None if variable_entity.type in { @@ -175,6 +188,9 @@ class BaseAppGenerator: value = True elif value == 0: value = False + case VariableEntityType.JSON_OBJECT: + if not isinstance(value, dict): + raise ValueError(f"{variable_entity.variable} in input form must be a dict") case _: raise AssertionError("this statement should be unreachable.") diff --git a/api/core/app/apps/base_app_queue_manager.py b/api/core/app/apps/base_app_queue_manager.py index 698eee9894..b41bedbea4 100644 --- a/api/core/app/apps/base_app_queue_manager.py +++ b/api/core/app/apps/base_app_queue_manager.py @@ -90,6 +90,7 @@ class AppQueueManager: """ self._clear_task_belong_cache() self._q.put(None) + self._graph_runtime_state = None # Release reference to allow GC to reclaim memory def _clear_task_belong_cache(self) -> None: """ diff --git a/api/core/app/apps/common/workflow_response_converter.py b/api/core/app/apps/common/workflow_response_converter.py index 38ecec5d30..0f3f9972c3 100644 --- a/api/core/app/apps/common/workflow_response_converter.py +++ b/api/core/app/apps/common/workflow_response_converter.py @@ -671,7 +671,7 @@ class WorkflowResponseConverter: task_id=task_id, data=AgentLogStreamResponse.Data( node_execution_id=event.node_execution_id, - id=event.id, + message_id=event.id, parent_id=event.parent_id, label=event.label, error=event.error, diff --git a/api/core/app/apps/pipeline/pipeline_generator.py 
b/api/core/app/apps/pipeline/pipeline_generator.py index 13eb40fd60..ea4441b5d8 100644 --- a/api/core/app/apps/pipeline/pipeline_generator.py +++ b/api/core/app/apps/pipeline/pipeline_generator.py @@ -130,7 +130,7 @@ class PipelineGenerator(BaseAppGenerator): pipeline=pipeline, workflow=workflow, start_node_id=start_node_id ) documents: list[Document] = [] - if invoke_from == InvokeFrom.PUBLISHED and not is_retry and not args.get("original_document_id"): + if invoke_from == InvokeFrom.PUBLISHED_PIPELINE and not is_retry and not args.get("original_document_id"): from services.dataset_service import DocumentService for datasource_info in datasource_info_list: @@ -156,7 +156,7 @@ class PipelineGenerator(BaseAppGenerator): for i, datasource_info in enumerate(datasource_info_list): workflow_run_id = str(uuid.uuid4()) document_id = args.get("original_document_id") or None - if invoke_from == InvokeFrom.PUBLISHED and not is_retry: + if invoke_from == InvokeFrom.PUBLISHED_PIPELINE and not is_retry: document_id = document_id or documents[i].id document_pipeline_execution_log = DocumentPipelineExecutionLog( document_id=document_id, diff --git a/api/core/app/apps/pipeline/pipeline_runner.py b/api/core/app/apps/pipeline/pipeline_runner.py index 4be9e01fbf..0157521ae9 100644 --- a/api/core/app/apps/pipeline/pipeline_runner.py +++ b/api/core/app/apps/pipeline/pipeline_runner.py @@ -73,9 +73,15 @@ class PipelineRunner(WorkflowBasedAppRunner): """ app_config = self.application_generate_entity.app_config app_config = cast(PipelineConfig, app_config) + invoke_from = self.application_generate_entity.invoke_from + + if self.application_generate_entity.single_iteration_run or self.application_generate_entity.single_loop_run: + invoke_from = InvokeFrom.DEBUGGER + + user_from = self._resolve_user_from(invoke_from) user_id = None - if self.application_generate_entity.invoke_from in {InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API}: + if invoke_from in {InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API}: end_user = db.session.query(EndUser).where(EndUser.id == self.application_generate_entity.user_id).first() if end_user: user_id = end_user.session_id @@ -117,7 +123,7 @@ class PipelineRunner(WorkflowBasedAppRunner): dataset_id=self.application_generate_entity.dataset_id, datasource_type=self.application_generate_entity.datasource_type, datasource_info=self.application_generate_entity.datasource_info, - invoke_from=self.application_generate_entity.invoke_from.value, + invoke_from=invoke_from.value, ) rag_pipeline_variables = [] @@ -149,6 +155,8 @@ class PipelineRunner(WorkflowBasedAppRunner): graph_runtime_state=graph_runtime_state, start_node_id=self.application_generate_entity.start_node_id, workflow=workflow, + user_from=user_from, + invoke_from=invoke_from, ) # RUN WORKFLOW @@ -159,12 +167,8 @@ class PipelineRunner(WorkflowBasedAppRunner): graph=graph, graph_config=workflow.graph_dict, user_id=self.application_generate_entity.user_id, - user_from=( - UserFrom.ACCOUNT - if self.application_generate_entity.invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} - else UserFrom.END_USER - ), - invoke_from=self.application_generate_entity.invoke_from, + user_from=user_from, + invoke_from=invoke_from, call_depth=self.application_generate_entity.call_depth, graph_runtime_state=graph_runtime_state, variable_pool=variable_pool, @@ -210,7 +214,12 @@ class PipelineRunner(WorkflowBasedAppRunner): return workflow def _init_rag_pipeline_graph( - self, workflow: Workflow, graph_runtime_state: GraphRuntimeState, start_node_id: str | 
None = None + self, + workflow: Workflow, + graph_runtime_state: GraphRuntimeState, + start_node_id: str | None = None, + user_from: UserFrom = UserFrom.ACCOUNT, + invoke_from: InvokeFrom = InvokeFrom.SERVICE_API, ) -> Graph: """ Init pipeline graph @@ -253,8 +262,8 @@ class PipelineRunner(WorkflowBasedAppRunner): workflow_id=workflow.id, graph_config=graph_config, user_id=self.application_generate_entity.user_id, - user_from=UserFrom.ACCOUNT, - invoke_from=InvokeFrom.SERVICE_API, + user_from=user_from, + invoke_from=invoke_from, call_depth=0, ) diff --git a/api/core/app/apps/workflow/app_runner.py b/api/core/app/apps/workflow/app_runner.py index 894e6f397a..8dbdc1d58c 100644 --- a/api/core/app/apps/workflow/app_runner.py +++ b/api/core/app/apps/workflow/app_runner.py @@ -20,7 +20,6 @@ from core.workflow.workflow_entry import WorkflowEntry from extensions.ext_redis import redis_client from extensions.otel import WorkflowAppRunnerHandler, trace_span from libs.datetime_utils import naive_utc_now -from models.enums import UserFrom from models.workflow import Workflow logger = logging.getLogger(__name__) @@ -74,7 +73,12 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): workflow_execution_id=self.application_generate_entity.workflow_execution_id, ) + invoke_from = self.application_generate_entity.invoke_from # if only single iteration or single loop run is requested + if self.application_generate_entity.single_iteration_run or self.application_generate_entity.single_loop_run: + invoke_from = InvokeFrom.DEBUGGER + user_from = self._resolve_user_from(invoke_from) + if self.application_generate_entity.single_iteration_run or self.application_generate_entity.single_loop_run: graph, variable_pool, graph_runtime_state = self._prepare_single_node_execution( workflow=self._workflow, @@ -102,6 +106,8 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): workflow_id=self._workflow.id, tenant_id=self._workflow.tenant_id, user_id=self.application_generate_entity.user_id, + user_from=user_from, + invoke_from=invoke_from, root_node_id=self._root_node_id, ) @@ -120,12 +126,8 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): graph=graph, graph_config=self._workflow.graph_dict, user_id=self.application_generate_entity.user_id, - user_from=( - UserFrom.ACCOUNT - if self.application_generate_entity.invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} - else UserFrom.END_USER - ), - invoke_from=self.application_generate_entity.invoke_from, + user_from=user_from, + invoke_from=invoke_from, call_depth=self.application_generate_entity.call_depth, variable_pool=variable_pool, graph_runtime_state=graph_runtime_state, diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index 842ad545ad..2b8ed38c63 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -13,6 +13,7 @@ from core.app.apps.common.workflow_response_converter import WorkflowResponseCon from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity from core.app.entities.queue_entities import ( AppQueueEvent, + ChunkType, MessageQueueMessage, QueueAgentLogEvent, QueueErrorEvent, @@ -483,11 +484,33 @@ class WorkflowAppGenerateTaskPipeline(GraphRuntimeStateSupport): if delta_text is None: return + tool_call = event.tool_call + tool_result = event.tool_result + tool_payload = tool_call or tool_result + tool_call_id = tool_payload.id if tool_payload and tool_payload.id else None + tool_name = 
tool_payload.name if tool_payload and tool_payload.name else None + tool_arguments = tool_call.arguments if tool_call else None + tool_elapsed_time = tool_result.elapsed_time if tool_result else None + tool_files = tool_result.files if tool_result else [] + tool_icon = tool_payload.icon if tool_payload else None + tool_icon_dark = tool_payload.icon_dark if tool_payload else None + # only publish tts message at text chunk streaming if tts_publisher and queue_message: tts_publisher.publish(queue_message) - yield self._text_chunk_to_stream_response(delta_text, from_variable_selector=event.from_variable_selector) + yield self._text_chunk_to_stream_response( + text=delta_text, + from_variable_selector=event.from_variable_selector, + chunk_type=event.chunk_type, + tool_call_id=tool_call_id, + tool_name=tool_name, + tool_arguments=tool_arguments, + tool_files=tool_files, + tool_elapsed_time=tool_elapsed_time, + tool_icon=tool_icon, + tool_icon_dark=tool_icon_dark, + ) def _handle_agent_log_event(self, event: QueueAgentLogEvent, **kwargs) -> Generator[StreamResponse, None, None]: """Handle agent log events.""" @@ -650,16 +673,61 @@ class WorkflowAppGenerateTaskPipeline(GraphRuntimeStateSupport): session.add(workflow_app_log) def _text_chunk_to_stream_response( - self, text: str, from_variable_selector: list[str] | None = None + self, + text: str, + from_variable_selector: list[str] | None = None, + chunk_type: ChunkType | None = None, + tool_call_id: str | None = None, + tool_name: str | None = None, + tool_arguments: str | None = None, + tool_files: list[str] | None = None, + tool_error: str | None = None, + tool_elapsed_time: float | None = None, + tool_icon: str | dict | None = None, + tool_icon_dark: str | dict | None = None, ) -> TextChunkStreamResponse: """ Handle completed event. 
:param text: text :return: """ + from core.app.entities.task_entities import ChunkType as ResponseChunkType + + response_chunk_type = ResponseChunkType(chunk_type.value) if chunk_type else ResponseChunkType.TEXT + + data = TextChunkStreamResponse.Data( + text=text, + from_variable_selector=from_variable_selector, + chunk_type=response_chunk_type, + ) + + if response_chunk_type == ResponseChunkType.TOOL_CALL: + data = data.model_copy( + update={ + "tool_call_id": tool_call_id, + "tool_name": tool_name, + "tool_arguments": tool_arguments, + "tool_icon": tool_icon, + "tool_icon_dark": tool_icon_dark, + } + ) + elif response_chunk_type == ResponseChunkType.TOOL_RESULT: + data = data.model_copy( + update={ + "tool_call_id": tool_call_id, + "tool_name": tool_name, + "tool_arguments": tool_arguments, + "tool_files": tool_files, + "tool_error": tool_error, + "tool_elapsed_time": tool_elapsed_time, + "tool_icon": tool_icon, + "tool_icon_dark": tool_icon_dark, + } + ) + response = TextChunkStreamResponse( task_id=self._application_generate_entity.task_id, - data=TextChunkStreamResponse.Data(text=text, from_variable_selector=from_variable_selector), + data=data, ) return response diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py index 0e125b3538..b306a0376e 100644 --- a/api/core/app/apps/workflow_app_runner.py +++ b/api/core/app/apps/workflow_app_runner.py @@ -77,10 +77,18 @@ class WorkflowBasedAppRunner: self._app_id = app_id self._graph_engine_layers = graph_engine_layers + @staticmethod + def _resolve_user_from(invoke_from: InvokeFrom) -> UserFrom: + if invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER}: + return UserFrom.ACCOUNT + return UserFrom.END_USER + def _init_graph( self, graph_config: Mapping[str, Any], graph_runtime_state: GraphRuntimeState, + user_from: UserFrom, + invoke_from: InvokeFrom, workflow_id: str = "", tenant_id: str = "", user_id: str = "", @@ -105,8 +113,8 @@ class WorkflowBasedAppRunner: workflow_id=workflow_id, graph_config=graph_config, user_id=user_id, - user_from=UserFrom.ACCOUNT, - invoke_from=InvokeFrom.SERVICE_API, + user_from=user_from, + invoke_from=invoke_from, call_depth=0, ) @@ -250,7 +258,7 @@ class WorkflowBasedAppRunner: graph_config=graph_config, user_id="", user_from=UserFrom.ACCOUNT, - invoke_from=InvokeFrom.SERVICE_API, + invoke_from=InvokeFrom.DEBUGGER, call_depth=0, ) @@ -455,12 +463,20 @@ class WorkflowBasedAppRunner: ) ) elif isinstance(event, NodeRunStreamChunkEvent): + from core.app.entities.queue_entities import ChunkType as QueueChunkType + + if event.is_final and not event.chunk: + return + self._publish_event( QueueTextChunkEvent( text=event.chunk, from_variable_selector=list(event.selector), in_iteration_id=event.in_iteration_id, in_loop_id=event.in_loop_id, + chunk_type=QueueChunkType(event.chunk_type.value), + tool_call=event.tool_call, + tool_result=event.tool_result, ) ) elif isinstance(event, NodeRunRetrieverResourceEvent): diff --git a/api/core/app/entities/app_invoke_entities.py b/api/core/app/entities/app_invoke_entities.py index 0cb573cb86..5bc453420d 100644 --- a/api/core/app/entities/app_invoke_entities.py +++ b/api/core/app/entities/app_invoke_entities.py @@ -42,7 +42,8 @@ class InvokeFrom(StrEnum): # DEBUGGER indicates that this invocation is from # the workflow (or chatflow) edit page. DEBUGGER = "debugger" - PUBLISHED = "published" + # PUBLISHED_PIPELINE indicates that this invocation runs a published RAG pipeline workflow. 
+ PUBLISHED_PIPELINE = "published" # VALIDATION indicates that this invocation is from validation. VALIDATION = "validation" diff --git a/api/core/app/entities/llm_generation_entities.py b/api/core/app/entities/llm_generation_entities.py new file mode 100644 index 0000000000..33e97e3299 --- /dev/null +++ b/api/core/app/entities/llm_generation_entities.py @@ -0,0 +1,70 @@ +""" +LLM Generation Detail entities. + +Defines the structure for storing and transmitting LLM generation details +including reasoning content, tool calls, and their sequence. +""" + +from typing import Literal + +from pydantic import BaseModel, Field + + +class ContentSegment(BaseModel): + """Represents a content segment in the generation sequence.""" + + type: Literal["content"] = "content" + start: int = Field(..., description="Start position in the text") + end: int = Field(..., description="End position in the text") + + +class ReasoningSegment(BaseModel): + """Represents a reasoning segment in the generation sequence.""" + + type: Literal["reasoning"] = "reasoning" + index: int = Field(..., description="Index into reasoning_content array") + + +class ToolCallSegment(BaseModel): + """Represents a tool call segment in the generation sequence.""" + + type: Literal["tool_call"] = "tool_call" + index: int = Field(..., description="Index into tool_calls array") + + +SequenceSegment = ContentSegment | ReasoningSegment | ToolCallSegment + + +class ToolCallDetail(BaseModel): + """Represents a tool call with its arguments and result.""" + + id: str = Field(default="", description="Unique identifier for the tool call") + name: str = Field(..., description="Name of the tool") + arguments: str = Field(default="", description="JSON string of tool arguments") + result: str = Field(default="", description="Result from the tool execution") + elapsed_time: float | None = Field(default=None, description="Elapsed time in seconds") + + +class LLMGenerationDetailData(BaseModel): + """ + Domain model for LLM generation detail. + + Contains the structured data for reasoning content, tool calls, + and their display sequence. 
+ """ + + reasoning_content: list[str] = Field(default_factory=list, description="List of reasoning segments") + tool_calls: list[ToolCallDetail] = Field(default_factory=list, description="List of tool call details") + sequence: list[SequenceSegment] = Field(default_factory=list, description="Display order of segments") + + def is_empty(self) -> bool: + """Check if there's any meaningful generation detail.""" + return not self.reasoning_content and not self.tool_calls + + def to_response_dict(self) -> dict: + """Convert to dictionary for API response.""" + return { + "reasoning_content": self.reasoning_content, + "tool_calls": [tc.model_dump() for tc in self.tool_calls], + "sequence": [seg.model_dump() for seg in self.sequence], + } diff --git a/api/core/app/entities/queue_entities.py b/api/core/app/entities/queue_entities.py index 77d6bf03b4..fdc4014caa 100644 --- a/api/core/app/entities/queue_entities.py +++ b/api/core/app/entities/queue_entities.py @@ -7,7 +7,7 @@ from pydantic import BaseModel, ConfigDict, Field from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk from core.rag.entities.citation_metadata import RetrievalSourceMetadata -from core.workflow.entities import AgentNodeStrategyInit +from core.workflow.entities import AgentNodeStrategyInit, ToolCall, ToolResult from core.workflow.enums import WorkflowNodeExecutionMetadataKey from core.workflow.nodes import NodeType @@ -177,6 +177,17 @@ class QueueLoopCompletedEvent(AppQueueEvent): error: str | None = None +class ChunkType(StrEnum): + """Stream chunk type for LLM-related events.""" + + TEXT = "text" # Normal text streaming + TOOL_CALL = "tool_call" # Tool call arguments streaming + TOOL_RESULT = "tool_result" # Tool execution result + THOUGHT = "thought" # Agent thinking process (ReAct) + THOUGHT_START = "thought_start" # Agent thought start + THOUGHT_END = "thought_end" # Agent thought end + + class QueueTextChunkEvent(AppQueueEvent): """ QueueTextChunkEvent entity @@ -191,6 +202,16 @@ class QueueTextChunkEvent(AppQueueEvent): in_loop_id: str | None = None """loop id if node is in loop""" + # Extended fields for Agent/Tool streaming + chunk_type: ChunkType = ChunkType.TEXT + """type of the chunk""" + + # Tool streaming payloads + tool_call: ToolCall | None = None + """structured tool call info""" + tool_result: ToolResult | None = None + """structured tool result info""" + class QueueAgentMessageEvent(AppQueueEvent): """ diff --git a/api/core/app/entities/task_entities.py b/api/core/app/entities/task_entities.py index 79a5e657b3..0998510b60 100644 --- a/api/core/app/entities/task_entities.py +++ b/api/core/app/entities/task_entities.py @@ -113,6 +113,38 @@ class MessageStreamResponse(StreamResponse): answer: str from_variable_selector: list[str] | None = None + # Extended fields for Agent/Tool streaming (imported at runtime to avoid circular import) + chunk_type: str | None = None + """type of the chunk: text, tool_call, tool_result, thought""" + + # Tool call fields (when chunk_type == "tool_call") + tool_call_id: str | None = None + """unique identifier for this tool call""" + tool_name: str | None = None + """name of the tool being called""" + tool_arguments: str | None = None + """accumulated tool arguments JSON""" + + # Tool result fields (when chunk_type == "tool_result") + tool_files: list[str] | None = None + """file IDs produced by tool""" + tool_error: str | None = None + """error message if tool failed""" + tool_elapsed_time: float | None = None + """elapsed time spent executing the tool""" 
+ tool_icon: str | dict | None = None + """icon of the tool""" + tool_icon_dark: str | dict | None = None + """dark theme icon of the tool""" + + def model_dump(self, *args, **kwargs) -> dict[str, object]: + kwargs.setdefault("exclude_none", True) + return super().model_dump(*args, **kwargs) + + def model_dump_json(self, *args, **kwargs) -> str: + kwargs.setdefault("exclude_none", True) + return super().model_dump_json(*args, **kwargs) + class MessageAudioStreamResponse(StreamResponse): """ @@ -582,6 +614,17 @@ class LoopNodeCompletedStreamResponse(StreamResponse): data: Data +class ChunkType(StrEnum): + """Stream chunk type for LLM-related events.""" + + TEXT = "text" # Normal text streaming + TOOL_CALL = "tool_call" # Tool call arguments streaming + TOOL_RESULT = "tool_result" # Tool execution result + THOUGHT = "thought" # Agent thinking process (ReAct) + THOUGHT_START = "thought_start" # Agent thought start + THOUGHT_END = "thought_end" # Agent thought end + + class TextChunkStreamResponse(StreamResponse): """ TextChunkStreamResponse entity @@ -595,6 +638,36 @@ class TextChunkStreamResponse(StreamResponse): text: str from_variable_selector: list[str] | None = None + # Extended fields for Agent/Tool streaming + chunk_type: ChunkType = ChunkType.TEXT + """type of the chunk""" + + # Tool call fields (when chunk_type == TOOL_CALL) + tool_call_id: str | None = None + """unique identifier for this tool call""" + tool_name: str | None = None + """name of the tool being called""" + tool_arguments: str | None = None + """accumulated tool arguments JSON""" + + # Tool result fields (when chunk_type == TOOL_RESULT) + tool_files: list[str] | None = None + """file IDs produced by tool""" + tool_error: str | None = None + """error message if tool failed""" + + # Tool elapsed time fields (when chunk_type == TOOL_RESULT) + tool_elapsed_time: float | None = None + """elapsed time spent executing the tool""" + + def model_dump(self, *args, **kwargs) -> dict[str, object]: + kwargs.setdefault("exclude_none", True) + return super().model_dump(*args, **kwargs) + + def model_dump_json(self, *args, **kwargs) -> str: + kwargs.setdefault("exclude_none", True) + return super().model_dump_json(*args, **kwargs) + event: StreamEvent = StreamEvent.TEXT_CHUNK data: Data @@ -743,7 +816,7 @@ class AgentLogStreamResponse(StreamResponse): """ node_execution_id: str - id: str + message_id: str label: str parent_id: str | None = None error: str | None = None diff --git a/api/core/app/features/annotation_reply/annotation_reply.py b/api/core/app/features/annotation_reply/annotation_reply.py index 79fbafe39e..3f9f3da9b2 100644 --- a/api/core/app/features/annotation_reply/annotation_reply.py +++ b/api/core/app/features/annotation_reply/annotation_reply.py @@ -75,7 +75,7 @@ class AnnotationReplyFeature: AppAnnotationService.add_annotation_history( annotation.id, app_record.id, - annotation.question, + annotation.question_text, annotation.content, query, user_id, diff --git a/api/core/app/layers/conversation_variable_persist_layer.py b/api/core/app/layers/conversation_variable_persist_layer.py new file mode 100644 index 0000000000..77cc00bdc9 --- /dev/null +++ b/api/core/app/layers/conversation_variable_persist_layer.py @@ -0,0 +1,60 @@ +import logging + +from core.variables import Variable +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID +from core.workflow.conversation_variable_updater import ConversationVariableUpdater +from core.workflow.enums import NodeType +from core.workflow.graph_engine.layers.base 
import GraphEngineLayer +from core.workflow.graph_events import GraphEngineEvent, NodeRunSucceededEvent +from core.workflow.nodes.variable_assigner.common import helpers as common_helpers + +logger = logging.getLogger(__name__) + + +class ConversationVariablePersistenceLayer(GraphEngineLayer): + def __init__(self, conversation_variable_updater: ConversationVariableUpdater) -> None: + super().__init__() + self._conversation_variable_updater = conversation_variable_updater + + def on_graph_start(self) -> None: + pass + + def on_event(self, event: GraphEngineEvent) -> None: + if not isinstance(event, NodeRunSucceededEvent): + return + if event.node_type != NodeType.VARIABLE_ASSIGNER: + return + if self.graph_runtime_state is None: + return + + updated_variables = common_helpers.get_updated_variables(event.node_run_result.process_data) or [] + if not updated_variables: + return + + conversation_id = self.graph_runtime_state.system_variable.conversation_id + if conversation_id is None: + return + + updated_any = False + for item in updated_variables: + selector = item.selector + if len(selector) < 2: + logger.warning("Conversation variable selector invalid. selector=%s", selector) + continue + if selector[0] != CONVERSATION_VARIABLE_NODE_ID: + continue + variable = self.graph_runtime_state.variable_pool.get(selector) + if not isinstance(variable, Variable): + logger.warning( + "Conversation variable not found in variable pool. selector=%s", + selector, + ) + continue + self._conversation_variable_updater.update(conversation_id=conversation_id, variable=variable) + updated_any = True + + if updated_any: + self._conversation_variable_updater.flush() + + def on_graph_end(self, error: Exception | None) -> None: + pass diff --git a/api/core/app/layers/pause_state_persist_layer.py b/api/core/app/layers/pause_state_persist_layer.py index 61a3e1baca..bf76ae8178 100644 --- a/api/core/app/layers/pause_state_persist_layer.py +++ b/api/core/app/layers/pause_state_persist_layer.py @@ -66,6 +66,7 @@ class PauseStatePersistenceLayer(GraphEngineLayer): """ if isinstance(session_factory, Engine): session_factory = sessionmaker(session_factory) + super().__init__() self._session_maker = session_factory self._state_owner_user_id = state_owner_user_id self._generate_entity = generate_entity @@ -98,8 +99,6 @@ class PauseStatePersistenceLayer(GraphEngineLayer): if not isinstance(event, GraphRunPausedEvent): return - assert self.graph_runtime_state is not None - entity_wrapper: _GenerateEntityUnion if isinstance(self._generate_entity, WorkflowAppGenerateEntity): entity_wrapper = _WorkflowGenerateEntityWrapper(entity=self._generate_entity) diff --git a/api/core/app/layers/trigger_post_layer.py b/api/core/app/layers/trigger_post_layer.py index fe1a46a945..225b758fcb 100644 --- a/api/core/app/layers/trigger_post_layer.py +++ b/api/core/app/layers/trigger_post_layer.py @@ -33,6 +33,7 @@ class TriggerPostLayer(GraphEngineLayer): trigger_log_id: str, session_maker: sessionmaker[Session], ): + super().__init__() self.trigger_log_id = trigger_log_id self.start_time = start_time self.cfs_plan_scheduler_entity = cfs_plan_scheduler_entity @@ -57,10 +58,6 @@ class TriggerPostLayer(GraphEngineLayer): elapsed_time = (datetime.now(UTC) - self.start_time).total_seconds() # Extract relevant data from result - if not self.graph_runtime_state: - logger.exception("Graph runtime state is not set") - return - outputs = self.graph_runtime_state.outputs # BASICLY, workflow_execution_id is the same as workflow_run_id diff --git 
a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py index 5c169f4db1..a1852ffe19 100644 --- a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py +++ b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py @@ -1,4 +1,5 @@ import logging +import re import time from collections.abc import Generator from threading import Thread @@ -58,7 +59,7 @@ from core.prompt.utils.prompt_template_parser import PromptTemplateParser from events.message_event import message_was_created from extensions.ext_database import db from libs.datetime_utils import naive_utc_now -from models.model import AppMode, Conversation, Message, MessageAgentThought +from models.model import AppMode, Conversation, LLMGenerationDetail, Message, MessageAgentThought logger = logging.getLogger(__name__) @@ -68,6 +69,8 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline): EasyUIBasedGenerateTaskPipeline is a class that generate stream output and state management for Application. """ + _THINK_PATTERN = re.compile(r"<think[^>]*>(.*?)</think>", re.IGNORECASE | re.DOTALL) + _task_state: EasyUITaskState _application_generate_entity: Union[ChatAppGenerateEntity, CompletionAppGenerateEntity, AgentChatAppGenerateEntity] @@ -342,9 +345,11 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline): self._task_state.llm_result.message.content = current_content if isinstance(event, QueueLLMChunkEvent): + event_type = self._message_cycle_manager.get_message_event_type(message_id=self._message_id) yield self._message_cycle_manager.message_to_stream_response( answer=cast(str, delta_text), message_id=self._message_id, + event_type=event_type, ) else: yield self._agent_message_to_stream_response( @@ -407,11 +412,136 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline): ) ) + # Save LLM generation detail if there's reasoning_content + self._save_generation_detail(session=session, message=message, llm_result=llm_result) + message_was_created.send( message, application_generate_entity=self._application_generate_entity, ) + def _save_generation_detail(self, *, session: Session, message: Message, llm_result: LLMResult) -> None: + """ + Save LLM generation detail for Completion/Chat/Agent-Chat applications. + For Agent-Chat, also merges MessageAgentThought records.
+ """ + import json + + reasoning_list: list[str] = [] + tool_calls_list: list[dict] = [] + sequence: list[dict] = [] + answer = message.answer or "" + + # Check if this is Agent-Chat mode by looking for agent thoughts + agent_thoughts = ( + session.query(MessageAgentThought) + .filter_by(message_id=message.id) + .order_by(MessageAgentThought.position.asc()) + .all() + ) + + if agent_thoughts: + # Agent-Chat mode: merge MessageAgentThought records + content_pos = 0 + cleaned_answer_parts: list[str] = [] + for thought in agent_thoughts: + # Add thought/reasoning + if thought.thought: + reasoning_text = thought.thought + if " blocks and clean the final answer + clean_answer, reasoning_content = self._split_reasoning_from_answer(answer) + if reasoning_content: + answer = clean_answer + llm_result.message.content = clean_answer + llm_result.reasoning_content = reasoning_content + message.answer = clean_answer + if reasoning_content: + reasoning_list = [reasoning_content] + # Content comes first, then reasoning + if answer: + sequence.append({"type": "content", "start": 0, "end": len(answer)}) + sequence.append({"type": "reasoning", "index": 0}) + + # Only save if there's meaningful generation detail + if not reasoning_list and not tool_calls_list: + return + + # Check if generation detail already exists + existing = session.query(LLMGenerationDetail).filter_by(message_id=message.id).first() + + if existing: + existing.reasoning_content = json.dumps(reasoning_list) if reasoning_list else None + existing.tool_calls = json.dumps(tool_calls_list) if tool_calls_list else None + existing.sequence = json.dumps(sequence) if sequence else None + else: + generation_detail = LLMGenerationDetail( + tenant_id=self._application_generate_entity.app_config.tenant_id, + app_id=self._application_generate_entity.app_config.app_id, + message_id=message.id, + reasoning_content=json.dumps(reasoning_list) if reasoning_list else None, + tool_calls=json.dumps(tool_calls_list) if tool_calls_list else None, + sequence=json.dumps(sequence) if sequence else None, + ) + session.add(generation_detail) + + @classmethod + def _split_reasoning_from_answer(cls, text: str) -> tuple[str, str]: + """ + Extract reasoning segments from blocks and return (clean_text, reasoning). + """ + matches = cls._THINK_PATTERN.findall(text) + reasoning_content = "\n".join(match.strip() for match in matches) if matches else "" + + clean_text = cls._THINK_PATTERN.sub("", text) + clean_text = re.sub(r"\n\s*\n", "\n\n", clean_text).strip() + + return clean_text, reasoning_content or "" + def _handle_stop(self, event: QueueStopEvent): """ Handle stop. 
diff --git a/api/core/app/task_pipeline/message_cycle_manager.py b/api/core/app/task_pipeline/message_cycle_manager.py index 2e6f92efa5..f9f341fcea 100644 --- a/api/core/app/task_pipeline/message_cycle_manager.py +++ b/api/core/app/task_pipeline/message_cycle_manager.py @@ -5,7 +5,7 @@ from threading import Thread from typing import Union from flask import Flask, current_app -from sqlalchemy import select +from sqlalchemy import exists, select from sqlalchemy.orm import Session from configs import dify_config @@ -54,6 +54,20 @@ class MessageCycleManager: ): self._application_generate_entity = application_generate_entity self._task_state = task_state + self._message_has_file: set[str] = set() + + def get_message_event_type(self, message_id: str) -> StreamEvent: + if message_id in self._message_has_file: + return StreamEvent.MESSAGE_FILE + + with Session(db.engine, expire_on_commit=False) as session: + has_file = session.query(exists().where(MessageFile.message_id == message_id)).scalar() + + if has_file: + self._message_has_file.add(message_id) + return StreamEvent.MESSAGE_FILE + + return StreamEvent.MESSAGE def generate_conversation_name(self, *, conversation_id: str, query: str) -> Thread | None: """ @@ -214,26 +228,71 @@ class MessageCycleManager: return None def message_to_stream_response( - self, answer: str, message_id: str, from_variable_selector: list[str] | None = None + self, + answer: str, + message_id: str, + from_variable_selector: list[str] | None = None, + chunk_type: str | None = None, + tool_call_id: str | None = None, + tool_name: str | None = None, + tool_arguments: str | None = None, + tool_files: list[str] | None = None, + tool_error: str | None = None, + tool_elapsed_time: float | None = None, + tool_icon: str | dict | None = None, + tool_icon_dark: str | dict | None = None, + event_type: StreamEvent | None = None, ) -> MessageStreamResponse: """ Message to stream response. 
:param answer: answer :param message_id: message id + :param from_variable_selector: from variable selector + :param chunk_type: type of the chunk (text, tool_call, tool_result, thought) + :param tool_call_id: unique identifier for this tool call + :param tool_name: name of the tool being called + :param tool_arguments: accumulated tool arguments JSON + :param tool_files: file IDs produced by tool + :param tool_error: error message if tool failed + :param tool_elapsed_time: elapsed time spent executing the tool + :param tool_icon: icon of the tool + :param tool_icon_dark: dark theme icon of the tool + :param event_type: stream event type (defaults to StreamEvent.MESSAGE) :return: """ - with Session(db.engine, expire_on_commit=False) as session: - message_file = session.scalar(select(MessageFile).where(MessageFile.id == message_id)) - event_type = StreamEvent.MESSAGE_FILE if message_file else StreamEvent.MESSAGE - - return MessageStreamResponse( + response = MessageStreamResponse( task_id=self._application_generate_entity.task_id, id=message_id, answer=answer, from_variable_selector=from_variable_selector, - event=event_type, + event=event_type or StreamEvent.MESSAGE, ) + if chunk_type: + response = response.model_copy(update={"chunk_type": chunk_type}) + + if chunk_type == "tool_call": + response = response.model_copy( + update={ + "tool_call_id": tool_call_id, + "tool_name": tool_name, + "tool_arguments": tool_arguments, + "tool_icon": tool_icon, + "tool_icon_dark": tool_icon_dark, + } + ) + elif chunk_type == "tool_result": + response = response.model_copy( + update={ + "tool_call_id": tool_call_id, + "tool_name": tool_name, + "tool_arguments": tool_arguments, + "tool_files": tool_files, + "tool_error": tool_error, + "tool_elapsed_time": tool_elapsed_time, + "tool_icon": tool_icon, + "tool_icon_dark": tool_icon_dark, + } + ) + + return response + def message_replace_to_stream_response(self, answer: str, reason: str = "") -> MessageReplaceStreamResponse: """ Message replace to stream response.
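The model_dump / model_dump_json overrides added to MessageStreamResponse and TextChunkStreamResponse in api/core/app/entities/task_entities.py earlier in this diff all rely on the same exclude_none default. A self-contained sketch of that pattern with a hypothetical Chunk model (not part of the patch), showing why plain text chunks stay small on the wire while tool chunks carry their extra fields:

from pydantic import BaseModel

class Chunk(BaseModel):
    text: str
    chunk_type: str = "text"
    tool_call_id: str | None = None
    tool_name: str | None = None

    def model_dump(self, *args, **kwargs):
        # Default to excluding None fields so ordinary text chunks do not
        # carry a set of null tool_* keys in every SSE payload
        kwargs.setdefault("exclude_none", True)
        return super().model_dump(*args, **kwargs)

print(Chunk(text="hi").model_dump())
# {'text': 'hi', 'chunk_type': 'text'}
print(Chunk(text="", chunk_type="tool_call", tool_call_id="c1", tool_name="search").model_dump())
# {'text': '', 'chunk_type': 'tool_call', 'tool_call_id': 'c1', 'tool_name': 'search'}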
diff --git a/api/core/callback_handler/index_tool_callback_handler.py b/api/core/callback_handler/index_tool_callback_handler.py index d0279349ca..5249fea8cd 100644 --- a/api/core/callback_handler/index_tool_callback_handler.py +++ b/api/core/callback_handler/index_tool_callback_handler.py @@ -5,7 +5,6 @@ from sqlalchemy import select from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom from core.app.entities.app_invoke_entities import InvokeFrom -from core.app.entities.queue_entities import QueueRetrieverResourcesEvent from core.rag.entities.citation_metadata import RetrievalSourceMetadata from core.rag.index_processor.constant.index_type import IndexStructureType from core.rag.models.document import Document @@ -90,6 +89,8 @@ class DatasetIndexToolCallbackHandler: # TODO(-LAN-): Improve type check def return_retriever_resource_info(self, resource: Sequence[RetrievalSourceMetadata]): """Handle return_retriever_resource_info.""" + from core.app.entities.queue_entities import QueueRetrieverResourcesEvent + self._queue_manager.publish( QueueRetrieverResourcesEvent(retriever_resources=resource), PublishFrom.APPLICATION_MANAGER ) diff --git a/api/core/datasource/__base/datasource_plugin.py b/api/core/datasource/__base/datasource_plugin.py index 50c7249fe4..451e4fda0e 100644 --- a/api/core/datasource/__base/datasource_plugin.py +++ b/api/core/datasource/__base/datasource_plugin.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from abc import ABC, abstractmethod from configs import dify_config @@ -30,7 +32,7 @@ class DatasourcePlugin(ABC): """ return DatasourceProviderType.LOCAL_FILE - def fork_datasource_runtime(self, runtime: DatasourceRuntime) -> "DatasourcePlugin": + def fork_datasource_runtime(self, runtime: DatasourceRuntime) -> DatasourcePlugin: return self.__class__( entity=self.entity.model_copy(), runtime=runtime, diff --git a/api/core/datasource/entities/datasource_entities.py b/api/core/datasource/entities/datasource_entities.py index 260dcf04f5..dde7d59726 100644 --- a/api/core/datasource/entities/datasource_entities.py +++ b/api/core/datasource/entities/datasource_entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import enum from enum import StrEnum from typing import Any @@ -31,7 +33,7 @@ class DatasourceProviderType(enum.StrEnum): ONLINE_DRIVE = "online_drive" @classmethod - def value_of(cls, value: str) -> "DatasourceProviderType": + def value_of(cls, value: str) -> DatasourceProviderType: """ Get value of given mode. 
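A small aside on the `from __future__ import annotations` additions in this file and the neighboring entity modules: the import makes all annotations lazily evaluated, which is what lets the quoted self-references like "DatasourceProviderType" be written plainly. A minimal self-contained illustration (hypothetical class, not from the codebase):

from __future__ import annotations

class DatasourcePluginLike:
    def fork(self) -> DatasourcePluginLike:
        # Without the future import, this annotation would have to be the
        # string "DatasourcePluginLike", since the name is not bound until
        # the class body finishes executing.
        return DatasourcePluginLike()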
@@ -81,7 +83,7 @@ class DatasourceParameter(PluginParameter): typ: DatasourceParameterType, required: bool, options: list[str] | None = None, - ) -> "DatasourceParameter": + ) -> DatasourceParameter: """ get a simple datasource parameter @@ -187,14 +189,14 @@ class DatasourceInvokeMeta(BaseModel): tool_config: dict | None = None @classmethod - def empty(cls) -> "DatasourceInvokeMeta": + def empty(cls) -> DatasourceInvokeMeta: """ Get an empty instance of DatasourceInvokeMeta """ return cls(time_cost=0.0, error=None, tool_config={}) @classmethod - def error_instance(cls, error: str) -> "DatasourceInvokeMeta": + def error_instance(cls, error: str) -> DatasourceInvokeMeta: """ Get an instance of DatasourceInvokeMeta with error """ diff --git a/api/core/db/session_factory.py b/api/core/db/session_factory.py index 1dae2eafd4..45d4bc4594 100644 --- a/api/core/db/session_factory.py +++ b/api/core/db/session_factory.py @@ -1,7 +1,7 @@ from sqlalchemy import Engine from sqlalchemy.orm import Session, sessionmaker -_session_maker: sessionmaker | None = None +_session_maker: sessionmaker[Session] | None = None def configure_session_factory(engine: Engine, expire_on_commit: bool = False): @@ -10,7 +10,7 @@ def configure_session_factory(engine: Engine, expire_on_commit: bool = False): _session_maker = sessionmaker(bind=engine, expire_on_commit=expire_on_commit) -def get_session_maker() -> sessionmaker: +def get_session_maker() -> sessionmaker[Session]: if _session_maker is None: raise RuntimeError("Session factory not configured. Call configure_session_factory() first.") return _session_maker @@ -27,7 +27,7 @@ class SessionFactory: configure_session_factory(engine, expire_on_commit) @staticmethod - def get_session_maker() -> sessionmaker: + def get_session_maker() -> sessionmaker[Session]: return get_session_maker() @staticmethod diff --git a/api/core/entities/mcp_provider.py b/api/core/entities/mcp_provider.py index 7fdf5e4be6..135d2a4945 100644 --- a/api/core/entities/mcp_provider.py +++ b/api/core/entities/mcp_provider.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from datetime import datetime from enum import StrEnum @@ -75,7 +77,7 @@ class MCPProviderEntity(BaseModel): updated_at: datetime @classmethod - def from_db_model(cls, db_provider: "MCPToolProvider") -> "MCPProviderEntity": + def from_db_model(cls, db_provider: MCPToolProvider) -> MCPProviderEntity: """Create entity from database model with decryption""" return cls( diff --git a/api/core/entities/model_entities.py b/api/core/entities/model_entities.py index 12431976f0..a123fb0321 100644 --- a/api/core/entities/model_entities.py +++ b/api/core/entities/model_entities.py @@ -30,7 +30,6 @@ class SimpleModelProviderEntity(BaseModel): label: I18nObject icon_small: I18nObject | None = None icon_small_dark: I18nObject | None = None - icon_large: I18nObject | None = None supported_model_types: list[ModelType] def __init__(self, provider_entity: ProviderEntity): @@ -44,7 +43,6 @@ class SimpleModelProviderEntity(BaseModel): label=provider_entity.label, icon_small=provider_entity.icon_small, icon_small_dark=provider_entity.icon_small_dark, - icon_large=provider_entity.icon_large, supported_model_types=provider_entity.supported_model_types, ) @@ -94,7 +92,6 @@ class DefaultModelProviderEntity(BaseModel): provider: str label: I18nObject icon_small: I18nObject | None = None - icon_large: I18nObject | None = None supported_model_types: Sequence[ModelType] = [] diff --git a/api/core/entities/provider_entities.py 
b/api/core/entities/provider_entities.py index 8a8067332d..0078ec7e4f 100644 --- a/api/core/entities/provider_entities.py +++ b/api/core/entities/provider_entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from enum import StrEnum, auto from typing import Union @@ -178,7 +180,7 @@ class BasicProviderConfig(BaseModel): TOOLS_SELECTOR = CommonParameterType.TOOLS_SELECTOR @classmethod - def value_of(cls, value: str) -> "ProviderConfig.Type": + def value_of(cls, value: str) -> ProviderConfig.Type: """ Get value of given mode. diff --git a/api/core/file/helpers.py b/api/core/file/helpers.py index 6d553d7dc6..2ac483673a 100644 --- a/api/core/file/helpers.py +++ b/api/core/file/helpers.py @@ -8,8 +8,9 @@ import urllib.parse from configs import dify_config -def get_signed_file_url(upload_file_id: str, as_attachment=False) -> str: - url = f"{dify_config.FILES_URL}/files/{upload_file_id}/file-preview" +def get_signed_file_url(upload_file_id: str, as_attachment=False, for_external: bool = True) -> str: + base_url = dify_config.FILES_URL if for_external else (dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL) + url = f"{base_url}/files/{upload_file_id}/file-preview" timestamp = str(int(time.time())) nonce = os.urandom(16).hex() diff --git a/api/core/file/models.py b/api/core/file/models.py index d149205d77..6324523b22 100644 --- a/api/core/file/models.py +++ b/api/core/file/models.py @@ -112,17 +112,17 @@ class File(BaseModel): return text - def generate_url(self) -> str | None: + def generate_url(self, for_external: bool = True) -> str | None: if self.transfer_method == FileTransferMethod.REMOTE_URL: return self.remote_url elif self.transfer_method == FileTransferMethod.LOCAL_FILE: if self.related_id is None: raise ValueError("Missing file related_id") - return helpers.get_signed_file_url(upload_file_id=self.related_id) + return helpers.get_signed_file_url(upload_file_id=self.related_id, for_external=for_external) elif self.transfer_method in [FileTransferMethod.TOOL_FILE, FileTransferMethod.DATASOURCE_FILE]: assert self.related_id is not None assert self.extension is not None - return sign_tool_file(tool_file_id=self.related_id, extension=self.extension) + return sign_tool_file(tool_file_id=self.related_id, extension=self.extension, for_external=for_external) return None def to_plugin_parameter(self) -> dict[str, Any]: @@ -133,7 +133,7 @@ class File(BaseModel): "extension": self.extension, "size": self.size, "type": self.type, - "url": self.generate_url(), + "url": self.generate_url(for_external=False), } @model_validator(mode="after") diff --git a/api/core/helper/code_executor/jinja2/jinja2_transformer.py b/api/core/helper/code_executor/jinja2/jinja2_transformer.py index 969125d2f7..5e4807401e 100644 --- a/api/core/helper/code_executor/jinja2/jinja2_transformer.py +++ b/api/core/helper/code_executor/jinja2/jinja2_transformer.py @@ -1,9 +1,14 @@ +from collections.abc import Mapping from textwrap import dedent +from typing import Any from core.helper.code_executor.template_transformer import TemplateTransformer class Jinja2TemplateTransformer(TemplateTransformer): + # Use separate placeholder for base64-encoded template to avoid confusion + _template_b64_placeholder: str = "{{template_b64}}" + @classmethod def transform_response(cls, response: str): """ @@ -13,18 +18,35 @@ class Jinja2TemplateTransformer(TemplateTransformer): """ return {"result": cls.extract_result_str_from_response(response)} + @classmethod + def assemble_runner_script(cls, code: str, inputs: Mapping[str, Any]) 
-> str: + """ + Override base class to use base64 encoding for template code. + This prevents issues with special characters (quotes, newlines) in templates + breaking the generated Python script. Fixes #26818. + """ + script = cls.get_runner_script() + # Encode template as base64 to safely embed any content including quotes + code_b64 = cls.serialize_code(code) + script = script.replace(cls._template_b64_placeholder, code_b64) + inputs_str = cls.serialize_inputs(inputs) + script = script.replace(cls._inputs_placeholder, inputs_str) + return script + @classmethod def get_runner_script(cls) -> str: runner_script = dedent(f""" - # declare main function - def main(**inputs): - import jinja2 - template = jinja2.Template('''{cls._code_placeholder}''') - return template.render(**inputs) - + import jinja2 import json from base64 import b64decode + # declare main function + def main(**inputs): + # Decode base64-encoded template to handle special characters safely + template_code = b64decode('{cls._template_b64_placeholder}').decode('utf-8') + template = jinja2.Template(template_code) + return template.render(**inputs) + # decode and prepare input dict inputs_obj = json.loads(b64decode('{cls._inputs_placeholder}').decode('utf-8')) diff --git a/api/core/helper/code_executor/template_transformer.py b/api/core/helper/code_executor/template_transformer.py index 3965f8cb31..5cdea19a8d 100644 --- a/api/core/helper/code_executor/template_transformer.py +++ b/api/core/helper/code_executor/template_transformer.py @@ -13,6 +13,15 @@ class TemplateTransformer(ABC): _inputs_placeholder: str = "{{inputs}}" _result_tag: str = "<>" + @classmethod + def serialize_code(cls, code: str) -> str: + """ + Serialize template code to base64 to safely embed in generated script. + This prevents issues with special characters like quotes breaking the script. + """ + code_bytes = code.encode("utf-8") + return b64encode(code_bytes).decode("utf-8") + @classmethod def transform_caller(cls, code: str, inputs: Mapping[str, Any]) -> tuple[str, str]: """ @@ -67,7 +76,7 @@ class TemplateTransformer(ABC): Post-process the result to convert scientific notation strings back to numbers """ - def convert_scientific_notation(value): + def convert_scientific_notation(value: Any) -> Any: if isinstance(value, str): # Check if the string looks like scientific notation if re.match(r"^-?\d+\.?\d*e[+-]\d+$", value, re.IGNORECASE): @@ -81,7 +90,7 @@ class TemplateTransformer(ABC): return [convert_scientific_notation(v) for v in value] return value - return convert_scientific_notation(result) # type: ignore[no-any-return] + return convert_scientific_notation(result) @classmethod @abstractmethod diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py index 6c98aea1be..1785cbde4c 100644 --- a/api/core/helper/ssrf_proxy.py +++ b/api/core/helper/ssrf_proxy.py @@ -72,7 +72,57 @@ def _get_ssrf_client(ssl_verify_enabled: bool) -> httpx.Client: ) +def _get_user_provided_host_header(headers: dict | None) -> str | None: + """ + Extract the user-provided Host header from the headers dict. + + This is needed because when using a forward proxy, httpx may override the Host header. + We preserve the user's explicit Host header to support virtual hosting and other use cases. 
+ """ + if not headers: + return None + # Case-insensitive lookup for Host header + for key, value in headers.items(): + if key.lower() == "host": + return value + return None + + +def _inject_trace_headers(headers: dict | None) -> dict: + """ + Inject W3C traceparent header for distributed tracing. + + When OTEL is enabled, HTTPXClientInstrumentor handles trace propagation automatically. + When OTEL is disabled, we manually inject the traceparent header. + """ + if headers is None: + headers = {} + + # Skip if already present (case-insensitive check) + for key in headers: + if key.lower() == "traceparent": + return headers + + # Skip if OTEL is enabled - HTTPXClientInstrumentor handles this automatically + if dify_config.ENABLE_OTEL: + return headers + + # Generate and inject traceparent for non-OTEL scenarios + try: + from core.helper.trace_id_helper import generate_traceparent_header + + traceparent = generate_traceparent_header() + if traceparent: + headers["traceparent"] = traceparent + except Exception: + # Silently ignore errors to avoid breaking requests + logger.debug("Failed to generate traceparent header", exc_info=True) + + return headers + + def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): + # Convert requests-style allow_redirects to httpx-style follow_redirects if "allow_redirects" in kwargs: allow_redirects = kwargs.pop("allow_redirects") if "follow_redirects" not in kwargs: @@ -90,10 +140,27 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): verify_option = kwargs.pop("ssl_verify", dify_config.HTTP_REQUEST_NODE_SSL_VERIFY) client = _get_ssrf_client(verify_option) + # Inject traceparent header for distributed tracing (when OTEL is not enabled) + headers = kwargs.get("headers") or {} + headers = _inject_trace_headers(headers) + kwargs["headers"] = headers + + # Preserve user-provided Host header + # When using a forward proxy, httpx may override the Host header based on the URL. + # We extract and preserve any explicitly set Host header to support virtual hosting. 
+ user_provided_host = _get_user_provided_host_header(headers) + retries = 0 while retries <= max_retries: try: + # Preserve the user-provided Host header + # httpx may override the Host header when using a proxy + headers = {k: v for k, v in headers.items() if k.lower() != "host"} + if user_provided_host is not None: + headers["host"] = user_provided_host + kwargs["headers"] = headers response = client.request(method=method, url=url, **kwargs) + # Check for SSRF protection by Squid proxy if response.status_code in (401, 403): # Check if this is a Squid SSRF rejection diff --git a/api/core/helper/tool_provider_cache.py b/api/core/helper/tool_provider_cache.py deleted file mode 100644 index eef5937407..0000000000 --- a/api/core/helper/tool_provider_cache.py +++ /dev/null @@ -1,56 +0,0 @@ -import json -import logging -from typing import Any - -from core.tools.entities.api_entities import ToolProviderTypeApiLiteral -from extensions.ext_redis import redis_client, redis_fallback - -logger = logging.getLogger(__name__) - - -class ToolProviderListCache: - """Cache for tool provider lists""" - - CACHE_TTL = 300 # 5 minutes - - @staticmethod - def _generate_cache_key(tenant_id: str, typ: ToolProviderTypeApiLiteral = None) -> str: - """Generate cache key for tool providers list""" - type_filter = typ or "all" - return f"tool_providers:tenant_id:{tenant_id}:type:{type_filter}" - - @staticmethod - @redis_fallback(default_return=None) - def get_cached_providers(tenant_id: str, typ: ToolProviderTypeApiLiteral = None) -> list[dict[str, Any]] | None: - """Get cached tool providers""" - cache_key = ToolProviderListCache._generate_cache_key(tenant_id, typ) - cached_data = redis_client.get(cache_key) - if cached_data: - try: - return json.loads(cached_data.decode("utf-8")) - except (json.JSONDecodeError, UnicodeDecodeError): - logger.warning("Failed to decode cached tool providers data") - return None - return None - - @staticmethod - @redis_fallback() - def set_cached_providers(tenant_id: str, typ: ToolProviderTypeApiLiteral, providers: list[dict[str, Any]]): - """Cache tool providers""" - cache_key = ToolProviderListCache._generate_cache_key(tenant_id, typ) - redis_client.setex(cache_key, ToolProviderListCache.CACHE_TTL, json.dumps(providers)) - - @staticmethod - @redis_fallback() - def invalidate_cache(tenant_id: str, typ: ToolProviderTypeApiLiteral = None): - """Invalidate cache for tool providers""" - if typ: - # Invalidate specific type cache - cache_key = ToolProviderListCache._generate_cache_key(tenant_id, typ) - redis_client.delete(cache_key) - else: - # Invalidate all caches for this tenant - pattern = f"tool_providers:tenant_id:{tenant_id}:*" - keys = list(redis_client.scan_iter(pattern)) - if keys: - redis_client.delete(*keys) diff --git a/api/core/helper/trace_id_helper.py b/api/core/helper/trace_id_helper.py index 820502e558..e827859109 100644 --- a/api/core/helper/trace_id_helper.py +++ b/api/core/helper/trace_id_helper.py @@ -103,3 +103,60 @@ def parse_traceparent_header(traceparent: str) -> str | None: if len(parts) == 4 and len(parts[1]) == 32: return parts[1] return None + + +def get_span_id_from_otel_context() -> str | None: + """ + Retrieve the current span ID from the active OpenTelemetry trace context. + + Returns: + A 16-character hex string representing the span ID, or None if not available. 
+ """ + try: + from opentelemetry.trace import get_current_span + from opentelemetry.trace.span import INVALID_SPAN_ID + + span = get_current_span() + if not span: + return None + + span_context = span.get_span_context() + if not span_context or span_context.span_id == INVALID_SPAN_ID: + return None + + return f"{span_context.span_id:016x}" + except Exception: + return None + + +def generate_traceparent_header() -> str | None: + """ + Generate a W3C traceparent header from the current context. + + Uses OpenTelemetry context if available, otherwise uses the + ContextVar-based trace_id from the logging context. + + Format: {version}-{trace_id}-{span_id}-{flags} + Example: 00-5b8aa5a2d2c872e8321cf37308d69df2-051581bf3bb55c45-01 + + Returns: + A valid traceparent header string, or None if generation fails. + """ + import uuid + + # Try OTEL context first + trace_id = get_trace_id_from_otel_context() + span_id = get_span_id_from_otel_context() + + if trace_id and span_id: + return f"00-{trace_id}-{span_id}-01" + + # Fallback: use ContextVar-based trace_id or generate new one + from core.logging.context import get_trace_id as get_logging_trace_id + + trace_id = get_logging_trace_id() or uuid.uuid4().hex + + # Generate a new span_id (16 hex chars) + span_id = uuid.uuid4().hex[:16] + + return f"00-{trace_id}-{span_id}-01" diff --git a/api/core/hosting_configuration.py b/api/core/hosting_configuration.py index af860a1070..370e64e385 100644 --- a/api/core/hosting_configuration.py +++ b/api/core/hosting_configuration.py @@ -56,6 +56,10 @@ class HostingConfiguration: self.provider_map[f"{DEFAULT_PLUGIN_ID}/minimax/minimax"] = self.init_minimax() self.provider_map[f"{DEFAULT_PLUGIN_ID}/spark/spark"] = self.init_spark() self.provider_map[f"{DEFAULT_PLUGIN_ID}/zhipuai/zhipuai"] = self.init_zhipuai() + self.provider_map[f"{DEFAULT_PLUGIN_ID}/gemini/google"] = self.init_gemini() + self.provider_map[f"{DEFAULT_PLUGIN_ID}/x/x"] = self.init_xai() + self.provider_map[f"{DEFAULT_PLUGIN_ID}/deepseek/deepseek"] = self.init_deepseek() + self.provider_map[f"{DEFAULT_PLUGIN_ID}/tongyi/tongyi"] = self.init_tongyi() self.moderation_config = self.init_moderation_config() @@ -128,7 +132,7 @@ class HostingConfiguration: quotas: list[HostingQuota] = [] if dify_config.HOSTED_OPENAI_TRIAL_ENABLED: - hosted_quota_limit = dify_config.HOSTED_OPENAI_QUOTA_LIMIT + hosted_quota_limit = 0 trial_models = self.parse_restrict_models_from_env("HOSTED_OPENAI_TRIAL_MODELS") trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit, restrict_models=trial_models) quotas.append(trial_quota) @@ -156,18 +160,49 @@ class HostingConfiguration: quota_unit=quota_unit, ) - @staticmethod - def init_anthropic() -> HostingProvider: - quota_unit = QuotaUnit.TOKENS + def init_gemini(self) -> HostingProvider: + quota_unit = QuotaUnit.CREDITS + quotas: list[HostingQuota] = [] + + if dify_config.HOSTED_GEMINI_TRIAL_ENABLED: + hosted_quota_limit = 0 + trial_models = self.parse_restrict_models_from_env("HOSTED_GEMINI_TRIAL_MODELS") + trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit, restrict_models=trial_models) + quotas.append(trial_quota) + + if dify_config.HOSTED_GEMINI_PAID_ENABLED: + paid_models = self.parse_restrict_models_from_env("HOSTED_GEMINI_PAID_MODELS") + paid_quota = PaidHostingQuota(restrict_models=paid_models) + quotas.append(paid_quota) + + if len(quotas) > 0: + credentials = { + "google_api_key": dify_config.HOSTED_GEMINI_API_KEY, + } + + if dify_config.HOSTED_GEMINI_API_BASE: + credentials["google_base_url"] = 
dify_config.HOSTED_GEMINI_API_BASE + + return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas) + + return HostingProvider( + enabled=False, + quota_unit=quota_unit, + ) + + def init_anthropic(self) -> HostingProvider: + quota_unit = QuotaUnit.CREDITS quotas: list[HostingQuota] = [] if dify_config.HOSTED_ANTHROPIC_TRIAL_ENABLED: - hosted_quota_limit = dify_config.HOSTED_ANTHROPIC_QUOTA_LIMIT - trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit) + hosted_quota_limit = 0 + trial_models = self.parse_restrict_models_from_env("HOSTED_ANTHROPIC_TRIAL_MODELS") + trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit, restrict_models=trial_models) quotas.append(trial_quota) if dify_config.HOSTED_ANTHROPIC_PAID_ENABLED: - paid_quota = PaidHostingQuota() + paid_models = self.parse_restrict_models_from_env("HOSTED_ANTHROPIC_PAID_MODELS") + paid_quota = PaidHostingQuota(restrict_models=paid_models) quotas.append(paid_quota) if len(quotas) > 0: @@ -185,6 +220,94 @@ class HostingConfiguration: quota_unit=quota_unit, ) + def init_tongyi(self) -> HostingProvider: + quota_unit = QuotaUnit.CREDITS + quotas: list[HostingQuota] = [] + + if dify_config.HOSTED_TONGYI_TRIAL_ENABLED: + hosted_quota_limit = 0 + trial_models = self.parse_restrict_models_from_env("HOSTED_TONGYI_TRIAL_MODELS") + trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit, restrict_models=trial_models) + quotas.append(trial_quota) + + if dify_config.HOSTED_TONGYI_PAID_ENABLED: + paid_models = self.parse_restrict_models_from_env("HOSTED_TONGYI_PAID_MODELS") + paid_quota = PaidHostingQuota(restrict_models=paid_models) + quotas.append(paid_quota) + + if len(quotas) > 0: + credentials = { + "dashscope_api_key": dify_config.HOSTED_TONGYI_API_KEY, + "use_international_endpoint": dify_config.HOSTED_TONGYI_USE_INTERNATIONAL_ENDPOINT, + } + + return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas) + + return HostingProvider( + enabled=False, + quota_unit=quota_unit, + ) + + def init_xai(self) -> HostingProvider: + quota_unit = QuotaUnit.CREDITS + quotas: list[HostingQuota] = [] + + if dify_config.HOSTED_XAI_TRIAL_ENABLED: + hosted_quota_limit = 0 + trial_models = self.parse_restrict_models_from_env("HOSTED_XAI_TRIAL_MODELS") + trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit, restrict_models=trial_models) + quotas.append(trial_quota) + + if dify_config.HOSTED_XAI_PAID_ENABLED: + paid_models = self.parse_restrict_models_from_env("HOSTED_XAI_PAID_MODELS") + paid_quota = PaidHostingQuota(restrict_models=paid_models) + quotas.append(paid_quota) + + if len(quotas) > 0: + credentials = { + "api_key": dify_config.HOSTED_XAI_API_KEY, + } + + if dify_config.HOSTED_XAI_API_BASE: + credentials["endpoint_url"] = dify_config.HOSTED_XAI_API_BASE + + return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas) + + return HostingProvider( + enabled=False, + quota_unit=quota_unit, + ) + + def init_deepseek(self) -> HostingProvider: + quota_unit = QuotaUnit.CREDITS + quotas: list[HostingQuota] = [] + + if dify_config.HOSTED_DEEPSEEK_TRIAL_ENABLED: + hosted_quota_limit = 0 + trial_models = self.parse_restrict_models_from_env("HOSTED_DEEPSEEK_TRIAL_MODELS") + trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit, restrict_models=trial_models) + quotas.append(trial_quota) + + if dify_config.HOSTED_DEEPSEEK_PAID_ENABLED: + paid_models = 
self.parse_restrict_models_from_env("HOSTED_DEEPSEEK_PAID_MODELS") + paid_quota = PaidHostingQuota(restrict_models=paid_models) + quotas.append(paid_quota) + + if len(quotas) > 0: + credentials = { + "api_key": dify_config.HOSTED_DEEPSEEK_API_KEY, + } + + if dify_config.HOSTED_DEEPSEEK_API_BASE: + credentials["endpoint_url"] = dify_config.HOSTED_DEEPSEEK_API_BASE + + return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas) + + return HostingProvider( + enabled=False, + quota_unit=quota_unit, + ) + @staticmethod def init_minimax() -> HostingProvider: quota_unit = QuotaUnit.TOKENS diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index 199d22e8f5..599a655ab9 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -406,7 +406,7 @@ class IndexingRunner: datasource_type=DatasourceType.NOTION, notion_info=NotionInfo.model_validate( { - "credential_id": data_source_info["credential_id"], + "credential_id": data_source_info.get("credential_id"), "notion_workspace_id": data_source_info["notion_workspace_id"], "notion_obj_id": data_source_info["notion_page_id"], "notion_page_type": data_source_info["type"], diff --git a/api/core/logging/__init__.py b/api/core/logging/__init__.py new file mode 100644 index 0000000000..db046cc9fa --- /dev/null +++ b/api/core/logging/__init__.py @@ -0,0 +1,20 @@ +"""Structured logging components for Dify.""" + +from core.logging.context import ( + clear_request_context, + get_request_id, + get_trace_id, + init_request_context, +) +from core.logging.filters import IdentityContextFilter, TraceContextFilter +from core.logging.structured_formatter import StructuredJSONFormatter + +__all__ = [ + "IdentityContextFilter", + "StructuredJSONFormatter", + "TraceContextFilter", + "clear_request_context", + "get_request_id", + "get_trace_id", + "init_request_context", +] diff --git a/api/core/logging/context.py b/api/core/logging/context.py new file mode 100644 index 0000000000..18633a0b05 --- /dev/null +++ b/api/core/logging/context.py @@ -0,0 +1,35 @@ +"""Request context for logging - framework agnostic. + +This module provides request-scoped context variables for logging, +using Python's contextvars for thread-safe and async-safe storage. +""" + +import uuid +from contextvars import ContextVar + +_request_id: ContextVar[str] = ContextVar("log_request_id", default="") +_trace_id: ContextVar[str] = ContextVar("log_trace_id", default="") + + +def get_request_id() -> str: + """Get current request ID (10 hex chars).""" + return _request_id.get() + + +def get_trace_id() -> str: + """Get fallback trace ID when OTEL is unavailable (32 hex chars).""" + return _trace_id.get() + + +def init_request_context() -> None: + """Initialize request context. Call at start of each request.""" + req_id = uuid.uuid4().hex[:10] + trace_id = uuid.uuid5(uuid.NAMESPACE_DNS, req_id).hex + _request_id.set(req_id) + _trace_id.set(trace_id) + + +def clear_request_context() -> None: + """Clear request context. 
Call at end of request (optional).""" + _request_id.set("") + _trace_id.set("") diff --git a/api/core/logging/filters.py b/api/core/logging/filters.py new file mode 100644 index 0000000000..1e8aa8d566 --- /dev/null +++ b/api/core/logging/filters.py @@ -0,0 +1,94 @@ +"""Logging filters for structured logging.""" + +import contextlib +import logging + +import flask + +from core.logging.context import get_request_id, get_trace_id + + +class TraceContextFilter(logging.Filter): + """ + Filter that adds trace_id and span_id to log records. + Integrates with OpenTelemetry when available, falls back to ContextVar-based trace_id. + """ + + def filter(self, record: logging.LogRecord) -> bool: + # Get trace context from OpenTelemetry + trace_id, span_id = self._get_otel_context() + + # Set trace_id (fallback to ContextVar if no OTEL context) + if trace_id: + record.trace_id = trace_id + else: + record.trace_id = get_trace_id() + + record.span_id = span_id or "" + + # For backward compatibility, also set req_id + record.req_id = get_request_id() + + return True + + def _get_otel_context(self) -> tuple[str, str]: + """Extract trace_id and span_id from OpenTelemetry context.""" + with contextlib.suppress(Exception): + from opentelemetry.trace import get_current_span + from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID + + span = get_current_span() + if span and span.get_span_context(): + ctx = span.get_span_context() + if ctx.is_valid and ctx.trace_id != INVALID_TRACE_ID: + trace_id = f"{ctx.trace_id:032x}" + span_id = f"{ctx.span_id:016x}" if ctx.span_id != INVALID_SPAN_ID else "" + return trace_id, span_id + return "", "" + + +class IdentityContextFilter(logging.Filter): + """ + Filter that adds user identity context to log records. + Extracts tenant_id, user_id, and user_type from Flask-Login current_user. 
+ """ + + def filter(self, record: logging.LogRecord) -> bool: + identity = self._extract_identity() + record.tenant_id = identity.get("tenant_id", "") + record.user_id = identity.get("user_id", "") + record.user_type = identity.get("user_type", "") + return True + + def _extract_identity(self) -> dict[str, str]: + """Extract identity from current_user if in request context.""" + try: + if not flask.has_request_context(): + return {} + from flask_login import current_user + + # Check if user is authenticated using the proxy + if not current_user.is_authenticated: + return {} + + # Access the underlying user object + user = current_user + + from models import Account + from models.model import EndUser + + identity: dict[str, str] = {} + + if isinstance(user, Account): + if user.current_tenant_id: + identity["tenant_id"] = user.current_tenant_id + identity["user_id"] = user.id + identity["user_type"] = "account" + elif isinstance(user, EndUser): + identity["tenant_id"] = user.tenant_id + identity["user_id"] = user.id + identity["user_type"] = user.type or "end_user" + + return identity + except Exception: + return {} diff --git a/api/core/logging/structured_formatter.py b/api/core/logging/structured_formatter.py new file mode 100644 index 0000000000..4295d2dd34 --- /dev/null +++ b/api/core/logging/structured_formatter.py @@ -0,0 +1,107 @@ +"""Structured JSON log formatter for Dify.""" + +import logging +import traceback +from datetime import UTC, datetime +from typing import Any + +import orjson + +from configs import dify_config + + +class StructuredJSONFormatter(logging.Formatter): + """ + JSON log formatter following the specified schema: + { + "ts": "ISO 8601 UTC", + "severity": "INFO|ERROR|WARN|DEBUG", + "service": "service name", + "caller": "file:line", + "trace_id": "hex 32", + "span_id": "hex 16", + "identity": { "tenant_id", "user_id", "user_type" }, + "message": "log message", + "attributes": { ... }, + "stack_trace": "..." 
+ } + """ + + SEVERITY_MAP: dict[int, str] = { + logging.DEBUG: "DEBUG", + logging.INFO: "INFO", + logging.WARNING: "WARN", + logging.ERROR: "ERROR", + logging.CRITICAL: "ERROR", + } + + def __init__(self, service_name: str | None = None): + super().__init__() + self._service_name = service_name or dify_config.APPLICATION_NAME + + def format(self, record: logging.LogRecord) -> str: + log_dict = self._build_log_dict(record) + try: + return orjson.dumps(log_dict).decode("utf-8") + except TypeError: + # Fallback: convert non-serializable objects to string + import json + + return json.dumps(log_dict, default=str, ensure_ascii=False) + + def _build_log_dict(self, record: logging.LogRecord) -> dict[str, Any]: + # Core fields + log_dict: dict[str, Any] = { + "ts": datetime.now(UTC).isoformat(timespec="milliseconds").replace("+00:00", "Z"), + "severity": self.SEVERITY_MAP.get(record.levelno, "INFO"), + "service": self._service_name, + "caller": f"{record.filename}:{record.lineno}", + "message": record.getMessage(), + } + + # Trace context (from TraceContextFilter) + trace_id = getattr(record, "trace_id", "") + span_id = getattr(record, "span_id", "") + + if trace_id: + log_dict["trace_id"] = trace_id + if span_id: + log_dict["span_id"] = span_id + + # Identity context (from IdentityContextFilter) + identity = self._extract_identity(record) + if identity: + log_dict["identity"] = identity + + # Dynamic attributes + attributes = getattr(record, "attributes", None) + if attributes: + log_dict["attributes"] = attributes + + # Stack trace for errors with exceptions + if record.exc_info and record.levelno >= logging.ERROR: + log_dict["stack_trace"] = self._format_exception(record.exc_info) + + return log_dict + + def _extract_identity(self, record: logging.LogRecord) -> dict[str, str] | None: + tenant_id = getattr(record, "tenant_id", None) + user_id = getattr(record, "user_id", None) + user_type = getattr(record, "user_type", None) + + if not any([tenant_id, user_id, user_type]): + return None + + identity: dict[str, str] = {} + if tenant_id: + identity["tenant_id"] = tenant_id + if user_id: + identity["user_id"] = user_id + if user_type: + identity["user_type"] = user_type + return identity + + def _format_exception(self, exc_info: tuple[Any, ...]) -> str: + if exc_info and exc_info[0] is not None: + return "".join(traceback.format_exception(*exc_info)) + return "" diff --git a/api/core/mcp/auth/auth_flow.py b/api/core/mcp/auth/auth_flow.py index 92787b39dd..aef1afb235 100644 --- a/api/core/mcp/auth/auth_flow.py +++ b/api/core/mcp/auth/auth_flow.py @@ -47,7 +47,11 @@ def build_protected_resource_metadata_discovery_urls( """ Build a list of URLs to try for Protected Resource Metadata discovery. - Per SEP-985, supports fallback when discovery fails at one URL. + Per RFC 9728 Section 5.1, supports fallback when discovery fails at one URL. + Priority order: + 1. URL from WWW-Authenticate header (if provided) + 2. Well-known URI with path: https://example.com/.well-known/oauth-protected-resource/public/mcp + 3. 
Well-known URI at root: https://example.com/.well-known/oauth-protected-resource """ urls = [] @@ -58,9 +62,18 @@ def build_protected_resource_metadata_discovery_urls( # Fallback: construct from server URL parsed = urlparse(server_url) base_url = f"{parsed.scheme}://{parsed.netloc}" - fallback_url = urljoin(base_url, "/.well-known/oauth-protected-resource") - if fallback_url not in urls: - urls.append(fallback_url) + path = parsed.path.rstrip("/") + + # Priority 2: With path insertion (e.g., /.well-known/oauth-protected-resource/public/mcp) + if path: + path_url = f"{base_url}/.well-known/oauth-protected-resource{path}" + if path_url not in urls: + urls.append(path_url) + + # Priority 3: At root (e.g., /.well-known/oauth-protected-resource) + root_url = f"{base_url}/.well-known/oauth-protected-resource" + if root_url not in urls: + urls.append(root_url) return urls @@ -71,30 +84,34 @@ def build_oauth_authorization_server_metadata_discovery_urls(auth_server_url: st Supports both OAuth 2.0 (RFC 8414) and OpenID Connect discovery. - Per RFC 8414 section 3: - - If issuer has no path: https://example.com/.well-known/oauth-authorization-server - - If issuer has path: https://example.com/.well-known/oauth-authorization-server{path} - - Example: - - issuer: https://example.com/oauth - - metadata: https://example.com/.well-known/oauth-authorization-server/oauth + Per RFC 8414 section 3.1 and section 5, try all possible endpoints: + - OAuth 2.0 with path insertion: https://example.com/.well-known/oauth-authorization-server/tenant1 + - OpenID Connect with path insertion: https://example.com/.well-known/openid-configuration/tenant1 + - OpenID Connect path appending: https://example.com/tenant1/.well-known/openid-configuration + - OAuth 2.0 at root: https://example.com/.well-known/oauth-authorization-server + - OpenID Connect at root: https://example.com/.well-known/openid-configuration """ urls = [] base_url = auth_server_url or server_url parsed = urlparse(base_url) base = f"{parsed.scheme}://{parsed.netloc}" - path = parsed.path.rstrip("/") # Remove trailing slash + path = parsed.path.rstrip("/") + # OAuth 2.0 Authorization Server Metadata at root (MCP-03-26) + urls.append(f"{base}/.well-known/oauth-authorization-server") - # Try OpenID Connect discovery first (more common) - urls.append(urljoin(base + "/", ".well-known/openid-configuration")) + # OpenID Connect Discovery at root + urls.append(f"{base}/.well-known/openid-configuration") - # OAuth 2.0 Authorization Server Metadata (RFC 8414) - # Include the path component if present in the issuer URL if path: - urls.append(urljoin(base, f".well-known/oauth-authorization-server{path}")) - else: - urls.append(urljoin(base, ".well-known/oauth-authorization-server")) + # OpenID Connect Discovery with path insertion + urls.append(f"{base}/.well-known/openid-configuration{path}") + + # OpenID Connect Discovery path appending + urls.append(f"{base}{path}/.well-known/openid-configuration") + + # OAuth 2.0 Authorization Server Metadata with path insertion + urls.append(f"{base}/.well-known/oauth-authorization-server{path}") return urls diff --git a/api/core/mcp/client/sse_client.py b/api/core/mcp/client/sse_client.py index 24ca59ee45..1de1d5a073 100644 --- a/api/core/mcp/client/sse_client.py +++ b/api/core/mcp/client/sse_client.py @@ -61,6 +61,7 @@ class SSETransport: self.timeout = timeout self.sse_read_timeout = sse_read_timeout self.endpoint_url: str | None = None + self.event_source: EventSource | None = None def _validate_endpoint_url(self, endpoint_url: 
str) -> bool: """Validate that the endpoint URL matches the connection origin. @@ -237,6 +238,9 @@ class SSETransport: write_queue: WriteQueue = queue.Queue() status_queue: StatusQueue = queue.Queue() + # Store event_source for graceful shutdown + self.event_source = event_source + # Start SSE reader thread executor.submit(self.sse_reader, event_source, read_queue, status_queue) @@ -296,6 +300,13 @@ def sse_client( logger.exception("Error connecting to SSE endpoint") raise finally: + # Close the SSE connection to unblock the reader thread + if transport.event_source is not None: + try: + transport.event_source.response.close() + except RuntimeError: + pass + # Clean up queues if read_queue: read_queue.put(None) diff --git a/api/core/mcp/client/streamable_client.py b/api/core/mcp/client/streamable_client.py index 805c16c838..5c3cd0d8f8 100644 --- a/api/core/mcp/client/streamable_client.py +++ b/api/core/mcp/client/streamable_client.py @@ -8,6 +8,7 @@ and session management. import logging import queue +import threading from collections.abc import Callable, Generator from concurrent.futures import ThreadPoolExecutor from contextlib import contextmanager @@ -103,6 +104,9 @@ class StreamableHTTPTransport: CONTENT_TYPE: JSON, **self.headers, } + self.stop_event = threading.Event() + self._active_responses: list[httpx.Response] = [] + self._lock = threading.Lock() def _update_headers_with_session(self, base_headers: dict[str, str]) -> dict[str, str]: """Update headers with session ID if available.""" @@ -111,6 +115,30 @@ class StreamableHTTPTransport: headers[MCP_SESSION_ID] = self.session_id return headers + def _register_response(self, response: httpx.Response): + """Register a response for cleanup on shutdown.""" + with self._lock: + self._active_responses.append(response) + + def _unregister_response(self, response: httpx.Response): + """Unregister a response after it's closed.""" + with self._lock: + try: + self._active_responses.remove(response) + except ValueError as e: + logger.debug("Ignoring error during response unregister: %s", e) + + def close_active_responses(self): + """Close all active SSE connections to unblock threads.""" + with self._lock: + responses_to_close = list(self._active_responses) + self._active_responses.clear() + for response in responses_to_close: + try: + response.close() + except RuntimeError as e: + logger.debug("Ignoring error during active response close: %s", e) + def _is_initialization_request(self, message: JSONRPCMessage) -> bool: """Check if the message is an initialization request.""" return isinstance(message.root, JSONRPCRequest) and message.root.method == "initialize" @@ -195,11 +223,21 @@ class StreamableHTTPTransport: event_source.response.raise_for_status() logger.debug("GET SSE connection established") - for sse in event_source.iter_sse(): - self._handle_sse_event(sse, server_to_client_queue) + # Register response for cleanup + self._register_response(event_source.response) + + try: + for sse in event_source.iter_sse(): + if self.stop_event.is_set(): + logger.debug("GET stream received stop signal") + break + self._handle_sse_event(sse, server_to_client_queue) + finally: + self._unregister_response(event_source.response) except Exception as exc: - logger.debug("GET stream error (non-fatal): %s", exc) + if not self.stop_event.is_set(): + logger.debug("GET stream error (non-fatal): %s", exc) def _handle_resumption_request(self, ctx: RequestContext): """Handle a resumption request using GET with SSE.""" @@ -224,15 +262,24 @@ class 
StreamableHTTPTransport: event_source.response.raise_for_status() logger.debug("Resumption GET SSE connection established") - for sse in event_source.iter_sse(): - is_complete = self._handle_sse_event( - sse, - ctx.server_to_client_queue, - original_request_id, - ctx.metadata.on_resumption_token_update if ctx.metadata else None, - ) - if is_complete: - break + # Register response for cleanup + self._register_response(event_source.response) + + try: + for sse in event_source.iter_sse(): + if self.stop_event.is_set(): + logger.debug("Resumption stream received stop signal") + break + is_complete = self._handle_sse_event( + sse, + ctx.server_to_client_queue, + original_request_id, + ctx.metadata.on_resumption_token_update if ctx.metadata else None, + ) + if is_complete: + break + finally: + self._unregister_response(event_source.response) def _handle_post_request(self, ctx: RequestContext): """Handle a POST request with response processing.""" @@ -266,17 +313,20 @@ class StreamableHTTPTransport: if is_initialization: self._maybe_extract_session_id_from_response(response) - content_type = cast(str, response.headers.get(CONTENT_TYPE, "").lower()) + # Per https://modelcontextprotocol.io/specification/2025-06-18/basic#notifications: + # The server MUST NOT send a response to notifications. + if isinstance(message.root, JSONRPCRequest): + content_type = cast(str, response.headers.get(CONTENT_TYPE, "").lower()) - if content_type.startswith(JSON): - self._handle_json_response(response, ctx.server_to_client_queue) - elif content_type.startswith(SSE): - self._handle_sse_response(response, ctx) - else: - self._handle_unexpected_content_type( - content_type, - ctx.server_to_client_queue, - ) + if content_type.startswith(JSON): + self._handle_json_response(response, ctx.server_to_client_queue) + elif content_type.startswith(SSE): + self._handle_sse_response(response, ctx) + else: + self._handle_unexpected_content_type( + content_type, + ctx.server_to_client_queue, + ) def _handle_json_response( self, @@ -295,17 +345,27 @@ class StreamableHTTPTransport: def _handle_sse_response(self, response: httpx.Response, ctx: RequestContext): """Handle SSE response from the server.""" try: + # Register response for cleanup + self._register_response(response) + event_source = EventSource(response) - for sse in event_source.iter_sse(): - is_complete = self._handle_sse_event( - sse, - ctx.server_to_client_queue, - resumption_callback=(ctx.metadata.on_resumption_token_update if ctx.metadata else None), - ) - if is_complete: - break + try: + for sse in event_source.iter_sse(): + if self.stop_event.is_set(): + logger.debug("SSE response stream received stop signal") + break + is_complete = self._handle_sse_event( + sse, + ctx.server_to_client_queue, + resumption_callback=(ctx.metadata.on_resumption_token_update if ctx.metadata else None), + ) + if is_complete: + break + finally: + self._unregister_response(response) except Exception as e: - ctx.server_to_client_queue.put(e) + if not self.stop_event.is_set(): + ctx.server_to_client_queue.put(e) def _handle_unexpected_content_type( self, @@ -345,6 +405,11 @@ class StreamableHTTPTransport: """ while True: try: + # Check if we should stop + if self.stop_event.is_set(): + logger.debug("Post writer received stop signal") + break + # Read message from client queue with timeout to check stop_event periodically session_message = client_to_server_queue.get(timeout=DEFAULT_QUEUE_READ_TIMEOUT) if session_message is None: @@ -381,7 +446,8 @@ class StreamableHTTPTransport: except 
queue.Empty: continue except Exception as exc: - server_to_client_queue.put(exc) + if not self.stop_event.is_set(): + server_to_client_queue.put(exc) def terminate_session(self, client: httpx.Client): """Terminate the session by sending a DELETE request.""" @@ -465,6 +531,12 @@ def streamablehttp_client( transport.get_session_id, ) finally: + # Set stop event to signal all threads to stop + transport.stop_event.set() + + # Close all active SSE connections to unblock threads + transport.close_active_responses() + if transport.session_id and terminate_on_close: transport.terminate_session(client) diff --git a/api/core/mcp/mcp_client.py b/api/core/mcp/mcp_client.py index b0e0dab9be..2b0645b558 100644 --- a/api/core/mcp/mcp_client.py +++ b/api/core/mcp/mcp_client.py @@ -59,7 +59,7 @@ class MCPClient: try: logger.debug("Not supported method %s found in URL path, trying default 'mcp' method.", method_name) self.connect_server(sse_client, "sse") - except MCPConnectionError: + except (MCPConnectionError, ValueError): logger.debug("MCP connection failed with 'sse', falling back to 'mcp' method.") self.connect_server(streamablehttp_client, "mcp") diff --git a/api/core/mcp/session/base_session.py b/api/core/mcp/session/base_session.py index c97ae6eac7..84a6fd0d1f 100644 --- a/api/core/mcp/session/base_session.py +++ b/api/core/mcp/session/base_session.py @@ -68,13 +68,7 @@ class RequestResponder(Generic[ReceiveRequestT, SendResultT]): request_id: RequestId, request_meta: RequestParams.Meta | None, request: ReceiveRequestT, - session: """BaseSession[ - SendRequestT, - SendNotificationT, - SendResultT, - ReceiveRequestT, - ReceiveNotificationT - ]""", + session: """BaseSession[SendRequestT, SendNotificationT, SendResultT, ReceiveRequestT, ReceiveNotificationT]""", on_complete: Callable[["RequestResponder[ReceiveRequestT, SendResultT]"], Any], ): self.request_id = request_id diff --git a/api/core/model_runtime/README.md b/api/core/model_runtime/README.md index a6caa7eb1e..b9d2c55210 100644 --- a/api/core/model_runtime/README.md +++ b/api/core/model_runtime/README.md @@ -18,34 +18,20 @@ This module provides the interface for invoking and authenticating various model - Model provider display - ![image-20231210143654461](./docs/en_US/images/index/image-20231210143654461.png) - - Displays a list of all supported providers, including provider names, icons, supported model types list, predefined model list, configuration method, and credentials form rules, etc. For detailed rule design, see: [Schema](./docs/en_US/schema.md). + Displays a list of all supported providers, including provider names, icons, supported model types list, predefined model list, configuration method, and credentials form rules, etc. - Selectable model list display - ![image-20231210144229650](./docs/en_US/images/index/image-20231210144229650.png) - After configuring provider/model credentials, the dropdown (application orchestration interface/default model) allows viewing of the available LLM list. Greyed out items represent predefined model lists from providers without configured credentials, facilitating user review of supported models. - In addition, this list also returns configurable parameter information and rules for LLM, as shown below: - - ![image-20231210144814617](./docs/en_US/images/index/image-20231210144814617.png) - - These parameters are all defined in the backend, allowing different settings for various parameters supported by different models, as detailed in: [Schema](./docs/en_US/schema.md#ParameterRule). 
+ In addition, this list also returns configurable parameter information and rules for LLM. These parameters are all defined in the backend, allowing different settings for various parameters supported by different models. - Provider/model credential authentication - ![image-20231210151548521](./docs/en_US/images/index/image-20231210151548521.png) - - ![image-20231210151628992](./docs/en_US/images/index/image-20231210151628992.png) - - The provider list returns configuration information for the credentials form, which can be authenticated through Runtime's interface. The first image above is a provider credential DEMO, and the second is a model credential DEMO. + The provider list returns configuration information for the credentials form, which can be authenticated through Runtime's interface. ## Structure -![](./docs/en_US/images/index/image-20231210165243632.png) - Model Runtime is divided into three layers: - The outermost layer is the factory method @@ -60,9 +46,6 @@ Model Runtime is divided into three layers: It offers direct invocation of various model types, predefined model configuration information, getting predefined/remote model lists, model credential authentication methods. Different models provide additional special methods, like LLM's pre-computed tokens method, cost information obtaining method, etc., **allowing horizontal expansion** for different models under the same provider (within supported model types). -## Next Steps +## Documentation -- Add new provider configuration: [Link](./docs/en_US/provider_scale_out.md) -- Add new models for existing providers: [Link](./docs/en_US/provider_scale_out.md#AddModel) -- View YAML configuration rules: [Link](./docs/en_US/schema.md) -- Implement interface methods: [Link](./docs/en_US/interfaces.md) +For detailed documentation on how to add new providers or models, please refer to the [Dify documentation](https://docs.dify.ai/). 
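The structured-logging modules added earlier in this diff (`api/core/logging/*`) compose through the stdlib `logging` machinery: the filters stamp trace and identity fields onto each record, and the formatter serializes whatever fields it finds. Below is a minimal wiring sketch; it assumes a plain `StreamHandler` and a `service_name` of `"api"` for brevity, rather than the application's real logging configuration, which this diff does not show.

```python
import logging

from core.logging import (
    IdentityContextFilter,
    StructuredJSONFormatter,
    TraceContextFilter,
    init_request_context,
)

handler = logging.StreamHandler()
handler.setFormatter(StructuredJSONFormatter(service_name="api"))  # service name is an assumption
# Handler-level filters run for every record the handler emits, so each JSON line
# gets trace_id/span_id/req_id plus tenant/user identity when available.
handler.addFilter(TraceContextFilter())
handler.addFilter(IdentityContextFilter())

root = logging.getLogger()
root.addHandler(handler)
root.setLevel(logging.INFO)

init_request_context()  # per the context module docstring: call at the start of each request
logging.getLogger(__name__).info("indexing done", extra={"attributes": {"docs": 3}})
```

Passing `extra={"attributes": {...}}` works because `StructuredJSONFormatter` reads an `attributes` field off the record; keeping context capture in filters and serialization in the formatter lets either side be swapped independently.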
diff --git a/api/core/model_runtime/README_CN.md b/api/core/model_runtime/README_CN.md index dfe614347a..0a8b56b3fe 100644 --- a/api/core/model_runtime/README_CN.md +++ b/api/core/model_runtime/README_CN.md @@ -18,34 +18,20 @@ - 模型供应商展示 - ![image-20231210143654461](./docs/zh_Hans/images/index/image-20231210143654461.png) - -​ 展示所有已支持的供应商列表,除了返回供应商名称、图标之外,还提供了支持的模型类型列表,预定义模型列表、配置方式以及配置凭据的表单规则等等,规则设计详见:[Schema](./docs/zh_Hans/schema.md)。 + 展示所有已支持的供应商列表,除了返回供应商名称、图标之外,还提供了支持的模型类型列表,预定义模型列表、配置方式以及配置凭据的表单规则等等。 - 可选择的模型列表展示 - ![image-20231210144229650](./docs/zh_Hans/images/index/image-20231210144229650.png) + 配置供应商/模型凭据后,可在此下拉(应用编排界面/默认模型)查看可用的 LLM 列表,其中灰色的为未配置凭据供应商的预定义模型列表,方便用户查看已支持的模型。 -​ 配置供应商/模型凭据后,可在此下拉(应用编排界面/默认模型)查看可用的 LLM 列表,其中灰色的为未配置凭据供应商的预定义模型列表,方便用户查看已支持的模型。 - -​ 除此之外,该列表还返回了 LLM 可配置的参数信息和规则,如下图: - -​ ![image-20231210144814617](./docs/zh_Hans/images/index/image-20231210144814617.png) - -​ 这里的参数均为后端定义,相比之前只有 5 种固定参数,这里可为不同模型设置所支持的各种参数,详见:[Schema](./docs/zh_Hans/schema.md#ParameterRule)。 + 除此之外,该列表还返回了 LLM 可配置的参数信息和规则。这里的参数均为后端定义,相比之前只有 5 种固定参数,这里可为不同模型设置所支持的各种参数。 - 供应商/模型凭据鉴权 - ![image-20231210151548521](./docs/zh_Hans/images/index/image-20231210151548521.png) - -![image-20231210151628992](./docs/zh_Hans/images/index/image-20231210151628992.png) - -​ 供应商列表返回了凭据表单的配置信息,可通过 Runtime 提供的接口对凭据进行鉴权,上图 1 为供应商凭据 DEMO,上图 2 为模型凭据 DEMO。 + 供应商列表返回了凭据表单的配置信息,可通过 Runtime 提供的接口对凭据进行鉴权。 ## 结构 -![](./docs/zh_Hans/images/index/image-20231210165243632.png) - Model Runtime 分三层: - 最外层为工厂方法 @@ -59,8 +45,7 @@ Model Runtime 分三层: 对于供应商/模型凭据,有两种情况 - 如 OpenAI 这类中心化供应商,需要定义如**api_key**这类的鉴权凭据 - - 如[**Xinference**](https://github.com/xorbitsai/inference)这类本地部署的供应商,需要定义如**server_url**这类的地址凭据,有时候还需要定义**model_uid**之类的模型类型凭据,就像下面这样,当在供应商层定义了这些凭据后,就可以在前端页面上直接展示,无需修改前端逻辑。 - ![Alt text](docs/zh_Hans/images/index/image.png) + - 如[**Xinference**](https://github.com/xorbitsai/inference)这类本地部署的供应商,需要定义如**server_url**这类的地址凭据,有时候还需要定义**model_uid**之类的模型类型凭据。当在供应商层定义了这些凭据后,就可以在前端页面上直接展示,无需修改前端逻辑。 当配置好凭据后,就可以通过 DifyRuntime 的外部接口直接获取到对应供应商所需要的**Schema**(凭据表单规则),从而在可以在不修改前端逻辑的情况下,提供新的供应商/模型的支持。 @@ -74,20 +59,6 @@ Model Runtime 分三层: - 模型凭据 (**在供应商层定义**):这是一类不经常变动,一般在配置好后就不会再变动的参数,如 **api_key**、**server_url** 等。在 DifyRuntime 中,他们的参数名一般为**credentials: dict[str, any]**,Provider 层的 credentials 会直接被传递到这一层,不需要再单独定义。 -## 下一步 +## 文档 -### [增加新的供应商配置 👈🏻](./docs/zh_Hans/provider_scale_out.md) - -当添加后,这里将会出现一个新的供应商 - -![Alt text](docs/zh_Hans/images/index/image-1.png) - -### [为已存在的供应商新增模型 👈🏻](./docs/zh_Hans/provider_scale_out.md#%E5%A2%9E%E5%8A%A0%E6%A8%A1%E5%9E%8B) - -当添加后,对应供应商的模型列表中将会出现一个新的预定义模型供用户选择,如 GPT-3.5 GPT-4 ChatGLM3-6b 等,而对于支持自定义模型的供应商,则不需要新增模型。 - -![Alt text](docs/zh_Hans/images/index/image-2.png) - -### [接口的具体实现 👈🏻](./docs/zh_Hans/interfaces.md) - -你可以在这里找到你想要查看的接口的具体实现,以及接口的参数和返回值的具体含义。 +有关如何添加新供应商或模型的详细文档,请参阅 [Dify 文档](https://docs.dify.ai/)。 diff --git a/api/core/model_runtime/entities/message_entities.py b/api/core/model_runtime/entities/message_entities.py index 89dae2dbff..3ac83b4c96 100644 --- a/api/core/model_runtime/entities/message_entities.py +++ b/api/core/model_runtime/entities/message_entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from abc import ABC from collections.abc import Mapping, Sequence from enum import StrEnum, auto @@ -17,7 +19,7 @@ class PromptMessageRole(StrEnum): TOOL = auto() @classmethod - def value_of(cls, value: str) -> "PromptMessageRole": + def value_of(cls, value: str) -> PromptMessageRole: """ Get value of given mode. 
diff --git a/api/core/model_runtime/entities/model_entities.py b/api/core/model_runtime/entities/model_entities.py index aee6ce1108..19194d162c 100644 --- a/api/core/model_runtime/entities/model_entities.py +++ b/api/core/model_runtime/entities/model_entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from decimal import Decimal from enum import StrEnum, auto from typing import Any @@ -20,7 +22,7 @@ class ModelType(StrEnum): TTS = auto() @classmethod - def value_of(cls, origin_model_type: str) -> "ModelType": + def value_of(cls, origin_model_type: str) -> ModelType: """ Get model type from origin model type. @@ -103,7 +105,7 @@ class DefaultParameterName(StrEnum): JSON_SCHEMA = auto() @classmethod - def value_of(cls, value: Any) -> "DefaultParameterName": + def value_of(cls, value: Any) -> DefaultParameterName: """ Get parameter name from value. diff --git a/api/core/model_runtime/entities/provider_entities.py b/api/core/model_runtime/entities/provider_entities.py index 648b209ef1..2d88751668 100644 --- a/api/core/model_runtime/entities/provider_entities.py +++ b/api/core/model_runtime/entities/provider_entities.py @@ -100,7 +100,6 @@ class SimpleProviderEntity(BaseModel): label: I18nObject icon_small: I18nObject | None = None icon_small_dark: I18nObject | None = None - icon_large: I18nObject | None = None supported_model_types: Sequence[ModelType] models: list[AIModelEntity] = [] @@ -123,7 +122,6 @@ class ProviderEntity(BaseModel): label: I18nObject description: I18nObject | None = None icon_small: I18nObject | None = None - icon_large: I18nObject | None = None icon_small_dark: I18nObject | None = None background: str | None = None help: ProviderHelpEntity | None = None @@ -157,7 +155,6 @@ class ProviderEntity(BaseModel): provider=self.provider, label=self.label, icon_small=self.icon_small, - icon_large=self.icon_large, supported_model_types=self.supported_model_types, models=self.models, ) diff --git a/api/core/model_runtime/model_providers/model_provider_factory.py b/api/core/model_runtime/model_providers/model_provider_factory.py index b8704ef4ed..28f162a928 100644 --- a/api/core/model_runtime/model_providers/model_provider_factory.py +++ b/api/core/model_runtime/model_providers/model_provider_factory.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import hashlib import logging from collections.abc import Sequence @@ -38,7 +40,7 @@ class ModelProviderFactory: plugin_providers = self.get_plugin_model_providers() return [provider.declaration for provider in plugin_providers] - def get_plugin_model_providers(self) -> Sequence["PluginModelProviderEntity"]: + def get_plugin_model_providers(self) -> Sequence[PluginModelProviderEntity]: """ Get all plugin model providers :return: list of plugin model providers @@ -76,7 +78,7 @@ class ModelProviderFactory: plugin_model_provider_entity = self.get_plugin_model_provider(provider=provider) return plugin_model_provider_entity.declaration - def get_plugin_model_provider(self, provider: str) -> "PluginModelProviderEntity": + def get_plugin_model_provider(self, provider: str) -> PluginModelProviderEntity: """ Get plugin model provider :param provider: provider name @@ -285,7 +287,7 @@ class ModelProviderFactory: """ Get provider icon :param provider: provider name - :param icon_type: icon type (icon_small or icon_large) + :param icon_type: icon type (icon_small or icon_small_dark) :param lang: language (zh_Hans or en_US) :return: provider icon """ @@ -309,13 +311,7 @@ class ModelProviderFactory: else: file_name = 
provider_schema.icon_small_dark.en_US else: - if not provider_schema.icon_large: - raise ValueError(f"Provider {provider} does not have large icon.") - - if lang.lower() == "zh_hans": - file_name = provider_schema.icon_large.zh_Hans - else: - file_name = provider_schema.icon_large.en_US + raise ValueError(f"Unsupported icon type: {icon_type}.") if not file_name: raise ValueError(f"Provider {provider} does not have icon.") diff --git a/api/core/ops/utils.py b/api/core/ops/utils.py index c00f785034..631e3b77b2 100644 --- a/api/core/ops/utils.py +++ b/api/core/ops/utils.py @@ -54,7 +54,7 @@ def generate_dotted_order(run_id: str, start_time: Union[str, datetime], parent_ generate dotted_order for langsmith """ start_time = datetime.fromisoformat(start_time) if isinstance(start_time, str) else start_time - timestamp = start_time.strftime("%Y%m%dT%H%M%S%f")[:-3] + "Z" + timestamp = start_time.strftime("%Y%m%dT%H%M%S%f") + "Z" current_segment = f"{timestamp}{run_id}" if parent_dotted_order is None: diff --git a/api/core/plugin/entities/parameters.py b/api/core/plugin/entities/parameters.py index 88a3a7bd43..bfa662b9f6 100644 --- a/api/core/plugin/entities/parameters.py +++ b/api/core/plugin/entities/parameters.py @@ -76,7 +76,7 @@ class PluginParameter(BaseModel): auto_generate: PluginParameterAutoGenerate | None = None template: PluginParameterTemplate | None = None required: bool = False - default: Union[float, int, str, bool] | None = None + default: Union[float, int, str, bool, list, dict] | None = None min: Union[float, int] | None = None max: Union[float, int] | None = None precision: int | None = None diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index 3b83121357..6674228dc0 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import enum from collections.abc import Mapping, Sequence from datetime import datetime @@ -242,7 +244,7 @@ class CredentialType(enum.StrEnum): return [item.value for item in cls] @classmethod - def of(cls, credential_type: str) -> "CredentialType": + def of(cls, credential_type: str) -> CredentialType: type_name = credential_type.lower() if type_name in {"api-key", "api_key"}: return cls.API_KEY diff --git a/api/core/plugin/impl/base.py b/api/core/plugin/impl/base.py index a1c84bd5d9..0e49824ad0 100644 --- a/api/core/plugin/impl/base.py +++ b/api/core/plugin/impl/base.py @@ -39,7 +39,7 @@ from core.trigger.errors import ( plugin_daemon_inner_api_baseurl = URL(str(dify_config.PLUGIN_DAEMON_URL)) _plugin_daemon_timeout_config = cast( float | httpx.Timeout | None, - getattr(dify_config, "PLUGIN_DAEMON_TIMEOUT", 300.0), + getattr(dify_config, "PLUGIN_DAEMON_TIMEOUT", 600.0), ) plugin_daemon_request_timeout: httpx.Timeout | None if _plugin_daemon_timeout_config is None: @@ -103,6 +103,9 @@ class BasePluginClient: prepared_headers["X-Api-Key"] = dify_config.PLUGIN_DAEMON_KEY prepared_headers.setdefault("Accept-Encoding", "gzip, deflate, br") + # Inject traceparent header for distributed tracing + self._inject_trace_headers(prepared_headers) + prepared_data: bytes | dict[str, Any] | str | None = ( data if isinstance(data, (bytes, str, dict)) or data is None else None ) @@ -114,6 +117,31 @@ class BasePluginClient: return str(url), prepared_headers, prepared_data, params, files + def _inject_trace_headers(self, headers: dict[str, str]) -> None: + """ + Inject W3C traceparent header for distributed tracing. 
+ + This ensures trace context is propagated to plugin daemon even if + HTTPXClientInstrumentor doesn't cover module-level httpx functions. + """ + if not dify_config.ENABLE_OTEL: + return + + import contextlib + + # Skip if already present (case-insensitive check) + for key in headers: + if key.lower() == "traceparent": + return + + # Inject traceparent - works as fallback when OTEL instrumentation doesn't cover this call + with contextlib.suppress(Exception): + from core.helper.trace_id_helper import generate_traceparent_header + + traceparent = generate_traceparent_header() + if traceparent: + headers["traceparent"] = traceparent + def _stream_request( self, method: str, diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py index 6c818bdc8b..fdbfca4330 100644 --- a/api/core/provider_manager.py +++ b/api/core/provider_manager.py @@ -331,7 +331,6 @@ class ProviderManager: provider=provider_schema.provider, label=provider_schema.label, icon_small=provider_schema.icon_small, - icon_large=provider_schema.icon_large, supported_model_types=provider_schema.supported_model_types, ), ) @@ -619,18 +618,18 @@ class ProviderManager: ) for quota in configuration.quotas: - if quota.quota_type == ProviderQuotaType.TRIAL: + if quota.quota_type in (ProviderQuotaType.TRIAL, ProviderQuotaType.PAID): # Init trial provider records if not exists - if ProviderQuotaType.TRIAL not in provider_quota_to_provider_record_dict: + if quota.quota_type not in provider_quota_to_provider_record_dict: try: # FIXME ignore the type error, only TrialHostingQuota has limit need to change the logic new_provider_record = Provider( tenant_id=tenant_id, # TODO: Use provider name with prefix after the data migration. provider_name=ModelProviderID(provider_name).provider_name, - provider_type=ProviderType.SYSTEM, - quota_type=ProviderQuotaType.TRIAL, - quota_limit=quota.quota_limit, # type: ignore + provider_type=ProviderType.SYSTEM.value, + quota_type=quota.quota_type, + quota_limit=0, # type: ignore quota_used=0, is_valid=True, ) @@ -642,8 +641,8 @@ class ProviderManager: stmt = select(Provider).where( Provider.tenant_id == tenant_id, Provider.provider_name == ModelProviderID(provider_name).provider_name, - Provider.provider_type == ProviderType.SYSTEM, - Provider.quota_type == ProviderQuotaType.TRIAL, + Provider.provider_type == ProviderType.SYSTEM.value, + Provider.quota_type == quota.quota_type, ) existed_provider_record = db.session.scalar(stmt) if not existed_provider_record: @@ -913,6 +912,22 @@ class ProviderManager: provider_record ) quota_configurations = [] + + if dify_config.EDITION == "CLOUD": + from services.credit_pool_service import CreditPoolService + + trial_pool = CreditPoolService.get_pool( + tenant_id=tenant_id, + pool_type=ProviderQuotaType.TRIAL.value, + ) + paid_pool = CreditPoolService.get_pool( + tenant_id=tenant_id, + pool_type=ProviderQuotaType.PAID.value, + ) + else: + trial_pool = None + paid_pool = None + for provider_quota in provider_hosting_configuration.quotas: if provider_quota.quota_type not in quota_type_to_provider_records_dict: if provider_quota.quota_type == ProviderQuotaType.FREE: @@ -933,16 +948,36 @@ class ProviderManager: raise ValueError("quota_used is None") if provider_record.quota_limit is None: raise ValueError("quota_limit is None") + if provider_quota.quota_type == ProviderQuotaType.TRIAL and trial_pool is not None: + quota_configuration = QuotaConfiguration( + quota_type=provider_quota.quota_type, + quota_unit=provider_hosting_configuration.quota_unit or QuotaUnit.TOKENS, + quota_used=trial_pool.quota_used, + quota_limit=trial_pool.quota_limit, + is_valid=trial_pool.quota_limit > trial_pool.quota_used or trial_pool.quota_limit == -1, + restrict_models=provider_quota.restrict_models, + ) - quota_configuration = QuotaConfiguration( - quota_type=provider_quota.quota_type, - quota_unit=provider_hosting_configuration.quota_unit or QuotaUnit.TOKENS, - quota_used=provider_record.quota_used, - quota_limit=provider_record.quota_limit, - is_valid=provider_record.quota_limit > provider_record.quota_used - or provider_record.quota_limit == -1, - restrict_models=provider_quota.restrict_models, - ) + elif provider_quota.quota_type == ProviderQuotaType.PAID and paid_pool is not None: + quota_configuration = QuotaConfiguration( + quota_type=provider_quota.quota_type, + quota_unit=provider_hosting_configuration.quota_unit or QuotaUnit.TOKENS, + quota_used=paid_pool.quota_used, + quota_limit=paid_pool.quota_limit, + is_valid=paid_pool.quota_limit > paid_pool.quota_used or paid_pool.quota_limit == -1, + restrict_models=provider_quota.restrict_models, + ) + + else: + quota_configuration = QuotaConfiguration( + quota_type=provider_quota.quota_type, + quota_unit=provider_hosting_configuration.quota_unit or QuotaUnit.TOKENS, + quota_used=provider_record.quota_used, + quota_limit=provider_record.quota_limit, + is_valid=provider_record.quota_limit > provider_record.quota_used + or provider_record.quota_limit == -1, + restrict_models=provider_quota.restrict_models, + ) quota_configurations.append(quota_configuration) diff --git a/api/core/rag/cleaner/clean_processor.py b/api/core/rag/cleaner/clean_processor.py index 9cb009035b..e182c35b99 100644 --- a/api/core/rag/cleaner/clean_processor.py +++ b/api/core/rag/cleaner/clean_processor.py @@ -27,26 +27,44 @@ class CleanProcessor: pattern = r"([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)" text = re.sub(pattern, "", text) - # Remove URL but keep Markdown image URLs - # First, temporarily replace Markdown image URLs with a placeholder - markdown_image_pattern = r"!\[.*?\]\((https?://[^\s)]+)\)" - placeholders: list[str] = [] + # Remove URL but keep Markdown image URLs and link URLs + # Replace the ENTIRE markdown link/image with a single placeholder to protect + # the link text (which might also be a URL) from being removed + markdown_link_pattern = r"\[([^\]]*)\]\((https?://[^)]+)\)" + markdown_image_pattern = r"!\[.*?\]\((https?://[^)]+)\)" + placeholders: list[tuple[str, str, str]] = [] # (type, text, url) - def replace_with_placeholder(match, placeholders=placeholders): + def replace_markdown_with_placeholder(match, placeholders=placeholders): + link_type = "link" + link_text = match.group(1) + url = match.group(2) + placeholder = f"__MARKDOWN_PLACEHOLDER_{len(placeholders)}__" + placeholders.append((link_type, link_text, url)) + return placeholder + + def replace_image_with_placeholder(match, placeholders=placeholders): + link_type = "image" + url = match.group(1) - placeholder = f"__MARKDOWN_IMAGE_URL_{len(placeholders)}__" - placeholders.append(url) - return f"![image]({placeholder})" + placeholder = f"__MARKDOWN_PLACEHOLDER_{len(placeholders)}__" + placeholders.append((link_type, "image", url)) + return placeholder - text = re.sub(markdown_image_pattern, replace_with_placeholder, text) + # Protect markdown links first + text = re.sub(markdown_link_pattern, replace_markdown_with_placeholder, text) + # Then protect markdown images + text = re.sub(markdown_image_pattern, replace_image_with_placeholder, text) # Now 
remove all remaining URLs - url_pattern = r"https?://[^\s)]+" + url_pattern = r"https?://\S+" text = re.sub(url_pattern, "", text) - # Finally, restore the Markdown image URLs - for i, url in enumerate(placeholders): - text = text.replace(f"__MARKDOWN_IMAGE_URL_{i}__", url) + # Restore the Markdown links and images + for i, (link_type, text_or_alt, url) in enumerate(placeholders): + placeholder = f"__MARKDOWN_PLACEHOLDER_{i}__" + if link_type == "link": + text = text.replace(placeholder, f"[{text_or_alt}]({url})") + else: # image + text = text.replace(placeholder, f"![{text_or_alt}]({url})") return text def filter_string(self, text): diff --git a/api/core/rag/datasource/keyword/jieba/jieba.py b/api/core/rag/datasource/keyword/jieba/jieba.py index 97052717db..0f19ecadc8 100644 --- a/api/core/rag/datasource/keyword/jieba/jieba.py +++ b/api/core/rag/datasource/keyword/jieba/jieba.py @@ -90,13 +90,17 @@ class Jieba(BaseKeyword): sorted_chunk_indices = self._retrieve_ids_by_query(keyword_table or {}, query, k) documents = [] + + segment_query_stmt = db.session.query(DocumentSegment).where( + DocumentSegment.dataset_id == self.dataset.id, DocumentSegment.index_node_id.in_(sorted_chunk_indices) + ) + if document_ids_filter: + segment_query_stmt = segment_query_stmt.where(DocumentSegment.document_id.in_(document_ids_filter)) + + segments = db.session.execute(segment_query_stmt).scalars().all() + segment_map = {segment.index_node_id: segment for segment in segments} for chunk_index in sorted_chunk_indices: - segment_query = db.session.query(DocumentSegment).where( - DocumentSegment.dataset_id == self.dataset.id, DocumentSegment.index_node_id == chunk_index - ) - if document_ids_filter: - segment_query = segment_query.where(DocumentSegment.document_id.in_(document_ids_filter)) - segment = segment_query.first() + segment = segment_map.get(chunk_index) if segment: documents.append( diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py index a4b48ab092..71e22b2dce 100644 --- a/api/core/rag/datasource/retrieval_service.py +++ b/api/core/rag/datasource/retrieval_service.py @@ -1,4 +1,5 @@ import concurrent.futures +import logging from concurrent.futures import ThreadPoolExecutor from typing import Any @@ -7,12 +8,13 @@ from sqlalchemy import select from sqlalchemy.orm import Session, load_only from configs import dify_config +from core.db.session_factory import session_factory from core.model_manager import ModelManager from core.model_runtime.entities.model_entities import ModelType from core.rag.data_post_processor.data_post_processor import DataPostProcessor from core.rag.datasource.keyword.keyword_factory import Keyword from core.rag.datasource.vdb.vector_factory import Vector -from core.rag.embedding.retrieval import RetrievalSegments +from core.rag.embedding.retrieval import RetrievalChildChunk, RetrievalSegments from core.rag.entities.metadata_entities import MetadataCondition from core.rag.index_processor.constant.doc_type import DocType from core.rag.index_processor.constant.index_type import IndexStructureType @@ -35,6 +37,8 @@ default_retrieval_model = { "score_threshold_enabled": False, } +logger = logging.getLogger(__name__) + class RetrievalService: # Cache precompiled regular expressions to avoid repeated compilation @@ -105,7 +109,12 @@ class RetrievalService: ) ) - concurrent.futures.wait(futures, timeout=3600, return_when=concurrent.futures.ALL_COMPLETED) + if futures: + for future in concurrent.futures.as_completed(futures, 
timeout=3600): + if exceptions: + for f in futures: + f.cancel() + break if exceptions: raise ValueError(";\n".join(exceptions)) @@ -138,37 +147,47 @@ class RetrievalService: @classmethod def _deduplicate_documents(cls, documents: list[Document]) -> list[Document]: - """Deduplicate documents based on doc_id to avoid duplicate chunks in hybrid search.""" + """Deduplicate documents in O(n) while preserving first-seen order. + + Rules: + - For provider == "dify" and metadata["doc_id"] exists: keep the doc with the highest + metadata["score"] among duplicates; if a later duplicate has no score, ignore it. + - For non-dify documents (or dify without doc_id): deduplicate by content key + (provider, page_content), keeping the first occurrence. + """ if not documents: return documents - unique_documents = [] - seen_doc_ids = set() + # Map of dedup key -> chosen Document + chosen: dict[tuple, Document] = {} + # Preserve the order of first appearance of each dedup key + order: list[tuple] = [] - for document in documents: - # For dify provider documents, use doc_id for deduplication - if document.provider == "dify" and document.metadata is not None and "doc_id" in document.metadata: - doc_id = document.metadata["doc_id"] - if doc_id not in seen_doc_ids: - seen_doc_ids.add(doc_id) - unique_documents.append(document) - # If duplicate, keep the one with higher score - elif "score" in document.metadata: - # Find existing document with same doc_id and compare scores - for i, existing_doc in enumerate(unique_documents): - if ( - existing_doc.metadata - and existing_doc.metadata.get("doc_id") == doc_id - and existing_doc.metadata.get("score", 0) < document.metadata.get("score", 0) - ): - unique_documents[i] = document - break + for doc in documents: + is_dify = doc.provider == "dify" + doc_id = (doc.metadata or {}).get("doc_id") if is_dify else None + + if is_dify and doc_id: + key = ("dify", doc_id) + if key not in chosen: + chosen[key] = doc + order.append(key) + else: + # Only replace if the new one has a score and it's strictly higher + if "score" in doc.metadata: + new_score = float(doc.metadata.get("score", 0.0)) + old_score = float(chosen[key].metadata.get("score", 0.0)) if chosen[key].metadata else 0.0 + if new_score > old_score: + chosen[key] = doc else: - # For non-dify documents, use content-based deduplication - if document not in unique_documents: - unique_documents.append(document) + # Content-based dedup for non-dify or dify without doc_id + content_key = (doc.provider or "dify", doc.page_content) + if content_key not in chosen: + chosen[content_key] = doc + order.append(content_key) + # If duplicate content appears, we keep the first occurrence (no score comparison) - return unique_documents + return [chosen[k] for k in order] @classmethod def _get_dataset(cls, dataset_id: str) -> Dataset | None: @@ -199,6 +218,7 @@ class RetrievalService: ) all_documents.extend(documents) except Exception as e: + logger.error(e, exc_info=True) exceptions.append(str(e)) @classmethod @@ -292,6 +312,7 @@ class RetrievalService: else: all_documents.extend(documents) except Exception as e: + logger.error(e, exc_info=True) exceptions.append(str(e)) @classmethod @@ -340,6 +361,7 @@ class RetrievalService: else: all_documents.extend(documents) except Exception as e: + logger.error(e, exc_info=True) exceptions.append(str(e)) @staticmethod @@ -434,70 +456,119 @@ class RetrievalService: .first() ) - if not segment: - continue + valid_dataset_documents = {} + image_doc_ids: list[Any] = [] + child_index_node_ids = [] + 
index_node_ids = [] + doc_to_document_map = {} + for document in documents: + document_id = document.metadata.get("document_id") + if document_id not in dataset_documents: + continue - if segment.id not in include_segment_ids: - include_segment_ids.add(segment.id) - if child_chunk: - child_chunk_detail = { - "id": child_chunk.id, - "content": child_chunk.content, - "position": child_chunk.position, - "score": document.metadata.get("score", 0.0), - } - map_detail = { - "max_score": document.metadata.get("score", 0.0), - "child_chunks": [child_chunk_detail], - } - segment_child_map[segment.id] = map_detail - record = { - "segment": segment, - } - if attachment_info: - segment_file_map[segment.id] = [attachment_info] - records.append(record) - else: - if child_chunk: - child_chunk_detail = { - "id": child_chunk.id, - "content": child_chunk.content, - "position": child_chunk.position, - "score": document.metadata.get("score", 0.0), - } - if segment.id in segment_child_map: - segment_child_map[segment.id]["child_chunks"].append(child_chunk_detail) - segment_child_map[segment.id]["max_score"] = max( - segment_child_map[segment.id]["max_score"], document.metadata.get("score", 0.0) - ) - else: - segment_child_map[segment.id] = { - "max_score": document.metadata.get("score", 0.0), - "child_chunks": [child_chunk_detail], - } - if attachment_info: - if segment.id in segment_file_map: - segment_file_map[segment.id].append(attachment_info) - else: - segment_file_map[segment.id] = [attachment_info] + dataset_document = dataset_documents[document_id] + if not dataset_document: + continue + valid_dataset_documents[document_id] = dataset_document + + if dataset_document.doc_form == IndexStructureType.PARENT_CHILD_INDEX: + doc_id = document.metadata.get("doc_id") or "" + doc_to_document_map[doc_id] = document + if document.metadata.get("doc_type") == DocType.IMAGE: + image_doc_ids.append(doc_id) else: - # Handle normal documents - segment = None - if document.metadata.get("doc_type") == DocType.IMAGE: - attachment_info_dict = cls.get_segment_attachment_info( - dataset_document.dataset_id, - dataset_document.tenant_id, - document.metadata.get("doc_id") or "", - session, - ) - if attachment_info_dict: - attachment_info = attachment_info_dict["attachment_info"] - segment_id = attachment_info_dict["segment_id"] - document_segment_stmt = select(DocumentSegment).where( - DocumentSegment.dataset_id == dataset_document.dataset_id, - DocumentSegment.enabled == True, - DocumentSegment.status == "completed", - DocumentSegment.id == segment_id, + child_index_node_ids.append(doc_id) + else: + doc_id = document.metadata.get("doc_id") or "" + doc_to_document_map[doc_id] = document + if document.metadata.get("doc_type") == DocType.IMAGE: + image_doc_ids.append(doc_id) + else: + index_node_ids.append(doc_id) + + image_doc_ids = [i for i in image_doc_ids if i] + child_index_node_ids = [i for i in child_index_node_ids if i] + index_node_ids = [i for i in index_node_ids if i] + + segment_ids: list[str] = [] + index_node_segments: list[DocumentSegment] = [] + segments: list[DocumentSegment] = [] + attachment_map: dict[str, list[dict[str, Any]]] = {} + child_chunk_map: dict[str, list[ChildChunk]] = {} + doc_segment_map: dict[str, list[str]] = {} + + with session_factory.create_session() as session: + attachments = cls.get_segment_attachment_infos(image_doc_ids, session) + + for attachment in attachments: + segment_ids.append(attachment["segment_id"]) + if attachment["segment_id"] in attachment_map: + 
attachment_map[attachment["segment_id"]].append(attachment["attachment_info"]) + else: + attachment_map[attachment["segment_id"]] = [attachment["attachment_info"]] + if attachment["segment_id"] in doc_segment_map: + doc_segment_map[attachment["segment_id"]].append(attachment["attachment_id"]) + else: + doc_segment_map[attachment["segment_id"]] = [attachment["attachment_id"]] + child_chunk_stmt = select(ChildChunk).where(ChildChunk.index_node_id.in_(child_index_node_ids)) + child_index_nodes = session.execute(child_chunk_stmt).scalars().all() + + for i in child_index_nodes: + segment_ids.append(i.segment_id) + if i.segment_id in child_chunk_map: + child_chunk_map[i.segment_id].append(i) + else: + child_chunk_map[i.segment_id] = [i] + if i.segment_id in doc_segment_map: + doc_segment_map[i.segment_id].append(i.index_node_id) + else: + doc_segment_map[i.segment_id] = [i.index_node_id] + + if index_node_ids: + document_segment_stmt = select(DocumentSegment).where( + DocumentSegment.enabled == True, + DocumentSegment.status == "completed", + DocumentSegment.index_node_id.in_(index_node_ids), + ) + index_node_segments = session.execute(document_segment_stmt).scalars().all() # type: ignore + for index_node_segment in index_node_segments: + doc_segment_map[index_node_segment.id] = [index_node_segment.index_node_id] + if segment_ids: + document_segment_stmt = select(DocumentSegment).where( + DocumentSegment.enabled == True, + DocumentSegment.status == "completed", + DocumentSegment.id.in_(segment_ids), + ) + segments = session.execute(document_segment_stmt).scalars().all() # type: ignore + + if index_node_segments: + segments.extend(index_node_segments) + + for segment in segments: + child_chunks: list[ChildChunk] = child_chunk_map.get(segment.id, []) + attachment_infos: list[dict[str, Any]] = attachment_map.get(segment.id, []) + ds_dataset_document: DatasetDocument | None = valid_dataset_documents.get(segment.document_id) + + if ds_dataset_document and ds_dataset_document.doc_form == IndexStructureType.PARENT_CHILD_INDEX: + if segment.id not in include_segment_ids: + include_segment_ids.add(segment.id) + if child_chunks or attachment_infos: + child_chunk_details = [] + max_score = 0.0 + for child_chunk in child_chunks: + document = doc_to_document_map[child_chunk.index_node_id] + child_chunk_detail = { + "id": child_chunk.id, + "content": child_chunk.content, + "position": child_chunk.position, + "score": document.metadata.get("score", 0.0) if document else 0.0, + } + child_chunk_details.append(child_chunk_detail) + max_score = max(max_score, document.metadata.get("score", 0.0) if document else 0.0) + for attachment_info in attachment_infos: + file_document = doc_to_document_map[attachment_info["id"]] + max_score = max( + max_score, file_document.metadata.get("score", 0.0) if file_document else 0.0 ) segment = session.scalar(document_segment_stmt) if segment: @@ -539,24 +610,40 @@ class RetrievalService: record = { "segment": segment, "score": document.metadata.get("score"), # type: ignore + + map_detail = { + "max_score": max_score, + "child_chunks": child_chunk_details, } - if attachment_info: - segment_file_map[segment.id] = [attachment_info] - records.append(record) - else: - if attachment_info: - attachment_infos = segment_file_map.get(segment.id, []) - if attachment_info not in attachment_infos: - attachment_infos.append(attachment_info) - segment_file_map[segment.id] = attachment_infos + segment_child_map[segment.id] = map_detail + record: dict[str, Any] = { + "segment": segment, + } + 
records.append(record) + else: + if segment.id not in include_segment_ids: + include_segment_ids.add(segment.id) + max_score = 0.0 + segment_document = doc_to_document_map.get(segment.index_node_id) + if segment_document: + max_score = max(max_score, segment_document.metadata.get("score", 0.0)) + for attachment_info in attachment_infos: + file_doc = doc_to_document_map.get(attachment_info["id"]) + if file_doc: + max_score = max(max_score, file_doc.metadata.get("score", 0.0)) + record = { + "segment": segment, + "score": max_score, + } + records.append(record) # Add child chunks information to records for record in records: if record["segment"].id in segment_child_map: record["child_chunks"] = segment_child_map[record["segment"].id].get("child_chunks") # type: ignore - record["score"] = segment_child_map[record["segment"].id]["max_score"] - if record["segment"].id in segment_file_map: - record["files"] = segment_file_map[record["segment"].id] # type: ignore[assignment] + record["score"] = segment_child_map[record["segment"].id]["max_score"] # type: ignore + if record["segment"].id in attachment_map: + record["files"] = attachment_map[record["segment"].id] # type: ignore[assignment] # Batch query summaries for segments retrieved via summary (only enabled summaries) if summary_segment_ids: @@ -575,15 +662,26 @@ class RetrievalService: if summary.summary_content: segment_summary_map[summary.chunk_id] = summary.summary_content - result = [] + result: list[RetrievalSegments] = [] for record in records: # Extract segment segment = record["segment"] # Extract child_chunks, ensuring it's a list or None - child_chunks = record.get("child_chunks") - if not isinstance(child_chunks, list): - child_chunks = None + raw_child_chunks = record.get("child_chunks") + child_chunks_list: list[RetrievalChildChunk] | None = None + if isinstance(raw_child_chunks, list): + # Sort by score descending + sorted_chunks = sorted(raw_child_chunks, key=lambda x: x.get("score", 0.0), reverse=True) + child_chunks_list = [ + RetrievalChildChunk( + id=chunk["id"], + content=chunk["content"], + score=chunk.get("score", 0.0), + position=chunk["position"], + ) + for chunk in sorted_chunks + ] # Extract files, ensuring it's a list or None files = record.get("files") @@ -604,14 +702,14 @@ class RetrievalService: # Create RetrievalSegments object retrieval_segment = RetrievalSegments( segment=segment, - child_chunks=child_chunks, + child_chunks=child_chunks_list, score=score, files=files, - summary=summary_content, + summary=summary_content ) result.append(retrieval_segment) - return result + return sorted(result, key=lambda x: x.score if x.score is not None else 0.0, reverse=True) except Exception as e: db.session.rollback() raise e @@ -621,6 +719,8 @@ class RetrievalService: flask_app: Flask, retrieval_method: RetrievalMethod, dataset: Dataset, + all_documents: list[Document], + exceptions: list[str], query: str | None = None, top_k: int = 4, score_threshold: float | None = 0.0, @@ -629,8 +729,6 @@ class RetrievalService: weights: dict | None = None, document_ids_filter: list[str] | None = None, attachment_id: str | None = None, - all_documents: list[Document] = [], - exceptions: list[str] = [], ): if not query and not attachment_id: return @@ -703,7 +801,14 @@ class RetrievalService: document_ids_filter=document_ids_filter, ) ) - concurrent.futures.wait(futures, timeout=300, return_when=concurrent.futures.ALL_COMPLETED) + # Use as_completed for early error propagation - cancel remaining futures on first error + if futures: + for 
future in concurrent.futures.as_completed(futures, timeout=300): + if future.exception(): + # Cancel remaining futures to avoid unnecessary waiting + for f in futures: + f.cancel() + break if exceptions: raise ValueError(";\n".join(exceptions)) @@ -752,3 +857,37 @@ class RetrievalService: } return {"attachment_info": attachment_info, "segment_id": attachment_binding.segment_id} return None + + @classmethod + def get_segment_attachment_infos(cls, attachment_ids: list[str], session: Session) -> list[dict[str, Any]]: + attachment_infos = [] + upload_files = session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).all() + if upload_files: + upload_file_ids = [upload_file.id for upload_file in upload_files] + attachment_bindings = ( + session.query(SegmentAttachmentBinding) + .where(SegmentAttachmentBinding.attachment_id.in_(upload_file_ids)) + .all() + ) + attachment_binding_map = {binding.attachment_id: binding for binding in attachment_bindings} + + if attachment_bindings: + for upload_file in upload_files: + attachment_binding = attachment_binding_map.get(upload_file.id) + attachment_info = { + "id": upload_file.id, + "name": upload_file.name, + "extension": "." + upload_file.extension, + "mime_type": upload_file.mime_type, + "source_url": sign_upload_file(upload_file.id, upload_file.extension), + "size": upload_file.size, + } + if attachment_binding: + attachment_infos.append( + { + "attachment_id": attachment_binding.attachment_id, + "attachment_info": attachment_info, + "segment_id": attachment_binding.segment_id, + } + ) + return attachment_infos diff --git a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py index a306f9ba0c..91bb71bfa6 100644 --- a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py +++ b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import contextlib import json import logging @@ -6,7 +8,7 @@ import re import threading import time import uuid -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any import clickzetta # type: ignore from pydantic import BaseModel, model_validator @@ -76,7 +78,7 @@ class ClickzettaConnectionPool: Manages connection reuse across ClickzettaVector instances. 
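The retrieval hunk above swaps `concurrent.futures.wait` for `as_completed` so the first worker failure is observed as soon as it happens rather than after all futures finish. A minimal, runnable sketch of that fail-fast pattern; `flaky_task` is an invented stand-in for the retrieval workers:

```python
import concurrent.futures
import time

def flaky_task(i: int) -> int:
    # Simulated worker; task 2 fails early so the waiter can react to it.
    time.sleep(i * 0.1)
    if i == 2:
        raise ValueError(f"task {i} failed")
    return i

with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
    futures = [executor.submit(flaky_task, i) for i in range(6)]
    # as_completed yields each future as it finishes, so the first exception
    # surfaces immediately instead of after a full wait() on all futures.
    for future in concurrent.futures.as_completed(futures, timeout=300):
        if future.exception():
            # Cancel futures that have not started; threads already running
            # cannot be interrupted, but queued work is dropped.
            for f in futures:
                f.cancel()
            break
```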
""" - _instance: Optional["ClickzettaConnectionPool"] = None + _instance: ClickzettaConnectionPool | None = None _lock = threading.Lock() def __init__(self): @@ -89,7 +91,7 @@ class ClickzettaConnectionPool: self._start_cleanup_thread() @classmethod - def get_instance(cls) -> "ClickzettaConnectionPool": + def get_instance(cls) -> ClickzettaConnectionPool: """Get singleton instance of connection pool.""" if cls._instance is None: with cls._lock: @@ -104,7 +106,7 @@ class ClickzettaConnectionPool: f"{config.workspace}:{config.vcluster}:{config.schema_name}" ) - def _create_connection(self, config: ClickzettaConfig) -> "Connection": + def _create_connection(self, config: ClickzettaConfig) -> Connection: """Create a new ClickZetta connection.""" max_retries = 3 retry_delay = 1.0 @@ -134,7 +136,7 @@ class ClickzettaConnectionPool: raise RuntimeError(f"Failed to create ClickZetta connection after {max_retries} attempts") - def _configure_connection(self, connection: "Connection"): + def _configure_connection(self, connection: Connection): """Configure connection session settings.""" try: with connection.cursor() as cursor: @@ -181,7 +183,7 @@ class ClickzettaConnectionPool: except Exception: logger.exception("Failed to configure connection, continuing with defaults") - def _is_connection_valid(self, connection: "Connection") -> bool: + def _is_connection_valid(self, connection: Connection) -> bool: """Check if connection is still valid.""" try: with connection.cursor() as cursor: @@ -190,7 +192,7 @@ class ClickzettaConnectionPool: except Exception: return False - def get_connection(self, config: ClickzettaConfig) -> "Connection": + def get_connection(self, config: ClickzettaConfig) -> Connection: """Get a connection from the pool or create a new one.""" config_key = self._get_config_key(config) @@ -221,7 +223,7 @@ class ClickzettaConnectionPool: # No valid connection found, create new one return self._create_connection(config) - def return_connection(self, config: ClickzettaConfig, connection: "Connection"): + def return_connection(self, config: ClickzettaConfig, connection: Connection): """Return a connection to the pool.""" config_key = self._get_config_key(config) @@ -315,22 +317,22 @@ class ClickzettaVector(BaseVector): self._connection_pool = ClickzettaConnectionPool.get_instance() self._init_write_queue() - def _get_connection(self) -> "Connection": + def _get_connection(self) -> Connection: """Get a connection from the pool.""" return self._connection_pool.get_connection(self._config) - def _return_connection(self, connection: "Connection"): + def _return_connection(self, connection: Connection): """Return a connection to the pool.""" self._connection_pool.return_connection(self._config, connection) class ConnectionContext: """Context manager for borrowing and returning connections.""" - def __init__(self, vector_instance: "ClickzettaVector"): + def __init__(self, vector_instance: ClickzettaVector): self.vector = vector_instance self.connection: Connection | None = None - def __enter__(self) -> "Connection": + def __enter__(self) -> Connection: self.connection = self.vector._get_connection() return self.connection @@ -338,7 +340,7 @@ class ClickzettaVector(BaseVector): if self.connection: self.vector._return_connection(self.connection) - def get_connection_context(self) -> "ClickzettaVector.ConnectionContext": + def get_connection_context(self) -> ClickzettaVector.ConnectionContext: """Get a connection context manager.""" return self.ConnectionContext(self) @@ -437,7 +439,7 @@ class 
ClickzettaVector(BaseVector): """Return the vector database type.""" return "clickzetta" - def _ensure_connection(self) -> "Connection": + def _ensure_connection(self) -> Connection: """Get a connection from the pool.""" return self._get_connection() @@ -984,9 +986,11 @@ class ClickzettaVector(BaseVector): # No need for dataset_id filter since each dataset has its own table - # Use simple quote escaping for LIKE clause - escaped_query = query.replace("'", "''") - filter_clauses.append(f"{Field.CONTENT_KEY} LIKE '%{escaped_query}%'") + # Escape special characters for LIKE clause to prevent SQL injection + from libs.helper import escape_like_pattern + + escaped_query = escape_like_pattern(query).replace("'", "''") + filter_clauses.append(f"{Field.CONTENT_KEY} LIKE '%{escaped_query}%' ESCAPE '\\\\'") where_clause = " AND ".join(filter_clauses) search_sql = f""" diff --git a/api/core/rag/datasource/vdb/iris/iris_vector.py b/api/core/rag/datasource/vdb/iris/iris_vector.py index b1bfabb76e..5bdb0af0b3 100644 --- a/api/core/rag/datasource/vdb/iris/iris_vector.py +++ b/api/core/rag/datasource/vdb/iris/iris_vector.py @@ -287,11 +287,15 @@ class IrisVector(BaseVector): cursor.execute(sql, (query,)) else: # Fallback to LIKE search (inefficient for large datasets) - query_pattern = f"%{query}%" + # Escape special characters for LIKE clause to prevent SQL injection + from libs.helper import escape_like_pattern + + escaped_query = escape_like_pattern(query) + query_pattern = f"%{escaped_query}%" sql = f""" SELECT TOP {top_k} id, text, meta FROM {self.schema}.{self.table_name} - WHERE text LIKE ? + WHERE text LIKE ? ESCAPE '\\' """ cursor.execute(sql, (query_pattern,)) diff --git a/api/core/rag/datasource/vdb/oracle/oraclevector.py b/api/core/rag/datasource/vdb/oracle/oraclevector.py index d82ab89a34..cb05c22b55 100644 --- a/api/core/rag/datasource/vdb/oracle/oraclevector.py +++ b/api/core/rag/datasource/vdb/oracle/oraclevector.py @@ -289,7 +289,8 @@ class OracleVector(BaseVector): words = pseg.cut(query) current_entity = "" for word, pos in words: - if pos in {"nr", "Ng", "eng", "nz", "n", "ORG", "v"}: # nr: 人名,ns: 地名,nt: 机构名 + # `nr`: Person, `ns`: Location, `nt`: Organization + if pos in {"nr", "Ng", "eng", "nz", "n", "ORG", "v"}: current_entity += word else: if current_entity: diff --git a/api/core/rag/datasource/vdb/pgvector/pgvector.py b/api/core/rag/datasource/vdb/pgvector/pgvector.py index 445a0a7f8b..0615b8312c 100644 --- a/api/core/rag/datasource/vdb/pgvector/pgvector.py +++ b/api/core/rag/datasource/vdb/pgvector/pgvector.py @@ -255,7 +255,10 @@ class PGVector(BaseVector): return with self._get_cursor() as cur: - cur.execute("CREATE EXTENSION IF NOT EXISTS vector") + cur.execute("SELECT 1 FROM pg_extension WHERE extname = 'vector'") + if not cur.fetchone(): + cur.execute("CREATE EXTENSION vector") + cur.execute(SQL_CREATE_TABLE.format(table_name=self.table_name, dimension=dimension)) # PG hnsw index only support 2000 dimension or less # ref: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing diff --git a/api/core/rag/datasource/vdb/pyvastbase/vastbase_vector.py b/api/core/rag/datasource/vdb/pyvastbase/vastbase_vector.py index 86b6ace3f6..d080e8da58 100644 --- a/api/core/rag/datasource/vdb/pyvastbase/vastbase_vector.py +++ b/api/core/rag/datasource/vdb/pyvastbase/vastbase_vector.py @@ -213,7 +213,7 @@ class VastbaseVector(BaseVector): with self._get_cursor() as cur: cur.execute(SQL_CREATE_TABLE.format(table_name=self.table_name, dimension=dimension)) - # Vastbase 支持的向量维度取值范围为 
[1,16000] + # Vastbase supports vector dimensions in the range [1, 16,000] if dimension <= 16000: cur.execute(SQL_CREATE_INDEX.format(table_name=self.table_name)) redis_client.set(collection_exist_cache_key, 1, ex=3600) diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py index 9573b491a5..b9772b3c08 100644 --- a/api/core/rag/datasource/vdb/vector_factory.py +++ b/api/core/rag/datasource/vdb/vector_factory.py @@ -163,7 +163,7 @@ class Vector: from core.rag.datasource.vdb.lindorm.lindorm_vector import LindormVectorStoreFactory return LindormVectorStoreFactory - case VectorType.OCEANBASE: + case VectorType.OCEANBASE | VectorType.SEEKDB: from core.rag.datasource.vdb.oceanbase.oceanbase_vector import OceanBaseVectorFactory return OceanBaseVectorFactory diff --git a/api/core/rag/datasource/vdb/vector_type.py b/api/core/rag/datasource/vdb/vector_type.py index 263d22195e..bd99a31446 100644 --- a/api/core/rag/datasource/vdb/vector_type.py +++ b/api/core/rag/datasource/vdb/vector_type.py @@ -27,6 +27,7 @@ class VectorType(StrEnum): UPSTASH = "upstash" TIDB_ON_QDRANT = "tidb_on_qdrant" OCEANBASE = "oceanbase" + SEEKDB = "seekdb" OPENGAUSS = "opengauss" TABLESTORE = "tablestore" HUAWEI_CLOUD = "huawei_cloud" diff --git a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py index 84d1e26b34..b48dd93f04 100644 --- a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py +++ b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py @@ -66,6 +66,8 @@ class WeaviateVector(BaseVector): in a Weaviate collection. """ + _DOCUMENT_ID_PROPERTY = "document_id" + def __init__(self, collection_name: str, config: WeaviateConfig, attributes: list): """ Initializes the Weaviate vector store. 
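Both the ClickZetta and IRIS hunks above route user input through `libs.helper.escape_like_pattern` and declare an explicit `ESCAPE` character, so `%` and `_` in a query match literally instead of acting as SQL wildcards. The helper's body is not shown in this diff; a plausible implementation consistent with those `ESCAPE '\'` clauses (the actual `libs.helper` version may differ):

```python
def escape_like_pattern(value: str) -> str:
    # Escape the escape character itself first, otherwise the backslashes
    # added for % and _ would be doubled again on the later passes.
    return value.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")

# With ESCAPE '\' declared in the SQL, "100%_done" matches only the literal string:
assert escape_like_pattern("100%_done") == "100\\%\\_done"
```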
@@ -353,15 +355,12 @@ class WeaviateVector(BaseVector): return [] col = self._client.collections.use(self._collection_name) - props = list({*self._attributes, "document_id", Field.TEXT_KEY.value}) + props = list({*self._attributes, self._DOCUMENT_ID_PROPERTY, Field.TEXT_KEY.value}) where = None doc_ids = kwargs.get("document_ids_filter") or [] if doc_ids: - ors = [Filter.by_property("document_id").equal(x) for x in doc_ids] - where = ors[0] - for f in ors[1:]: - where = where | f + where = Filter.by_property(self._DOCUMENT_ID_PROPERTY).contains_any(doc_ids) top_k = int(kwargs.get("top_k", 4)) score_threshold = float(kwargs.get("score_threshold") or 0.0) @@ -408,10 +407,7 @@ class WeaviateVector(BaseVector): where = None doc_ids = kwargs.get("document_ids_filter") or [] if doc_ids: - ors = [Filter.by_property("document_id").equal(x) for x in doc_ids] - where = ors[0] - for f in ors[1:]: - where = where | f + where = Filter.by_property(self._DOCUMENT_ID_PROPERTY).contains_any(doc_ids) top_k = int(kwargs.get("top_k", 4)) diff --git a/api/core/rag/docstore/dataset_docstore.py b/api/core/rag/docstore/dataset_docstore.py index 1fe74d3042..69adac522d 100644 --- a/api/core/rag/docstore/dataset_docstore.py +++ b/api/core/rag/docstore/dataset_docstore.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Sequence from typing import Any @@ -22,7 +24,7 @@ class DatasetDocumentStore: self._document_id = document_id @classmethod - def from_dict(cls, config_dict: dict[str, Any]) -> "DatasetDocumentStore": + def from_dict(cls, config_dict: dict[str, Any]) -> DatasetDocumentStore: return cls(**config_dict) def to_dict(self) -> dict[str, Any]: diff --git a/api/core/rag/extractor/entity/extract_setting.py b/api/core/rag/extractor/entity/extract_setting.py index c3bfbce98f..0c42034073 100644 --- a/api/core/rag/extractor/entity/extract_setting.py +++ b/api/core/rag/extractor/entity/extract_setting.py @@ -10,7 +10,7 @@ class NotionInfo(BaseModel): """ credential_id: str | None = None - notion_workspace_id: str + notion_workspace_id: str | None = "" notion_obj_id: str notion_page_type: str document: Document | None = None diff --git a/api/core/rag/extractor/extract_processor.py b/api/core/rag/extractor/extract_processor.py index 0f62f9c4b6..6d28ce25bc 100644 --- a/api/core/rag/extractor/extract_processor.py +++ b/api/core/rag/extractor/extract_processor.py @@ -112,7 +112,7 @@ class ExtractProcessor: if file_extension in {".xlsx", ".xls"}: extractor = ExcelExtractor(file_path) elif file_extension == ".pdf": - extractor = PdfExtractor(file_path) + extractor = PdfExtractor(file_path, upload_file.tenant_id, upload_file.created_by) elif file_extension in {".md", ".markdown", ".mdx"}: extractor = ( UnstructuredMarkdownExtractor(file_path, unstructured_api_url, unstructured_api_key) @@ -148,7 +148,7 @@ class ExtractProcessor: if file_extension in {".xlsx", ".xls"}: extractor = ExcelExtractor(file_path) elif file_extension == ".pdf": - extractor = PdfExtractor(file_path) + extractor = PdfExtractor(file_path, upload_file.tenant_id, upload_file.created_by) elif file_extension in {".md", ".markdown", ".mdx"}: extractor = MarkdownExtractor(file_path, autodetect_encoding=True) elif file_extension in {".htm", ".html"}: @@ -166,7 +166,7 @@ class ExtractProcessor: elif extract_setting.datasource_type == DatasourceType.NOTION: assert extract_setting.notion_info is not None, "notion_info is required" extractor = NotionExtractor( - notion_workspace_id=extract_setting.notion_info.notion_workspace_id, 
+ notion_workspace_id=extract_setting.notion_info.notion_workspace_id or "", notion_obj_id=extract_setting.notion_info.notion_obj_id, notion_page_type=extract_setting.notion_info.notion_page_type, document_model=extract_setting.notion_info.document, diff --git a/api/core/rag/extractor/firecrawl/firecrawl_app.py b/api/core/rag/extractor/firecrawl/firecrawl_app.py index 789ac8557d..5d6223db06 100644 --- a/api/core/rag/extractor/firecrawl/firecrawl_app.py +++ b/api/core/rag/extractor/firecrawl/firecrawl_app.py @@ -25,7 +25,7 @@ class FirecrawlApp: } if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v2/scrape", json_data, headers) + response = self._post_request(self._build_url("v2/scrape"), json_data, headers) if response.status_code == 200: response_data = response.json() data = response_data["data"] @@ -42,7 +42,7 @@ class FirecrawlApp: json_data = {"url": url} if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v2/crawl", json_data, headers) + response = self._post_request(self._build_url("v2/crawl"), json_data, headers) if response.status_code == 200: # There's also another two fields in the response: "success" (bool) and "url" (str) job_id = response.json().get("id") @@ -58,7 +58,7 @@ class FirecrawlApp: if params: # Pass through provided params, including optional "sitemap": "only" | "include" | "skip" json_data.update(params) - response = self._post_request(f"{self.base_url}/v2/map", json_data, headers) + response = self._post_request(self._build_url("v2/map"), json_data, headers) if response.status_code == 200: return cast(dict[str, Any], response.json()) elif response.status_code in {402, 409, 500, 429, 408}: @@ -69,7 +69,7 @@ class FirecrawlApp: def check_crawl_status(self, job_id) -> dict[str, Any]: headers = self._prepare_headers() - response = self._get_request(f"{self.base_url}/v2/crawl/{job_id}", headers) + response = self._get_request(self._build_url(f"v2/crawl/{job_id}"), headers) if response.status_code == 200: crawl_status_response = response.json() if crawl_status_response.get("status") == "completed": @@ -120,6 +120,10 @@ class FirecrawlApp: def _prepare_headers(self) -> dict[str, Any]: return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"} + def _build_url(self, path: str) -> str: + # ensure exactly one slash between base and path, regardless of user-provided base_url + return f"{self.base_url.rstrip('/')}/{path.lstrip('/')}" + def _post_request(self, url, data, headers, retries=3, backoff_factor=0.5) -> httpx.Response: for attempt in range(retries): response = httpx.post(url, headers=headers, json=data) @@ -139,7 +143,11 @@ class FirecrawlApp: return response def _handle_error(self, response, action): - error_message = response.json().get("error", "Unknown error occurred") + try: + payload = response.json() + error_message = payload.get("error") or payload.get("message") or response.text or "Unknown error occurred" + except json.JSONDecodeError: + error_message = response.text or "Unknown error occurred" raise Exception(f"Failed to {action}. Status code: {response.status_code}. 
Error: {error_message}") # type: ignore[return] def search(self, query: str, params: dict[str, Any] | None = None) -> dict[str, Any]: @@ -160,7 +168,7 @@ class FirecrawlApp: } if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v2/search", json_data, headers) + response = self._post_request(self._build_url("v2/search"), json_data, headers) if response.status_code == 200: response_data = response.json() if not response_data.get("success"): diff --git a/api/core/rag/extractor/helpers.py b/api/core/rag/extractor/helpers.py index 5166c0c768..5b466b281c 100644 --- a/api/core/rag/extractor/helpers.py +++ b/api/core/rag/extractor/helpers.py @@ -45,6 +45,6 @@ def detect_file_encodings(file_path: str, timeout: int = 5, sample_size: int = 1 except concurrent.futures.TimeoutError: raise TimeoutError(f"Timeout reached while detecting encoding for {file_path}") - if all(encoding["encoding"] is None for encoding in encodings): + if all(encoding.encoding is None for encoding in encodings): raise RuntimeError(f"Could not detect encoding for {file_path}") - return [FileEncoding(**enc) for enc in encodings if enc["encoding"] is not None] + return [enc for enc in encodings if enc.encoding is not None] diff --git a/api/core/rag/extractor/notion_extractor.py b/api/core/rag/extractor/notion_extractor.py index e87ab38349..372af8fd94 100644 --- a/api/core/rag/extractor/notion_extractor.py +++ b/api/core/rag/extractor/notion_extractor.py @@ -48,13 +48,21 @@ class NotionExtractor(BaseExtractor): if notion_access_token: self._notion_access_token = notion_access_token else: - self._notion_access_token = self._get_access_token(tenant_id, self._credential_id) - if not self._notion_access_token: + try: + self._notion_access_token = self._get_access_token(tenant_id, self._credential_id) + except Exception as e: + logger.warning( + ( + "Failed to get Notion access token from datasource credentials: %s, " + "falling back to environment variable NOTION_INTEGRATION_TOKEN" + ), + e, + ) integration_token = dify_config.NOTION_INTEGRATION_TOKEN if integration_token is None: raise ValueError( "Must specify `integration_token` or set environment variable `NOTION_INTEGRATION_TOKEN`." - ) + ) from e self._notion_access_token = integration_token diff --git a/api/core/rag/extractor/pdf_extractor.py b/api/core/rag/extractor/pdf_extractor.py index 80530d99a6..6aabcac704 100644 --- a/api/core/rag/extractor/pdf_extractor.py +++ b/api/core/rag/extractor/pdf_extractor.py @@ -1,25 +1,57 @@ """Abstract interface for document loader implementations.""" import contextlib +import io +import logging +import uuid from collections.abc import Iterator +import pypdfium2 +import pypdfium2.raw as pdfium_c + +from configs import dify_config from core.rag.extractor.blob.blob import Blob from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document +from extensions.ext_database import db from extensions.ext_storage import storage +from libs.datetime_utils import naive_utc_now +from models.enums import CreatorUserRole +from models.model import UploadFile + +logger = logging.getLogger(__name__) class PdfExtractor(BaseExtractor): - """Load pdf files. - + """ + PdfExtractor is used to extract text and images from PDF files. Args: - file_path: Path to the file to load. + file_path: Path to the PDF file. + tenant_id: Workspace ID. + user_id: ID of the user performing the extraction. + file_cache_key: Optional cache key for the extracted text. 
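The NotionExtractor hunk above turns a hard failure in the datasource-credential lookup into a logged fallback to `NOTION_INTEGRATION_TOKEN`, chaining the original exception with `raise ... from e`. A condensed sketch of that fallback shape; `resolve_token` and `fetch_primary` are invented names for illustration:

```python
import logging
import os
from collections.abc import Callable

logger = logging.getLogger(__name__)

def resolve_token(fetch_primary: Callable[[], str], env_var: str = "NOTION_INTEGRATION_TOKEN") -> str:
    try:
        return fetch_primary()
    except Exception as e:
        # Log the primary failure, then fall back to the environment variable.
        logger.warning("Credential lookup failed: %s, falling back to %s", e, env_var)
        token = os.environ.get(env_var)
        if token is None:
            # Chain the original exception so the root cause stays debuggable.
            raise ValueError(f"Must set `{env_var}` when no datasource credential is available.") from e
        return token
```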
""" - def __init__(self, file_path: str, file_cache_key: str | None = None): - """Initialize with file path.""" + # Magic bytes for image format detection: (magic_bytes, extension, mime_type) + IMAGE_FORMATS = [ + (b"\xff\xd8\xff", "jpg", "image/jpeg"), + (b"\x89PNG\r\n\x1a\n", "png", "image/png"), + (b"\x00\x00\x00\x0c\x6a\x50\x20\x20\x0d\x0a\x87\x0a", "jp2", "image/jp2"), + (b"GIF8", "gif", "image/gif"), + (b"BM", "bmp", "image/bmp"), + (b"II*\x00", "tiff", "image/tiff"), + (b"MM\x00*", "tiff", "image/tiff"), + (b"II+\x00", "tiff", "image/tiff"), + (b"MM\x00+", "tiff", "image/tiff"), + ] + MAX_MAGIC_LEN = max(len(m) for m, _, _ in IMAGE_FORMATS) + + def __init__(self, file_path: str, tenant_id: str, user_id: str, file_cache_key: str | None = None): + """Initialize PdfExtractor.""" self._file_path = file_path + self._tenant_id = tenant_id + self._user_id = user_id self._file_cache_key = file_cache_key def extract(self) -> list[Document]: @@ -50,7 +82,6 @@ class PdfExtractor(BaseExtractor): def parse(self, blob: Blob) -> Iterator[Document]: """Lazily parse the blob.""" - import pypdfium2 # type: ignore with blob.as_bytes_io() as file_path: pdf_reader = pypdfium2.PdfDocument(file_path, autoclose=True) @@ -59,8 +90,87 @@ class PdfExtractor(BaseExtractor): text_page = page.get_textpage() content = text_page.get_text_range() text_page.close() + + image_content = self._extract_images(page) + if image_content: + content += "\n" + image_content + page.close() metadata = {"source": blob.source, "page": page_number} yield Document(page_content=content, metadata=metadata) finally: pdf_reader.close() + + def _extract_images(self, page) -> str: + """ + Extract images from a PDF page, save them to storage and database, + and return markdown image links. + + Args: + page: pypdfium2 page object. + + Returns: + Markdown string containing links to the extracted images. + """ + image_content = [] + upload_files = [] + base_url = dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL + + try: + image_objects = page.get_objects(filter=(pdfium_c.FPDF_PAGEOBJ_IMAGE,)) + for obj in image_objects: + try: + # Extract image bytes + img_byte_arr = io.BytesIO() + # Extract DCTDecode (JPEG) and JPXDecode (JPEG 2000) images directly + # Fallback to png for other formats + obj.extract(img_byte_arr, fb_format="png") + img_bytes = img_byte_arr.getvalue() + + if not img_bytes: + continue + + header = img_bytes[: self.MAX_MAGIC_LEN] + image_ext = None + mime_type = None + for magic, ext, mime in self.IMAGE_FORMATS: + if header.startswith(magic): + image_ext = ext + mime_type = mime + break + + if not image_ext or not mime_type: + continue + + file_uuid = str(uuid.uuid4()) + file_key = "image_files/" + self._tenant_id + "/" + file_uuid + "." 
+ image_ext + + storage.save(file_key, img_bytes) + + # save file to db + upload_file = UploadFile( + tenant_id=self._tenant_id, + storage_type=dify_config.STORAGE_TYPE, + key=file_key, + name=file_key, + size=len(img_bytes), + extension=image_ext, + mime_type=mime_type, + created_by=self._user_id, + created_by_role=CreatorUserRole.ACCOUNT, + created_at=naive_utc_now(), + used=True, + used_by=self._user_id, + used_at=naive_utc_now(), + ) + upload_files.append(upload_file) + image_content.append(f"![image]({base_url}/files/{upload_file.id}/file-preview)") + except Exception as e: + logger.warning("Failed to extract image from PDF: %s", e) + continue + except Exception as e: + logger.warning("Failed to get objects from PDF page: %s", e) + if upload_files: + db.session.add_all(upload_files) + db.session.commit() + return "\n".join(image_content) diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py index 044b118635..511f5a698d 100644 --- a/api/core/rag/extractor/word_extractor.py +++ b/api/core/rag/extractor/word_extractor.py @@ -7,10 +7,11 @@ import re import tempfile import uuid from urllib.parse import urlparse -from xml.etree import ElementTree import httpx from docx import Document as DocxDocument +from docx.oxml.ns import qn +from docx.text.run import Run from configs import dify_config from core.helper import ssrf_proxy @@ -83,6 +84,7 @@ class WordExtractor(BaseExtractor): def _extract_images_from_docx(self, doc): image_count = 0 image_map = {} + base_url = dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL for r_id, rel in doc.part.rels.items(): if "image" in rel.target_ref: @@ -121,8 +123,7 @@ class WordExtractor(BaseExtractor): used_at=naive_utc_now(), ) db.session.add(upload_file) - # Use r_id as key for external images since target_part is undefined - image_map[r_id] = f"![image]({dify_config.FILES_URL}/files/{upload_file.id}/file-preview)" + image_map[r_id] = f"![image]({base_url}/files/{upload_file.id}/file-preview)" else: image_ext = rel.target_ref.split(".")[-1] if image_ext is None: @@ -150,10 +151,7 @@ class WordExtractor(BaseExtractor): used_at=naive_utc_now(), ) db.session.add(upload_file) - # Use target_part as key for internal images - image_map[rel.target_part] = ( - f"![image]({dify_config.FILES_URL}/files/{upload_file.id}/file-preview)" - ) + image_map[rel.target_part] = f"![image]({base_url}/files/{upload_file.id}/file-preview)" db.session.commit() return image_map @@ -232,44 +230,20 @@ class WordExtractor(BaseExtractor): image_map = self._extract_images_from_docx(doc) - hyperlinks_url = None - url_pattern = re.compile(r"http://[^\s+]+//|https://[^\s+]+") - for para in doc.paragraphs: - for run in para.runs: - if run.text and hyperlinks_url: - result = f" [{run.text}]({hyperlinks_url}) " - run.text = result - hyperlinks_url = None - if "HYPERLINK" in run.element.xml: - try: - xml = ElementTree.XML(run.element.xml) - x_child = [c for c in xml.iter() if c is not None] - for x in x_child: - if x is None: - continue - if x.tag.endswith("instrText"): - if x.text is None: - continue - for i in url_pattern.findall(x.text): - hyperlinks_url = str(i) - except Exception: - logger.exception("Failed to parse HYPERLINK xml") - def parse_paragraph(paragraph): - paragraph_content = [] - - def append_image_link(image_id, has_drawing): + def append_image_link(image_id, has_drawing, target_buffer): """Helper to append image link from image_map based on relationship type.""" rel = doc.part.rels[image_id] if rel.is_external: if image_id in 
image_map and not has_drawing: - paragraph_content.append(image_map[image_id]) + target_buffer.append(image_map[image_id]) else: image_part = rel.target_part if image_part in image_map and not has_drawing: - paragraph_content.append(image_map[image_part]) + target_buffer.append(image_map[image_part]) - for run in paragraph.runs: + def process_run(run, target_buffer): + # Helper to extract text and embedded images from a run element and append them to target_buffer if hasattr(run.element, "tag") and isinstance(run.element.tag, str) and run.element.tag.endswith("r"): # Process drawing type images drawing_elements = run.element.findall( @@ -290,13 +264,13 @@ class WordExtractor(BaseExtractor): # External image: use embed_id as key if embed_id in image_map: has_drawing = True - paragraph_content.append(image_map[embed_id]) + target_buffer.append(image_map[embed_id]) else: # Internal image: use target_part as key image_part = doc.part.related_parts.get(embed_id) if image_part in image_map: has_drawing = True - paragraph_content.append(image_map[image_part]) + target_buffer.append(image_map[image_part]) # Process pict type images shape_elements = run.element.findall( ".//{http://schemas.openxmlformats.org/wordprocessingml/2006/main}pict" @@ -311,7 +285,7 @@ class WordExtractor(BaseExtractor): "{http://schemas.openxmlformats.org/officeDocument/2006/relationships}id" ) if image_id and image_id in doc.part.rels: - append_image_link(image_id, has_drawing) + append_image_link(image_id, has_drawing, target_buffer) # Find imagedata element in VML image_data = shape.find(".//{urn:schemas-microsoft-com:vml}imagedata") if image_data is not None: @@ -319,9 +293,93 @@ class WordExtractor(BaseExtractor): "{http://schemas.openxmlformats.org/officeDocument/2006/relationships}id" ) if image_id and image_id in doc.part.rels: - append_image_link(image_id, has_drawing) + append_image_link(image_id, has_drawing, target_buffer) if run.text.strip(): - paragraph_content.append(run.text.strip()) + target_buffer.append(run.text.strip()) + + def process_hyperlink(hyperlink_elem, target_buffer): + # Helper to extract text from a hyperlink element and append it to target_buffer + r_id = hyperlink_elem.get(qn("r:id")) + + # Extract text from runs inside the hyperlink + link_text_parts = [] + for run_elem in hyperlink_elem.findall(qn("w:r")): + run = Run(run_elem, paragraph) + # Hyperlink text may be split across multiple runs (e.g., with different formatting), + # so collect all run texts first + if run.text: + link_text_parts.append(run.text) + + link_text = "".join(link_text_parts).strip() + + # Resolve URL + if r_id: + try: + rel = doc.part.rels.get(r_id) + if rel and rel.is_external: + link_text = f"[{link_text or rel.target_ref}]({rel.target_ref})" + except Exception: + logger.exception("Failed to resolve URL for hyperlink with r:id: %s", r_id) + + if link_text: + target_buffer.append(link_text) + + paragraph_content = [] + # State for legacy HYPERLINK fields + hyperlink_field_url = None + hyperlink_field_text_parts: list = [] + is_collecting_field_text = False + # Iterate through paragraph elements in document order + for child in paragraph._element: + tag = child.tag + if tag == qn("w:r"): + # Regular run + run = Run(child, paragraph) + + # Check for fldChar (begin/end/separate) and instrText for legacy hyperlinks + fld_chars = child.findall(qn("w:fldChar")) + instr_texts = child.findall(qn("w:instrText")) + + # Handle Fields + if fld_chars or instr_texts: + # Process instrText to find HYPERLINK "url" + for instr in 
instr_texts: + if instr.text and "HYPERLINK" in instr.text: + # Quick regex to extract URL + match = re.search(r'HYPERLINK\s+"([^"]+)"', instr.text, re.IGNORECASE) + if match: + hyperlink_field_url = match.group(1) + + # Process fldChar + for fld_char in fld_chars: + fld_char_type = fld_char.get(qn("w:fldCharType")) + if fld_char_type == "begin": + # Start of a field: reset legacy link state + hyperlink_field_url = None + hyperlink_field_text_parts = [] + is_collecting_field_text = False + elif fld_char_type == "separate": + # Separator: if we found a URL, start collecting visible text + if hyperlink_field_url: + is_collecting_field_text = True + elif fld_char_type == "end": + # End of field + if is_collecting_field_text and hyperlink_field_url: + # Create markdown link and append to main content + display_text = "".join(hyperlink_field_text_parts).strip() + if display_text: + link_md = f"[{display_text}]({hyperlink_field_url})" + paragraph_content.append(link_md) + # Reset state + hyperlink_field_url = None + hyperlink_field_text_parts = [] + is_collecting_field_text = False + + # Decide where to append content + target_buffer = hyperlink_field_text_parts if is_collecting_field_text else paragraph_content + process_run(run, target_buffer) + elif tag == qn("w:hyperlink"): + process_hyperlink(child, paragraph_content) return "".join(paragraph_content) if paragraph_content else "" paragraphs = doc.paragraphs.copy() diff --git a/api/core/rag/index_processor/index_processor_base.py b/api/core/rag/index_processor/index_processor_base.py index 5fc149c1a6..8bbdf8ba39 100644 --- a/api/core/rag/index_processor/index_processor_base.py +++ b/api/core/rag/index_processor/index_processor_base.py @@ -241,7 +241,7 @@ class BaseIndexProcessor(ABC): if not filename: parsed_url = urlparse(image_url) - # unquote 处理 URL 中的中文 + # Decode percent-encoded characters in the URL path. 
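The word_extractor hunk above parses legacy Word HYPERLINK fields with a small state machine over `fldChar` markers: `begin` resets state, `instrText` carries the URL, `separate` starts collecting the visible runs, and `end` emits a markdown link. A runnable sketch of that state machine over an invented flat event stream (the real code walks `w:r` elements instead):

```python
import re

def parse_field_stream(events: list[tuple[str, str]]) -> str:
    url = None
    collecting = False
    text_parts: list[str] = []
    out: list[str] = []
    for kind, value in events:
        if kind == "begin":            # field start: reset link state
            url, text_parts, collecting = None, [], False
        elif kind == "instr" and "HYPERLINK" in value:
            m = re.search(r'HYPERLINK\s+"([^"]+)"', value, re.IGNORECASE)
            if m:
                url = m.group(1)
        elif kind == "separate":       # separator: start collecting visible text
            collecting = url is not None
        elif kind == "run":            # route run text by current state
            (text_parts if collecting else out).append(value)
        elif kind == "end":            # field end: emit markdown link, reset
            if collecting and url:
                display = "".join(text_parts).strip()
                if display:
                    out.append(f"[{display}]({url})")
            url, text_parts, collecting = None, [], False
    return "".join(out)

assert parse_field_stream([
    ("begin", ""), ("instr", 'HYPERLINK "https://example.com"'),
    ("separate", ""), ("run", "Dify"), ("end", ""),
]) == "[Dify](https://example.com)"
```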
path = unquote(parsed_url.path) filename = os.path.basename(path) diff --git a/api/core/rag/pipeline/queue.py b/api/core/rag/pipeline/queue.py index 7472598a7f..bf8db95b4e 100644 --- a/api/core/rag/pipeline/queue.py +++ b/api/core/rag/pipeline/queue.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from collections.abc import Sequence from typing import Any @@ -16,7 +18,7 @@ class TaskWrapper(BaseModel): return self.model_dump_json() @classmethod - def deserialize(cls, serialized_data: str) -> "TaskWrapper": + def deserialize(cls, serialized_data: str) -> TaskWrapper: return cls.model_validate_json(serialized_data) diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index 635eab73f0..f8f85d141a 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -7,7 +7,7 @@ from collections.abc import Generator, Mapping from typing import Any, Union, cast from flask import Flask, current_app -from sqlalchemy import and_, or_, select +from sqlalchemy import and_, literal, or_, select from sqlalchemy.orm import Session from core.app.app_config.entities import ( @@ -151,20 +151,14 @@ class DatasetRetrieval: if ModelFeature.TOOL_CALL in features or ModelFeature.MULTI_TOOL_CALL in features: planning_strategy = PlanningStrategy.ROUTER available_datasets = [] - for dataset_id in dataset_ids: - # get dataset from dataset id - dataset_stmt = select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id) - dataset = db.session.scalar(dataset_stmt) - # pass if dataset is not available - if not dataset: + dataset_stmt = select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id.in_(dataset_ids)) + datasets: list[Dataset] = db.session.execute(dataset_stmt).scalars().all() # type: ignore + for dataset in datasets: + if dataset.available_document_count == 0 and dataset.provider != "external": continue - - # pass if dataset is not available - if dataset and dataset.available_document_count == 0 and dataset.provider != "external": - continue - available_datasets.append(dataset) + if inputs: inputs = {key: str(value) for key, value in inputs.items()} else: @@ -282,26 +276,35 @@ class DatasetRetrieval: ) context_files.append(attachment_info) if show_retrieve_source: + dataset_ids = [record.segment.dataset_id for record in records] + document_ids = [record.segment.document_id for record in records] + dataset_document_stmt = select(DatasetDocument).where( + DatasetDocument.id.in_(document_ids), + DatasetDocument.enabled == True, + DatasetDocument.archived == False, + ) + documents = db.session.execute(dataset_document_stmt).scalars().all() # type: ignore + dataset_stmt = select(Dataset).where( + Dataset.id.in_(dataset_ids), + ) + datasets = db.session.execute(dataset_stmt).scalars().all() # type: ignore + dataset_map = {i.id: i for i in datasets} + document_map = {i.id: i for i in documents} for record in records: segment = record.segment - dataset = db.session.query(Dataset).filter_by(id=segment.dataset_id).first() - dataset_document_stmt = select(DatasetDocument).where( - DatasetDocument.id == segment.document_id, - DatasetDocument.enabled == True, - DatasetDocument.archived == False, - ) - document = db.session.scalar(dataset_document_stmt) - if dataset and document: + dataset_item = dataset_map.get(segment.dataset_id) + document_item = document_map.get(segment.document_id) + if dataset_item and document_item: source = RetrievalSourceMetadata( - dataset_id=dataset.id, - 
dataset_name=dataset.name, - document_id=document.id, - document_name=document.name, - data_source_type=document.data_source_type, + dataset_id=dataset_item.id, + dataset_name=dataset_item.name, + document_id=document_item.id, + document_name=document_item.name, + data_source_type=document_item.data_source_type, segment_id=segment.id, retriever_from=invoke_from.to_source(), score=record.score or 0.0, - doc_metadata=document.doc_metadata, + doc_metadata=document_item.doc_metadata, ) if invoke_from.to_source() == "dev": @@ -512,7 +515,11 @@ class DatasetRetrieval: 0 ].embedding_model_provider weights["vector_setting"]["embedding_model_name"] = available_datasets[0].embedding_model + dataset_count = len(available_datasets) with measure_time() as timer: + cancel_event = threading.Event() + thread_exceptions: list[Exception] = [] + if query: query_thread = threading.Thread( target=self._multiple_retrieve_thread, @@ -531,6 +538,9 @@ class DatasetRetrieval: "score_threshold": score_threshold, "query": query, "attachment_id": None, + "dataset_count": dataset_count, + "cancel_event": cancel_event, + "thread_exceptions": thread_exceptions, }, ) all_threads.append(query_thread) @@ -554,12 +564,26 @@ class DatasetRetrieval: "score_threshold": score_threshold, "query": None, "attachment_id": attachment_id, + "dataset_count": dataset_count, + "cancel_event": cancel_event, + "thread_exceptions": thread_exceptions, }, ) all_threads.append(attachment_thread) attachment_thread.start() - for thread in all_threads: - thread.join() + + # Poll threads with short timeout to detect errors quickly (fail-fast) + while any(t.is_alive() for t in all_threads): + for thread in all_threads: + thread.join(timeout=0.1) + if thread_exceptions: + cancel_event.set() + break + if thread_exceptions: + break + + if thread_exceptions: + raise thread_exceptions[0] self._on_query(query, attachment_ids, dataset_ids, app_id, user_from, user_id) if all_documents: @@ -1033,7 +1057,7 @@ class DatasetRetrieval: if automatic_metadata_filters: conditions = [] for sequence, filter in enumerate(automatic_metadata_filters): - self._process_metadata_filter_func( + self.process_metadata_filter_func( sequence, filter.get("condition"), # type: ignore filter.get("metadata_name"), # type: ignore @@ -1069,7 +1093,7 @@ class DatasetRetrieval: value=expected_value, ) ) - filters = self._process_metadata_filter_func( + filters = self.process_metadata_filter_func( sequence, condition.comparison_operator, metadata_name, @@ -1165,26 +1189,33 @@ class DatasetRetrieval: return None return automatic_metadata_filters - def _process_metadata_filter_func( - self, sequence: int, condition: str, metadata_name: str, value: Any | None, filters: list + @classmethod + def process_metadata_filter_func( + cls, sequence: int, condition: str, metadata_name: str, value: Any | None, filters: list ): if value is None and condition not in ("empty", "not empty"): return filters json_field = DatasetDocument.doc_metadata[metadata_name].as_string() + from libs.helper import escape_like_pattern + match condition: case "contains": - filters.append(json_field.like(f"%{value}%")) + escaped_value = escape_like_pattern(str(value)) + filters.append(json_field.like(f"%{escaped_value}%", escape="\\")) case "not contains": - filters.append(json_field.notlike(f"%{value}%")) + escaped_value = escape_like_pattern(str(value)) + filters.append(json_field.notlike(f"%{escaped_value}%", escape="\\")) case "start with": - filters.append(json_field.like(f"{value}%")) + escaped_value = 
escape_like_pattern(str(value)) + filters.append(json_field.like(f"{escaped_value}%", escape="\\")) case "end with": - filters.append(json_field.like(f"%{value}")) + escaped_value = escape_like_pattern(str(value)) + filters.append(json_field.like(f"%{escaped_value}", escape="\\")) case "is" | "=": if isinstance(value, str): @@ -1215,6 +1246,20 @@ class DatasetRetrieval: case "≥" | ">=": filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() >= value) + case "in" | "not in": + if isinstance(value, str): + value_list = [v.strip() for v in value.split(",") if v.strip()] + elif isinstance(value, (list, tuple)): + value_list = [str(v) for v in value if v is not None] + else: + value_list = [str(value)] if value is not None else [] + + if not value_list: + # `field in []` is False, `field not in []` is True + filters.append(literal(condition == "not in")) + else: + op = json_field.in_ if condition == "in" else json_field.notin_ + filters.append(op(value_list)) case _: pass @@ -1386,69 +1431,89 @@ class DatasetRetrieval: score_threshold: float, query: str | None, attachment_id: str | None, + dataset_count: int, + cancel_event: threading.Event | None = None, + thread_exceptions: list[Exception] | None = None, ): - with flask_app.app_context(): - threads = [] - all_documents_item: list[Document] = [] - index_type = None - for dataset in available_datasets: - index_type = dataset.indexing_technique - document_ids_filter = None - if dataset.provider != "external": - if metadata_condition and not metadata_filter_document_ids: - continue - if metadata_filter_document_ids: - document_ids = metadata_filter_document_ids.get(dataset.id, []) - if document_ids: - document_ids_filter = document_ids - else: + try: + with flask_app.app_context(): + threads = [] + all_documents_item: list[Document] = [] + index_type = None + for dataset in available_datasets: + # Check for cancellation signal + if cancel_event and cancel_event.is_set(): + break + index_type = dataset.indexing_technique + document_ids_filter = None + if dataset.provider != "external": + if metadata_condition and not metadata_filter_document_ids: continue - retrieval_thread = threading.Thread( - target=self._retriever, - kwargs={ - "flask_app": flask_app, - "dataset_id": dataset.id, - "query": query, - "top_k": top_k, - "all_documents": all_documents_item, - "document_ids_filter": document_ids_filter, - "metadata_condition": metadata_condition, - "attachment_ids": [attachment_id] if attachment_id else None, - }, - ) - threads.append(retrieval_thread) - retrieval_thread.start() - for thread in threads: - thread.join() + if metadata_filter_document_ids: + document_ids = metadata_filter_document_ids.get(dataset.id, []) + if document_ids: + document_ids_filter = document_ids + else: + continue + retrieval_thread = threading.Thread( + target=self._retriever, + kwargs={ + "flask_app": flask_app, + "dataset_id": dataset.id, + "query": query, + "top_k": top_k, + "all_documents": all_documents_item, + "document_ids_filter": document_ids_filter, + "metadata_condition": metadata_condition, + "attachment_ids": [attachment_id] if attachment_id else None, + }, + ) + threads.append(retrieval_thread) + retrieval_thread.start() - if reranking_enable: - # do rerank for searched documents - data_post_processor = DataPostProcessor(tenant_id, reranking_mode, reranking_model, weights, False) - if query: - all_documents_item = data_post_processor.invoke( - query=query, - documents=all_documents_item, - score_threshold=score_threshold, - top_n=top_k, - 
query_type=QueryType.TEXT_QUERY, - ) - if attachment_id: - all_documents_item = data_post_processor.invoke( - documents=all_documents_item, - score_threshold=score_threshold, - top_n=top_k, - query_type=QueryType.IMAGE_QUERY, - query=attachment_id, - ) - else: - if index_type == IndexTechniqueType.ECONOMY: - if not query: - all_documents_item = [] - else: - all_documents_item = self.calculate_keyword_score(query, all_documents_item, top_k) - elif index_type == IndexTechniqueType.HIGH_QUALITY: - all_documents_item = self.calculate_vector_score(all_documents_item, top_k, score_threshold) + # Poll threads with short timeout to respond quickly to cancellation + while any(t.is_alive() for t in threads): + for thread in threads: + thread.join(timeout=0.1) + if cancel_event and cancel_event.is_set(): + break + if cancel_event and cancel_event.is_set(): + break + + # Skip second reranking when there is only one dataset + if reranking_enable and dataset_count > 1: + # do rerank for searched documents + data_post_processor = DataPostProcessor(tenant_id, reranking_mode, reranking_model, weights, False) + if query: + all_documents_item = data_post_processor.invoke( + query=query, + documents=all_documents_item, + score_threshold=score_threshold, + top_n=top_k, + query_type=QueryType.TEXT_QUERY, + ) + if attachment_id: + all_documents_item = data_post_processor.invoke( + documents=all_documents_item, + score_threshold=score_threshold, + top_n=top_k, + query_type=QueryType.IMAGE_QUERY, + query=attachment_id, + ) else: - all_documents_item = all_documents_item[:top_k] if top_k else all_documents_item - if all_documents_item: - all_documents.extend(all_documents_item) + if index_type == IndexTechniqueType.ECONOMY: + if not query: + all_documents_item = [] + else: + all_documents_item = self.calculate_keyword_score(query, all_documents_item, top_k) + elif index_type == IndexTechniqueType.HIGH_QUALITY: + all_documents_item = self.calculate_vector_score(all_documents_item, top_k, score_threshold) + else: + all_documents_item = all_documents_item[:top_k] if top_k else all_documents_item + if all_documents_item: + all_documents.extend(all_documents_item) + except Exception as e: + if cancel_event: + cancel_event.set() + if thread_exceptions is not None: + thread_exceptions.append(e) diff --git a/api/core/rag/splitter/fixed_text_splitter.py b/api/core/rag/splitter/fixed_text_splitter.py index 801d2a2a52..b65cb14d8e 100644 --- a/api/core/rag/splitter/fixed_text_splitter.py +++ b/api/core/rag/splitter/fixed_text_splitter.py @@ -2,6 +2,7 @@ from __future__ import annotations +import codecs import re from typing import Any @@ -52,7 +53,7 @@ class FixedRecursiveCharacterTextSplitter(EnhanceRecursiveCharacterTextSplitter) def __init__(self, fixed_separator: str = "\n\n", separators: list[str] | None = None, **kwargs: Any): """Create a new TextSplitter.""" super().__init__(**kwargs) - self._fixed_separator = fixed_separator + self._fixed_separator = codecs.decode(fixed_separator, "unicode_escape") self._separators = separators or ["\n\n", "\n", "。", ". 
", " ", ""] def split_text(self, text: str) -> list[str]: @@ -94,7 +95,8 @@ class FixedRecursiveCharacterTextSplitter(EnhanceRecursiveCharacterTextSplitter) splits = re.split(r" +", text) else: splits = text.split(separator) - splits = [item + separator if i < len(splits) else item for i, item in enumerate(splits)] + if self._keep_separator: + splits = [s + separator for s in splits[:-1]] + splits[-1:] else: splits = list(text) if separator == "\n": @@ -103,7 +105,7 @@ class FixedRecursiveCharacterTextSplitter(EnhanceRecursiveCharacterTextSplitter) splits = [s for s in splits if (s not in {"", "\n"})] _good_splits = [] _good_splits_lengths = [] # cache the lengths of the splits - _separator = separator if self._keep_separator else "" + _separator = "" if self._keep_separator else separator s_lens = self._length_function(splits) if separator != "": for s, s_len in zip(splits, s_lens): diff --git a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py index 4436773d25..a45d1d1046 100644 --- a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py +++ b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py @@ -29,6 +29,7 @@ from models import ( Account, CreatorUserRole, EndUser, + LLMGenerationDetail, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom, ) @@ -457,6 +458,113 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) session.merge(db_model) session.flush() + # Save LLMGenerationDetail for LLM nodes with successful execution + if ( + domain_model.node_type == NodeType.LLM + and domain_model.status == WorkflowNodeExecutionStatus.SUCCEEDED + and domain_model.outputs is not None + ): + self._save_llm_generation_detail(session, domain_model) + + def _save_llm_generation_detail(self, session, execution: WorkflowNodeExecution) -> None: + """ + Save LLM generation detail for LLM nodes. + Extracts reasoning_content, tool_calls, and sequence from outputs and metadata. 
+ """ + outputs = execution.outputs or {} + metadata = execution.metadata or {} + + reasoning_list = self._extract_reasoning(outputs) + tool_calls_list = self._extract_tool_calls(metadata.get(WorkflowNodeExecutionMetadataKey.AGENT_LOG)) + + if not reasoning_list and not tool_calls_list: + return + + sequence = self._build_generation_sequence(outputs.get("text", ""), reasoning_list, tool_calls_list) + self._upsert_generation_detail(session, execution, reasoning_list, tool_calls_list, sequence) + + def _extract_reasoning(self, outputs: Mapping[str, Any]) -> list[str]: + """Extract reasoning_content as a clean list of non-empty strings.""" + reasoning_content = outputs.get("reasoning_content") + if isinstance(reasoning_content, str): + trimmed = reasoning_content.strip() + return [trimmed] if trimmed else [] + if isinstance(reasoning_content, list): + return [item.strip() for item in reasoning_content if isinstance(item, str) and item.strip()] + return [] + + def _extract_tool_calls(self, agent_log: Any) -> list[dict[str, str]]: + """Extract tool call records from agent logs.""" + if not agent_log or not isinstance(agent_log, list): + return [] + + tool_calls: list[dict[str, str]] = [] + for log in agent_log: + log_data = log.data if hasattr(log, "data") else (log.get("data", {}) if isinstance(log, dict) else {}) + tool_name = log_data.get("tool_name") + if tool_name and str(tool_name).strip(): + tool_calls.append( + { + "id": log_data.get("tool_call_id", ""), + "name": tool_name, + "arguments": json.dumps(log_data.get("tool_args", {})), + "result": str(log_data.get("output", "")), + } + ) + return tool_calls + + def _build_generation_sequence( + self, text: str, reasoning_list: list[str], tool_calls_list: list[dict[str, str]] + ) -> list[dict[str, Any]]: + """Build a simple content/reasoning/tool_call sequence.""" + sequence: list[dict[str, Any]] = [] + if text: + sequence.append({"type": "content", "start": 0, "end": len(text)}) + for index in range(len(reasoning_list)): + sequence.append({"type": "reasoning", "index": index}) + for index in range(len(tool_calls_list)): + sequence.append({"type": "tool_call", "index": index}) + return sequence + + def _upsert_generation_detail( + self, + session, + execution: WorkflowNodeExecution, + reasoning_list: list[str], + tool_calls_list: list[dict[str, str]], + sequence: list[dict[str, Any]], + ) -> None: + """Insert or update LLMGenerationDetail with serialized fields.""" + existing = ( + session.query(LLMGenerationDetail) + .filter_by( + workflow_run_id=execution.workflow_execution_id, + node_id=execution.node_id, + ) + .first() + ) + + reasoning_json = json.dumps(reasoning_list) if reasoning_list else None + tool_calls_json = json.dumps(tool_calls_list) if tool_calls_list else None + sequence_json = json.dumps(sequence) if sequence else None + + if existing: + existing.reasoning_content = reasoning_json + existing.tool_calls = tool_calls_json + existing.sequence = sequence_json + return + + generation_detail = LLMGenerationDetail( + tenant_id=self._tenant_id, + app_id=self._app_id, + workflow_run_id=execution.workflow_execution_id, + node_id=execution.node_id, + reasoning_content=reasoning_json, + tool_calls=tool_calls_json, + sequence=sequence_json, + ) + session.add(generation_detail) + def get_db_models_by_workflow_run( self, workflow_run_id: str, diff --git a/api/core/schemas/registry.py b/api/core/schemas/registry.py index 51bfae1cd3..b4ecfe47ff 100644 --- a/api/core/schemas/registry.py +++ b/api/core/schemas/registry.py @@ -1,9 +1,11 @@ 
+from __future__ import annotations + import json import logging import threading from collections.abc import Mapping, MutableMapping from pathlib import Path -from typing import Any, ClassVar, Optional +from typing import Any, ClassVar class SchemaRegistry: @@ -11,7 +13,7 @@ class SchemaRegistry: logger: ClassVar[logging.Logger] = logging.getLogger(__name__) - _default_instance: ClassVar[Optional["SchemaRegistry"]] = None + _default_instance: ClassVar[SchemaRegistry | None] = None _lock: ClassVar[threading.Lock] = threading.Lock() def __init__(self, base_dir: str): @@ -20,7 +22,7 @@ class SchemaRegistry: self.metadata: MutableMapping[str, MutableMapping[str, Any]] = {} @classmethod - def default_registry(cls) -> "SchemaRegistry": + def default_registry(cls) -> SchemaRegistry: """Returns the default schema registry for builtin schemas (thread-safe singleton)""" if cls._default_instance is None: with cls._lock: diff --git a/api/core/tools/__base/tool.py b/api/core/tools/__base/tool.py index 8ca4eabb7a..24fc11aefc 100644 --- a/api/core/tools/__base/tool.py +++ b/api/core/tools/__base/tool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from abc import ABC, abstractmethod from collections.abc import Generator from copy import deepcopy @@ -6,6 +8,7 @@ from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from models.model import File +from core.model_runtime.entities.message_entities import PromptMessageTool from core.tools.__base.tool_runtime import ToolRuntime from core.tools.entities.tool_entities import ( ToolEntity, @@ -24,7 +27,7 @@ class Tool(ABC): self.entity = entity self.runtime = runtime - def fork_tool_runtime(self, runtime: ToolRuntime) -> "Tool": + def fork_tool_runtime(self, runtime: ToolRuntime) -> Tool: """ fork a new tool with metadata :return: the new tool @@ -152,6 +155,60 @@ class Tool(ABC): return parameters + def to_prompt_message_tool(self) -> PromptMessageTool: + message_tool = PromptMessageTool( + name=self.entity.identity.name, + description=self.entity.description.llm if self.entity.description else "", + parameters={ + "type": "object", + "properties": {}, + "required": [], + }, + ) + + parameters = self.get_merged_runtime_parameters() + for parameter in parameters: + if parameter.form != ToolParameter.ToolParameterForm.LLM: + continue + + parameter_type = parameter.type.as_normal_type() + if parameter.type in { + ToolParameter.ToolParameterType.SYSTEM_FILES, + ToolParameter.ToolParameterType.FILE, + ToolParameter.ToolParameterType.FILES, + }: + # Determine the description based on parameter type + if parameter.type == ToolParameter.ToolParameterType.FILE: + file_format_desc = " Input the file id with format: [File: file_id]." + else: + file_format_desc = "Input the file id with format: [Files: file_id1, file_id2, ...]. 
" + + message_tool.parameters["properties"][parameter.name] = { + "type": "string", + "description": (parameter.llm_description or "") + file_format_desc, + } + continue + enum = [] + if parameter.type == ToolParameter.ToolParameterType.SELECT: + enum = [option.value for option in parameter.options] if parameter.options else [] + + message_tool.parameters["properties"][parameter.name] = ( + { + "type": parameter_type, + "description": parameter.llm_description or "", + } + if parameter.input_schema is None + else parameter.input_schema + ) + + if len(enum) > 0: + message_tool.parameters["properties"][parameter.name]["enum"] = enum + + if parameter.required: + message_tool.parameters["required"].append(parameter.name) + + return message_tool + def create_image_message( self, image: str, @@ -166,7 +223,7 @@ class Tool(ABC): type=ToolInvokeMessage.MessageType.IMAGE, message=ToolInvokeMessage.TextMessage(text=image) ) - def create_file_message(self, file: "File") -> ToolInvokeMessage: + def create_file_message(self, file: File) -> ToolInvokeMessage: return ToolInvokeMessage( type=ToolInvokeMessage.MessageType.FILE, message=ToolInvokeMessage.FileMessage(), diff --git a/api/core/tools/builtin_tool/tool.py b/api/core/tools/builtin_tool/tool.py index 84efefba07..51b0407886 100644 --- a/api/core/tools/builtin_tool/tool.py +++ b/api/core/tools/builtin_tool/tool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from core.model_runtime.entities.llm_entities import LLMResult from core.model_runtime.entities.message_entities import PromptMessage, SystemPromptMessage, UserPromptMessage from core.tools.__base.tool import Tool @@ -24,7 +26,7 @@ class BuiltinTool(Tool): super().__init__(**kwargs) self.provider = provider - def fork_tool_runtime(self, runtime: ToolRuntime) -> "BuiltinTool": + def fork_tool_runtime(self, runtime: ToolRuntime) -> BuiltinTool: """ fork a new tool with metadata :return: the new tool diff --git a/api/core/tools/custom_tool/provider.py b/api/core/tools/custom_tool/provider.py index 0cc992155a..e2f6c00555 100644 --- a/api/core/tools/custom_tool/provider.py +++ b/api/core/tools/custom_tool/provider.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pydantic import Field from sqlalchemy import select @@ -32,7 +34,7 @@ class ApiToolProviderController(ToolProviderController): self.tools = [] @classmethod - def from_db(cls, db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> "ApiToolProviderController": + def from_db(cls, db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> ApiToolProviderController: credentials_schema = [ ProviderConfig( name="auth_type", diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 353f3a646a..b5c7a6310c 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import base64 import contextlib from collections.abc import Mapping @@ -55,7 +57,7 @@ class ToolProviderType(StrEnum): MCP = auto() @classmethod - def value_of(cls, value: str) -> "ToolProviderType": + def value_of(cls, value: str) -> ToolProviderType: """ Get value of given mode. @@ -79,7 +81,7 @@ class ApiProviderSchemaType(StrEnum): OPENAI_ACTIONS = auto() @classmethod - def value_of(cls, value: str) -> "ApiProviderSchemaType": + def value_of(cls, value: str) -> ApiProviderSchemaType: """ Get value of given mode. 
@@ -102,7 +104,7 @@ class ApiProviderAuthType(StrEnum): API_KEY_QUERY = auto() @classmethod - def value_of(cls, value: str) -> "ApiProviderAuthType": + def value_of(cls, value: str) -> ApiProviderAuthType: """ Get value of given mode. @@ -153,11 +155,11 @@ class ToolInvokeMessage(BaseModel): @classmethod def transform_variable_value(cls, values): """ - Only basic types and lists are allowed. + Only basic types, lists, and None are allowed. """ value = values.get("variable_value") - if not isinstance(value, dict | list | str | int | float | bool): - raise ValueError("Only basic types and lists are allowed.") + if value is not None and not isinstance(value, dict | list | str | int | float | bool): + raise ValueError("Only basic types, lists, and None are allowed.") # if stream is true, the value must be a string if values.get("stream"): @@ -307,7 +309,7 @@ class ToolParameter(PluginParameter): typ: ToolParameterType, required: bool, options: list[str] | None = None, - ) -> "ToolParameter": + ) -> ToolParameter: """ get a simple tool parameter @@ -429,14 +431,14 @@ class ToolInvokeMeta(BaseModel): tool_config: dict | None = None @classmethod - def empty(cls) -> "ToolInvokeMeta": + def empty(cls) -> ToolInvokeMeta: """ Get an empty instance of ToolInvokeMeta """ return cls(time_cost=0.0, error=None, tool_config={}) @classmethod - def error_instance(cls, error: str) -> "ToolInvokeMeta": + def error_instance(cls, error: str) -> ToolInvokeMeta: """ Get an instance of ToolInvokeMeta with error """ diff --git a/api/core/tools/mcp_tool/tool.py b/api/core/tools/mcp_tool/tool.py index fbaf31ad09..ef9e9c103a 100644 --- a/api/core/tools/mcp_tool/tool.py +++ b/api/core/tools/mcp_tool/tool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import base64 import json import logging @@ -6,7 +8,15 @@ from typing import Any from core.mcp.auth_client import MCPClientWithAuthRetry from core.mcp.error import MCPConnectionError -from core.mcp.types import AudioContent, CallToolResult, ImageContent, TextContent +from core.mcp.types import ( + AudioContent, + BlobResourceContents, + CallToolResult, + EmbeddedResource, + ImageContent, + TextContent, + TextResourceContents, +) from core.tools.__base.tool import Tool from core.tools.__base.tool_runtime import ToolRuntime from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolProviderType @@ -53,10 +63,19 @@ class MCPTool(Tool): for content in result.content: if isinstance(content, TextContent): yield from self._process_text_content(content) - elif isinstance(content, ImageContent): - yield self._process_image_content(content) - elif isinstance(content, AudioContent): - yield self._process_audio_content(content) + elif isinstance(content, ImageContent | AudioContent): + yield self.create_blob_message( + blob=base64.b64decode(content.data), meta={"mime_type": content.mimeType} + ) + elif isinstance(content, EmbeddedResource): + resource = content.resource + if isinstance(resource, TextResourceContents): + yield self.create_text_message(resource.text) + elif isinstance(resource, BlobResourceContents): + mime_type = resource.mimeType or "application/octet-stream" + yield self.create_blob_message(blob=base64.b64decode(resource.blob), meta={"mime_type": mime_type}) + else: + raise ToolInvokeError(f"Unsupported embedded resource type: {type(resource)}") else: logger.warning("Unsupported content type=%s", type(content)) @@ -101,15 +120,7 @@ class MCPTool(Tool): for item in json_list: yield self.create_json_message(item) - def 
_process_image_content(self, content: ImageContent) -> ToolInvokeMessage: - """Process image content and return a blob message.""" - return self.create_blob_message(blob=base64.b64decode(content.data), meta={"mime_type": content.mimeType}) - - def _process_audio_content(self, content: AudioContent) -> ToolInvokeMessage: - """Process audio content and return a blob message.""" - return self.create_blob_message(blob=base64.b64decode(content.data), meta={"mime_type": content.mimeType}) - - def fork_tool_runtime(self, runtime: ToolRuntime) -> "MCPTool": + def fork_tool_runtime(self, runtime: ToolRuntime) -> MCPTool: return MCPTool( entity=self.entity, runtime=runtime, diff --git a/api/core/tools/plugin_tool/tool.py b/api/core/tools/plugin_tool/tool.py index 828dc3b810..d3a2ad488c 100644 --- a/api/core/tools/plugin_tool/tool.py +++ b/api/core/tools/plugin_tool/tool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Generator from typing import Any @@ -46,7 +48,7 @@ class PluginTool(Tool): message_id=message_id, ) - def fork_tool_runtime(self, runtime: ToolRuntime) -> "PluginTool": + def fork_tool_runtime(self, runtime: ToolRuntime) -> PluginTool: return PluginTool( entity=self.entity, runtime=runtime, diff --git a/api/core/tools/signature.py b/api/core/tools/signature.py index fef3157f27..22e099deba 100644 --- a/api/core/tools/signature.py +++ b/api/core/tools/signature.py @@ -7,12 +7,12 @@ import time from configs import dify_config -def sign_tool_file(tool_file_id: str, extension: str) -> str: +def sign_tool_file(tool_file_id: str, extension: str, for_external: bool = True) -> str: """ sign file to get a temporary url for plugin access """ - # Use internal URL for plugin/tool file access in Docker environments - base_url = dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL + # Use internal URL for plugin/tool file access in Docker environments, unless for_external is True + base_url = dify_config.FILES_URL if for_external else (dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL) file_preview_url = f"{base_url}/files/tools/{tool_file_id}{extension}" timestamp = str(int(time.time())) diff --git a/api/core/tools/utils/parser.py b/api/core/tools/utils/parser.py index 3486182192..584975de05 100644 --- a/api/core/tools/utils/parser.py +++ b/api/core/tools/utils/parser.py @@ -378,7 +378,7 @@ class ApiBasedToolSchemaParser: @staticmethod def auto_parse_to_tool_bundle( content: str, extra_info: dict | None = None, warning: dict | None = None - ) -> tuple[list[ApiToolBundle], str]: + ) -> tuple[list[ApiToolBundle], ApiProviderSchemaType]: """ auto parse to tool bundle diff --git a/api/core/tools/utils/text_processing_utils.py b/api/core/tools/utils/text_processing_utils.py index 0f9a91a111..4bfaa5e49b 100644 --- a/api/core/tools/utils/text_processing_utils.py +++ b/api/core/tools/utils/text_processing_utils.py @@ -4,6 +4,7 @@ import re def remove_leading_symbols(text: str) -> str: """ Remove leading punctuation or symbols from the given text. + Preserves markdown links like [text](url) at the start. Args: text (str): The input text to process. @@ -11,6 +12,11 @@ def remove_leading_symbols(text: str) -> str: Returns: str: The text with leading punctuation or symbols removed. 
""" + # Check if text starts with a markdown link - preserve it + markdown_link_pattern = r"^\[([^\]]+)\]\((https?://[^)]+)\)" + if re.match(markdown_link_pattern, text): + return text + # Match Unicode ranges for punctuation and symbols # FIXME this pattern is confused quick fix for #11868 maybe refactor it later pattern = r'^[\[\]\u2000-\u2025\u2027-\u206F\u2E00-\u2E7F\u3000-\u300F\u3011-\u303F"#$%&\'()*+,./:;<=>?@^_`~]+' diff --git a/api/core/tools/workflow_as_tool/provider.py b/api/core/tools/workflow_as_tool/provider.py index 0439fb1d60..a706f101ca 100644 --- a/api/core/tools/workflow_as_tool/provider.py +++ b/api/core/tools/workflow_as_tool/provider.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from pydantic import Field @@ -5,6 +7,7 @@ from sqlalchemy.orm import Session from core.app.app_config.entities import VariableEntity, VariableEntityType from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager +from core.db.session_factory import session_factory from core.plugin.entities.parameters import PluginParameterOption from core.tools.__base.tool_provider import ToolProviderController from core.tools.__base.tool_runtime import ToolRuntime @@ -46,34 +49,30 @@ class WorkflowToolProviderController(ToolProviderController): self.provider_id = provider_id @classmethod - def from_db(cls, db_provider: WorkflowToolProvider) -> "WorkflowToolProviderController": - with Session(db.engine, expire_on_commit=False) as session, session.begin(): - provider = session.get(WorkflowToolProvider, db_provider.id) if db_provider.id else None - if not provider: - raise ValueError("workflow provider not found") - app = session.get(App, provider.app_id) + def from_db(cls, db_provider: WorkflowToolProvider) -> WorkflowToolProviderController: + with session_factory.create_session() as session, session.begin(): + app = session.get(App, db_provider.app_id) if not app: raise ValueError("app not found") - user = session.get(Account, provider.user_id) if provider.user_id else None - + user = session.get(Account, db_provider.user_id) if db_provider.user_id else None controller = WorkflowToolProviderController( entity=ToolProviderEntity( identity=ToolProviderIdentity( author=user.name if user else "", - name=provider.label, - label=I18nObject(en_US=provider.label, zh_Hans=provider.label), - description=I18nObject(en_US=provider.description, zh_Hans=provider.description), - icon=provider.icon, + name=db_provider.label, + label=I18nObject(en_US=db_provider.label, zh_Hans=db_provider.label), + description=I18nObject(en_US=db_provider.description, zh_Hans=db_provider.description), + icon=db_provider.icon, ), credentials_schema=[], plugin_id=None, ), - provider_id=provider.id or "", + provider_id=db_provider.id, ) controller.tools = [ - controller._get_db_provider_tool(provider, app, session=session, user=user), + controller._get_db_provider_tool(db_provider, app, session=session, user=user), ] return controller diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index 30334f5da8..81a1d54199 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json import logging from collections.abc import Generator, Mapping, Sequence @@ -181,7 +183,7 @@ class WorkflowTool(Tool): return found return None - def fork_tool_runtime(self, runtime: ToolRuntime) -> "WorkflowTool": + def fork_tool_runtime(self, runtime: ToolRuntime) 
-> WorkflowTool: """ fork a new tool with metadata diff --git a/api/core/trigger/utils/encryption.py b/api/core/trigger/utils/encryption.py index 026a65aa23..b12291e299 100644 --- a/api/core/trigger/utils/encryption.py +++ b/api/core/trigger/utils/encryption.py @@ -67,12 +67,16 @@ def create_trigger_provider_encrypter_for_subscription( def delete_cache_for_subscription(tenant_id: str, provider_id: str, subscription_id: str): - cache = TriggerProviderCredentialsCache( + TriggerProviderCredentialsCache( tenant_id=tenant_id, provider_id=provider_id, credential_id=subscription_id, - ) - cache.delete() + ).delete() + TriggerProviderPropertiesCache( + tenant_id=tenant_id, + provider_id=provider_id, + subscription_id=subscription_id, + ).delete() def create_trigger_provider_encrypter_for_properties( diff --git a/api/core/variables/types.py b/api/core/variables/types.py index ce71711344..13b926c978 100644 --- a/api/core/variables/types.py +++ b/api/core/variables/types.py @@ -1,6 +1,8 @@ +from __future__ import annotations + from collections.abc import Mapping from enum import StrEnum -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from core.file.models import File @@ -52,7 +54,7 @@ class SegmentType(StrEnum): return self in _ARRAY_TYPES @classmethod - def infer_segment_type(cls, value: Any) -> Optional["SegmentType"]: + def infer_segment_type(cls, value: Any) -> SegmentType | None: """ Attempt to infer the `SegmentType` based on the Python type of the `value` parameter. @@ -173,7 +175,7 @@ class SegmentType(StrEnum): raise AssertionError("this statement should be unreachable.") @staticmethod - def cast_value(value: Any, type_: "SegmentType"): + def cast_value(value: Any, type_: SegmentType): # Cast Python's `bool` type to `int` when the runtime type requires # an integer or number. # @@ -193,7 +195,7 @@ class SegmentType(StrEnum): return [int(i) for i in value] return value - def exposed_type(self) -> "SegmentType": + def exposed_type(self) -> SegmentType: """Returns the type exposed to the frontend. The frontend treats `INTEGER` and `FLOAT` as `NUMBER`, so these are returned as `NUMBER` here. @@ -202,7 +204,7 @@ class SegmentType(StrEnum): return SegmentType.NUMBER return self - def element_type(self) -> "SegmentType | None": + def element_type(self) -> SegmentType | None: """Return the element type of the current segment type, or `None` if the element type is undefined. Raises: @@ -217,7 +219,7 @@ class SegmentType(StrEnum): return _ARRAY_ELEMENT_TYPES_MAPPING.get(self) @staticmethod - def get_zero_value(t: "SegmentType"): + def get_zero_value(t: SegmentType): # Lazy import to avoid circular dependency from factories import variable_factory diff --git a/api/core/workflow/README.md b/api/core/workflow/README.md index 72f5dbe1e2..9a39f976a6 100644 --- a/api/core/workflow/README.md +++ b/api/core/workflow/README.md @@ -64,6 +64,9 @@ engine.layer(DebugLoggingLayer(level="INFO")) engine.layer(ExecutionLimitsLayer(max_nodes=100)) ``` +`engine.layer()` binds the read-only runtime state before execution, so layer hooks +can assume `graph_runtime_state` is available. 
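Because `engine.layer()` now binds the read-only runtime state at registration time, a layer's hooks can read `graph_runtime_state` without a `None` guard. A sketch of a custom layer under that contract; `TokenBudgetLayer` and its budget threshold are hypothetical, while the base class, event type, and `total_tokens` field come from the code in this PR:

```python
import logging

from typing_extensions import override

from core.workflow.graph_engine.layers import GraphEngineLayer
from core.workflow.graph_events import GraphEngineEvent

logger = logging.getLogger(__name__)


class TokenBudgetLayer(GraphEngineLayer):
    """Hypothetical layer that warns once total token usage crosses a budget."""

    def __init__(self, budget: int) -> None:
        super().__init__()
        self._budget = budget
        self._warned = False

    @override
    def on_graph_start(self) -> None:
        self._warned = False

    @override
    def on_event(self, event: GraphEngineEvent) -> None:
        # No None check needed: accessing graph_runtime_state before
        # registration raises GraphEngineLayerNotInitializedError instead.
        if not self._warned and self.graph_runtime_state.total_tokens > self._budget:
            logger.warning("Token budget %d exceeded", self._budget)
            self._warned = True

    @override
    def on_graph_end(self, error: Exception | None) -> None:
        logger.info("Run finished with %d total tokens", self.graph_runtime_state.total_tokens)


# engine.layer(TokenBudgetLayer(budget=100_000))  # binds state immediately
```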
+
 ### Event-Driven Architecture
 
 All node executions emit events for monitoring and integration:
diff --git a/api/core/workflow/entities/__init__.py b/api/core/workflow/entities/__init__.py
index be70e467a0..0f3b9a5239 100644
--- a/api/core/workflow/entities/__init__.py
+++ b/api/core/workflow/entities/__init__.py
@@ -1,11 +1,16 @@
 from .agent import AgentNodeStrategyInit
 from .graph_init_params import GraphInitParams
+from .tool_entities import ToolCall, ToolCallResult, ToolResult, ToolResultStatus
 from .workflow_execution import WorkflowExecution
 from .workflow_node_execution import WorkflowNodeExecution
 
 __all__ = [
     "AgentNodeStrategyInit",
     "GraphInitParams",
+    "ToolCall",
+    "ToolCallResult",
+    "ToolResult",
+    "ToolResultStatus",
     "WorkflowExecution",
     "WorkflowNodeExecution",
 ]
diff --git a/api/core/workflow/entities/tool_entities.py b/api/core/workflow/entities/tool_entities.py
new file mode 100644
index 0000000000..7e71a86849
--- /dev/null
+++ b/api/core/workflow/entities/tool_entities.py
@@ -0,0 +1,39 @@
+from enum import StrEnum
+
+from pydantic import BaseModel, Field
+
+from core.file import File
+
+
+class ToolResultStatus(StrEnum):
+    SUCCESS = "success"
+    ERROR = "error"
+
+
+class ToolCall(BaseModel):
+    id: str | None = Field(default=None, description="Unique identifier for this tool call")
+    name: str | None = Field(default=None, description="Name of the tool being called")
+    arguments: str | None = Field(default=None, description="Accumulated tool arguments JSON")
+    icon: str | dict | None = Field(default=None, description="Icon of the tool")
+    icon_dark: str | dict | None = Field(default=None, description="Dark theme icon of the tool")
+
+
+class ToolResult(BaseModel):
+    id: str | None = Field(default=None, description="Identifier of the tool call this result belongs to")
+    name: str | None = Field(default=None, description="Name of the tool")
+    output: str | None = Field(default=None, description="Tool output text, error or success message")
+    files: list[str] = Field(default_factory=list, description="Files produced by the tool")
+    status: ToolResultStatus | None = Field(default=ToolResultStatus.SUCCESS, description="Tool execution status")
+    elapsed_time: float | None = Field(default=None, description="Elapsed seconds spent executing the tool")
+    icon: str | dict | None = Field(default=None, description="Icon of the tool")
+    icon_dark: str | dict | None = Field(default=None, description="Dark theme icon of the tool")
+
+
+class ToolCallResult(BaseModel):
+    id: str | None = Field(default=None, description="Identifier for the tool call")
+    name: str | None = Field(default=None, description="Name of the tool")
+    arguments: str | None = Field(default=None, description="Accumulated tool arguments JSON")
+    output: str | None = Field(default=None, description="Tool output text, error or success message")
+    files: list[File] = Field(default_factory=list, description="Files produced by the tool")
+    status: ToolResultStatus = Field(default=ToolResultStatus.SUCCESS, description="Tool execution status")
+    elapsed_time: float | None = Field(default=None, description="Elapsed seconds spent executing the tool")
diff --git a/api/core/workflow/entities/workflow_execution.py b/api/core/workflow/entities/workflow_execution.py
index a8a86d3db2..1b3fb36f1f 100644
--- a/api/core/workflow/entities/workflow_execution.py
+++ b/api/core/workflow/entities/workflow_execution.py
@@ -5,6 +5,8 @@
 Models are independent of the storage mechanism and don't contain
 implementation details like tenant_id, app_id, etc.
""" +from __future__ import annotations + from collections.abc import Mapping from datetime import datetime from typing import Any @@ -59,7 +61,7 @@ class WorkflowExecution(BaseModel): graph: Mapping[str, Any], inputs: Mapping[str, Any], started_at: datetime, - ) -> "WorkflowExecution": + ) -> WorkflowExecution: return WorkflowExecution( id_=id_, workflow_id=workflow_id, diff --git a/api/core/workflow/enums.py b/api/core/workflow/enums.py index cf12d5ec1f..5ea7cf1a07 100644 --- a/api/core/workflow/enums.py +++ b/api/core/workflow/enums.py @@ -247,6 +247,9 @@ class WorkflowNodeExecutionMetadataKey(StrEnum): ERROR_STRATEGY = "error_strategy" # node in continue on error mode return the field LOOP_VARIABLE_MAP = "loop_variable_map" # single loop variable output DATASOURCE_INFO = "datasource_info" + LLM_CONTENT_SEQUENCE = "llm_content_sequence" + LLM_TRACE = "llm_trace" + COMPLETED_REASON = "completed_reason" # completed reason for loop node class WorkflowNodeExecutionStatus(StrEnum): diff --git a/api/core/workflow/graph/graph.py b/api/core/workflow/graph/graph.py index ba5a01fc94..7be94c2426 100644 --- a/api/core/workflow/graph/graph.py +++ b/api/core/workflow/graph/graph.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging from collections import defaultdict from collections.abc import Mapping, Sequence @@ -175,7 +177,7 @@ class Graph: def _create_node_instances( cls, node_configs_map: dict[str, dict[str, object]], - node_factory: "NodeFactory", + node_factory: NodeFactory, ) -> dict[str, Node]: """ Create node instances from configurations using the node factory. @@ -197,7 +199,7 @@ class Graph: return nodes @classmethod - def new(cls) -> "GraphBuilder": + def new(cls) -> GraphBuilder: """Create a fluent builder for assembling a graph programmatically.""" return GraphBuilder(graph_cls=cls) @@ -284,9 +286,9 @@ class Graph: cls, *, graph_config: Mapping[str, object], - node_factory: "NodeFactory", + node_factory: NodeFactory, root_node_id: str | None = None, - ) -> "Graph": + ) -> Graph: """ Initialize graph @@ -383,7 +385,7 @@ class GraphBuilder: self._edges: list[Edge] = [] self._edge_counter = 0 - def add_root(self, node: Node) -> "GraphBuilder": + def add_root(self, node: Node) -> GraphBuilder: """Register the root node. Must be called exactly once.""" if self._nodes: @@ -398,7 +400,7 @@ class GraphBuilder: *, from_node_id: str | None = None, source_handle: str = "source", - ) -> "GraphBuilder": + ) -> GraphBuilder: """Append a node and connect it from the specified predecessor.""" if not self._nodes: @@ -419,7 +421,7 @@ class GraphBuilder: return self - def connect(self, *, tail: str, head: str, source_handle: str = "source") -> "GraphBuilder": + def connect(self, *, tail: str, head: str, source_handle: str = "source") -> GraphBuilder: """Connect two existing nodes without adding a new node.""" if tail not in self._nodes_by_id: diff --git a/api/core/workflow/graph_engine/command_channels/redis_channel.py b/api/core/workflow/graph_engine/command_channels/redis_channel.py index 4be3adb8f8..0fccd4a0fd 100644 --- a/api/core/workflow/graph_engine/command_channels/redis_channel.py +++ b/api/core/workflow/graph_engine/command_channels/redis_channel.py @@ -9,7 +9,7 @@ Each instance uses a unique key for its command queue. 
import json from typing import TYPE_CHECKING, Any, final -from ..entities.commands import AbortCommand, CommandType, GraphEngineCommand, PauseCommand +from ..entities.commands import AbortCommand, CommandType, GraphEngineCommand, PauseCommand, UpdateVariablesCommand if TYPE_CHECKING: from extensions.ext_redis import RedisClientWrapper @@ -113,6 +113,8 @@ class RedisChannel: return AbortCommand.model_validate(data) if command_type == CommandType.PAUSE: return PauseCommand.model_validate(data) + if command_type == CommandType.UPDATE_VARIABLES: + return UpdateVariablesCommand.model_validate(data) # For other command types, use base class return GraphEngineCommand.model_validate(data) diff --git a/api/core/workflow/graph_engine/command_processing/__init__.py b/api/core/workflow/graph_engine/command_processing/__init__.py index 837f5e55fd..7b4f0dfff7 100644 --- a/api/core/workflow/graph_engine/command_processing/__init__.py +++ b/api/core/workflow/graph_engine/command_processing/__init__.py @@ -5,11 +5,12 @@ This package handles external commands sent to the engine during execution. """ -from .command_handlers import AbortCommandHandler, PauseCommandHandler +from .command_handlers import AbortCommandHandler, PauseCommandHandler, UpdateVariablesCommandHandler from .command_processor import CommandProcessor __all__ = [ "AbortCommandHandler", "CommandProcessor", "PauseCommandHandler", + "UpdateVariablesCommandHandler", ] diff --git a/api/core/workflow/graph_engine/command_processing/command_handlers.py b/api/core/workflow/graph_engine/command_processing/command_handlers.py index e9f109c88c..cfe856d9e8 100644 --- a/api/core/workflow/graph_engine/command_processing/command_handlers.py +++ b/api/core/workflow/graph_engine/command_processing/command_handlers.py @@ -4,9 +4,10 @@ from typing import final from typing_extensions import override from core.workflow.entities.pause_reason import SchedulingPause +from core.workflow.runtime import VariablePool from ..domain.graph_execution import GraphExecution -from ..entities.commands import AbortCommand, GraphEngineCommand, PauseCommand +from ..entities.commands import AbortCommand, GraphEngineCommand, PauseCommand, UpdateVariablesCommand from .command_processor import CommandHandler logger = logging.getLogger(__name__) @@ -31,3 +32,25 @@ class PauseCommandHandler(CommandHandler): reason = command.reason pause_reason = SchedulingPause(message=reason) execution.pause(pause_reason) + + +@final +class UpdateVariablesCommandHandler(CommandHandler): + def __init__(self, variable_pool: VariablePool) -> None: + self._variable_pool = variable_pool + + @override + def handle(self, command: GraphEngineCommand, execution: GraphExecution) -> None: + assert isinstance(command, UpdateVariablesCommand) + for update in command.updates: + try: + variable = update.value + self._variable_pool.add(variable.selector, variable) + logger.debug("Updated variable %s for workflow %s", variable.selector, execution.workflow_id) + except ValueError as exc: + logger.warning( + "Skipping invalid variable selector %s for workflow %s: %s", + getattr(update.value, "selector", None), + execution.workflow_id, + exc, + ) diff --git a/api/core/workflow/graph_engine/entities/commands.py b/api/core/workflow/graph_engine/entities/commands.py index 0d51b2b716..6dce03c94d 100644 --- a/api/core/workflow/graph_engine/entities/commands.py +++ b/api/core/workflow/graph_engine/entities/commands.py @@ -5,17 +5,21 @@ This module defines command types that can be sent to a running GraphEngine instance to 
control its execution flow. """ -from enum import StrEnum +from collections.abc import Sequence +from enum import StrEnum, auto from typing import Any from pydantic import BaseModel, Field +from core.variables.variables import VariableUnion + class CommandType(StrEnum): """Types of commands that can be sent to GraphEngine.""" - ABORT = "abort" - PAUSE = "pause" + ABORT = auto() + PAUSE = auto() + UPDATE_VARIABLES = auto() class GraphEngineCommand(BaseModel): @@ -37,3 +41,16 @@ class PauseCommand(GraphEngineCommand): command_type: CommandType = Field(default=CommandType.PAUSE, description="Type of command") reason: str = Field(default="unknown reason", description="reason for pause") + + +class VariableUpdate(BaseModel): + """Represents a single variable update instruction.""" + + value: VariableUnion = Field(description="New variable value") + + +class UpdateVariablesCommand(GraphEngineCommand): + """Command to update a group of variables in the variable pool.""" + + command_type: CommandType = Field(default=CommandType.UPDATE_VARIABLES, description="Type of command") + updates: Sequence[VariableUpdate] = Field(default_factory=list, description="Variable updates") diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index a4b2df2a8c..9a870d7bf5 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -5,9 +5,12 @@ This engine uses a modular architecture with separated packages following Domain-Driven Design principles for improved maintainability and testability. """ +from __future__ import annotations + import contextvars import logging import queue +import threading from collections.abc import Generator from typing import TYPE_CHECKING, cast, final @@ -30,8 +33,13 @@ from core.workflow.runtime import GraphRuntimeState, ReadOnlyGraphRuntimeStateWr if TYPE_CHECKING: # pragma: no cover - used only for static analysis from core.workflow.runtime.graph_runtime_state import GraphProtocol -from .command_processing import AbortCommandHandler, CommandProcessor, PauseCommandHandler -from .entities.commands import AbortCommand, PauseCommand +from .command_processing import ( + AbortCommandHandler, + CommandProcessor, + PauseCommandHandler, + UpdateVariablesCommandHandler, +) +from .entities.commands import AbortCommand, PauseCommand, UpdateVariablesCommand from .error_handler import ErrorHandler from .event_management import EventHandler, EventManager from .graph_state_manager import GraphStateManager @@ -70,10 +78,13 @@ class GraphEngine: scale_down_idle_time: float | None = None, ) -> None: """Initialize the graph engine with all subsystems and dependencies.""" + # stop event + self._stop_event = threading.Event() # Bind runtime state to current workflow context self._graph = graph self._graph_runtime_state = graph_runtime_state + self._graph_runtime_state.stop_event = self._stop_event self._graph_runtime_state.configure(graph=cast("GraphProtocol", graph)) self._command_channel = command_channel @@ -140,6 +151,13 @@ class GraphEngine: pause_handler = PauseCommandHandler() self._command_processor.register_handler(PauseCommand, pause_handler) + update_variables_handler = UpdateVariablesCommandHandler(self._graph_runtime_state.variable_pool) + self._command_processor.register_handler(UpdateVariablesCommand, update_variables_handler) + + # === Extensibility === + # Layers allow plugins to extend engine functionality + self._layers: list[GraphEngineLayer] = [] + # === Worker Pool Setup === 
# Capture Flask app context for worker threads flask_app: Flask | None = None @@ -158,12 +176,14 @@ class GraphEngine: ready_queue=self._ready_queue, event_queue=self._event_queue, graph=self._graph, + layers=self._layers, flask_app=flask_app, context_vars=context_vars, min_workers=self._min_workers, max_workers=self._max_workers, scale_up_threshold=self._scale_up_threshold, scale_down_idle_time=self._scale_down_idle_time, + stop_event=self._stop_event, ) # === Orchestration === @@ -194,12 +214,9 @@ class GraphEngine: event_handler=self._event_handler_registry, execution_coordinator=self._execution_coordinator, event_emitter=self._event_manager, + stop_event=self._stop_event, ) - # === Extensibility === - # Layers allow plugins to extend engine functionality - self._layers: list[GraphEngineLayer] = [] - # === Validation === # Ensure all nodes share the same GraphRuntimeState instance self._validate_graph_state_consistency() @@ -211,9 +228,16 @@ class GraphEngine: if id(node.graph_runtime_state) != expected_state_id: raise ValueError(f"GraphRuntimeState consistency violation: Node '{node.id}' has a different instance") - def layer(self, layer: GraphEngineLayer) -> "GraphEngine": + def _bind_layer_context( + self, + layer: GraphEngineLayer, + ) -> None: + layer.initialize(ReadOnlyGraphRuntimeStateWrapper(self._graph_runtime_state), self._command_channel) + + def layer(self, layer: GraphEngineLayer) -> GraphEngine: """Add a layer for extending functionality.""" self._layers.append(layer) + self._bind_layer_context(layer) return self def run(self) -> Generator[GraphEngineEvent, None, None]: @@ -300,14 +324,7 @@ class GraphEngine: def _initialize_layers(self) -> None: """Initialize layers with context.""" self._event_manager.set_layers(self._layers) - # Create a read-only wrapper for the runtime state - read_only_state = ReadOnlyGraphRuntimeStateWrapper(self._graph_runtime_state) for layer in self._layers: - try: - layer.initialize(read_only_state, self._command_channel) - except Exception as e: - logger.warning("Failed to initialize layer %s: %s", layer.__class__.__name__, e) - try: layer.on_graph_start() except Exception as e: @@ -315,6 +332,7 @@ class GraphEngine: def _start_execution(self, *, resume: bool = False) -> None: """Start execution subsystems.""" + self._stop_event.clear() paused_nodes: list[str] = [] if resume: paused_nodes = self._graph_runtime_state.consume_paused_nodes() @@ -342,13 +360,12 @@ class GraphEngine: def _stop_execution(self) -> None: """Stop execution subsystems.""" + self._stop_event.set() self._dispatcher.stop() self._worker_pool.stop() # Don't mark complete here as the dispatcher already does it # Notify layers - logger = logging.getLogger(__name__) - for layer in self._layers: try: layer.on_graph_end(self._graph_execution.error) diff --git a/api/core/workflow/graph_engine/graph_traversal/skip_propagator.py b/api/core/workflow/graph_engine/graph_traversal/skip_propagator.py index 78f8ecdcdf..b9c9243963 100644 --- a/api/core/workflow/graph_engine/graph_traversal/skip_propagator.py +++ b/api/core/workflow/graph_engine/graph_traversal/skip_propagator.py @@ -60,6 +60,7 @@ class SkipPropagator: if edge_states["has_taken"]: # Enqueue node self._state_manager.enqueue_node(downstream_node_id) + self._state_manager.start_execution(downstream_node_id) return # All edges are skipped, propagate skip to this node diff --git a/api/core/workflow/graph_engine/layers/README.md b/api/core/workflow/graph_engine/layers/README.md index 17845ee1f0..b0f295037c 100644 --- 
a/api/core/workflow/graph_engine/layers/README.md +++ b/api/core/workflow/graph_engine/layers/README.md @@ -8,7 +8,7 @@ Pluggable middleware for engine extensions. Abstract base class for layers. -- `initialize()` - Receive runtime context +- `initialize()` - Receive runtime context (runtime state is bound here and always available to hooks) - `on_graph_start()` - Execution start hook - `on_event()` - Process all events - `on_graph_end()` - Execution end hook @@ -34,6 +34,9 @@ engine.layer(debug_layer) engine.run() ``` +`engine.layer()` binds the read-only runtime state before execution, so +`graph_runtime_state` is always available inside layer hooks. + ## Custom Layers ```python diff --git a/api/core/workflow/graph_engine/layers/__init__.py b/api/core/workflow/graph_engine/layers/__init__.py index 0a29a52993..772433e48c 100644 --- a/api/core/workflow/graph_engine/layers/__init__.py +++ b/api/core/workflow/graph_engine/layers/__init__.py @@ -8,9 +8,11 @@ with middleware-like components that can observe events and interact with execut from .base import GraphEngineLayer from .debug_logging import DebugLoggingLayer from .execution_limits import ExecutionLimitsLayer +from .observability import ObservabilityLayer __all__ = [ "DebugLoggingLayer", "ExecutionLimitsLayer", "GraphEngineLayer", + "ObservabilityLayer", ] diff --git a/api/core/workflow/graph_engine/layers/base.py b/api/core/workflow/graph_engine/layers/base.py index 24c12c2934..89293b9b30 100644 --- a/api/core/workflow/graph_engine/layers/base.py +++ b/api/core/workflow/graph_engine/layers/base.py @@ -9,9 +9,18 @@ from abc import ABC, abstractmethod from core.workflow.graph_engine.protocols.command_channel import CommandChannel from core.workflow.graph_events import GraphEngineEvent +from core.workflow.nodes.base.node import Node from core.workflow.runtime import ReadOnlyGraphRuntimeState +class GraphEngineLayerNotInitializedError(Exception): + """Raised when a layer's runtime state is accessed before initialization.""" + + def __init__(self, layer_name: str | None = None) -> None: + name = layer_name or "GraphEngineLayer" + super().__init__(f"{name} runtime state is not initialized. Bind the layer to a GraphEngine before access.") + + class GraphEngineLayer(ABC): """ Abstract base class for GraphEngine layers. @@ -27,22 +36,27 @@ class GraphEngineLayer(ABC): def __init__(self) -> None: """Initialize the layer. Subclasses can override with custom parameters.""" - self.graph_runtime_state: ReadOnlyGraphRuntimeState | None = None + self._graph_runtime_state: ReadOnlyGraphRuntimeState | None = None self.command_channel: CommandChannel | None = None + @property + def graph_runtime_state(self) -> ReadOnlyGraphRuntimeState: + if self._graph_runtime_state is None: + raise GraphEngineLayerNotInitializedError(type(self).__name__) + return self._graph_runtime_state + def initialize(self, graph_runtime_state: ReadOnlyGraphRuntimeState, command_channel: CommandChannel) -> None: """ Initialize the layer with engine dependencies. - Called by GraphEngine before execution starts to inject the read-only runtime state - and command channel. This allows layers to observe engine context and send - commands, but prevents direct state modification. - + Called by GraphEngine to inject the read-only runtime state and command channel. + This is invoked when the layer is registered with a `GraphEngine` instance. + Implementations should be idempotent. 
Args: graph_runtime_state: Read-only view of the runtime state command_channel: Channel for sending commands to the engine """ - self.graph_runtime_state = graph_runtime_state + self._graph_runtime_state = graph_runtime_state self.command_channel = command_channel @abstractmethod @@ -83,3 +97,29 @@ class GraphEngineLayer(ABC): error: The exception that caused execution to fail, or None if successful """ pass + + def on_node_run_start(self, node: Node) -> None: # noqa: B027 + """ + Called immediately before a node begins execution. + + Layers can override to inject behavior (e.g., start spans) prior to node execution. + The node's execution ID is available via `node._node_execution_id` and will be + consistent with all events emitted by this node execution. + + Args: + node: The node instance about to be executed + """ + pass + + def on_node_run_end(self, node: Node, error: Exception | None) -> None: # noqa: B027 + """ + Called after a node finishes execution. + + The node's execution ID is available via `node._node_execution_id` and matches + the `id` field in all events emitted by this node execution. + + Args: + node: The node instance that just finished execution + error: Exception instance if the node failed, otherwise None + """ + pass diff --git a/api/core/workflow/graph_engine/layers/debug_logging.py b/api/core/workflow/graph_engine/layers/debug_logging.py index 034ebcf54f..e0402cd09c 100644 --- a/api/core/workflow/graph_engine/layers/debug_logging.py +++ b/api/core/workflow/graph_engine/layers/debug_logging.py @@ -109,10 +109,8 @@ class DebugLoggingLayer(GraphEngineLayer): self.logger.info("=" * 80) self.logger.info("🚀 GRAPH EXECUTION STARTED") self.logger.info("=" * 80) - - if self.graph_runtime_state: - # Log initial state - self.logger.info("Initial State:") + # Log initial state + self.logger.info("Initial State:") @override def on_event(self, event: GraphEngineEvent) -> None: @@ -243,8 +241,7 @@ class DebugLoggingLayer(GraphEngineLayer): self.logger.info(" Node retries: %s", self.retry_count) # Log final state if available - if self.graph_runtime_state and self.include_outputs: - if self.graph_runtime_state.outputs: - self.logger.info("Final outputs: %s", self._format_dict(self.graph_runtime_state.outputs)) + if self.include_outputs and self.graph_runtime_state.outputs: + self.logger.info("Final outputs: %s", self._format_dict(self.graph_runtime_state.outputs)) self.logger.info("=" * 80) diff --git a/api/core/workflow/graph_engine/layers/node_parsers.py b/api/core/workflow/graph_engine/layers/node_parsers.py new file mode 100644 index 0000000000..b6bac794df --- /dev/null +++ b/api/core/workflow/graph_engine/layers/node_parsers.py @@ -0,0 +1,61 @@ +""" +Node-level OpenTelemetry parser interfaces and defaults. +""" + +import json +from typing import Protocol + +from opentelemetry.trace import Span +from opentelemetry.trace.status import Status, StatusCode + +from core.workflow.nodes.base.node import Node +from core.workflow.nodes.tool.entities import ToolNodeData + + +class NodeOTelParser(Protocol): + """Parser interface for node-specific OpenTelemetry enrichment.""" + + def parse(self, *, node: Node, span: "Span", error: Exception | None) -> None: ... 
+
+
+class DefaultNodeOTelParser:
+    """Fallback parser used when no node-specific parser is registered."""
+
+    def parse(self, *, node: Node, span: "Span", error: Exception | None) -> None:
+        span.set_attribute("node.id", node.id)
+        if node.execution_id:
+            span.set_attribute("node.execution_id", node.execution_id)
+        if hasattr(node, "node_type") and node.node_type:
+            span.set_attribute("node.type", node.node_type.value)
+
+        if error:
+            span.record_exception(error)
+            span.set_status(Status(StatusCode.ERROR, str(error)))
+        else:
+            span.set_status(Status(StatusCode.OK))
+
+
+class ToolNodeOTelParser:
+    """Parser for tool nodes that captures tool-specific metadata."""
+
+    def __init__(self) -> None:
+        self._delegate = DefaultNodeOTelParser()
+
+    def parse(self, *, node: Node, span: "Span", error: Exception | None) -> None:
+        self._delegate.parse(node=node, span=span, error=error)
+
+        tool_data = getattr(node, "_node_data", None)
+        if not isinstance(tool_data, ToolNodeData):
+            return
+
+        span.set_attribute("tool.provider.id", tool_data.provider_id)
+        span.set_attribute("tool.provider.type", tool_data.provider_type.value)
+        span.set_attribute("tool.provider.name", tool_data.provider_name)
+        span.set_attribute("tool.name", tool_data.tool_name)
+        span.set_attribute("tool.label", tool_data.tool_label)
+        if tool_data.plugin_unique_identifier:
+            span.set_attribute("tool.plugin.id", tool_data.plugin_unique_identifier)
+        if tool_data.credential_id:
+            span.set_attribute("tool.credential.id", tool_data.credential_id)
+        if tool_data.tool_configurations:
+            span.set_attribute("tool.config", json.dumps(tool_data.tool_configurations, ensure_ascii=False))
diff --git a/api/core/workflow/graph_engine/layers/observability.py b/api/core/workflow/graph_engine/layers/observability.py
new file mode 100644
index 0000000000..a674816884
--- /dev/null
+++ b/api/core/workflow/graph_engine/layers/observability.py
@@ -0,0 +1,169 @@
+"""
+Observability layer for GraphEngine.
+
+This layer creates OpenTelemetry spans for node execution, enabling distributed
+tracing of workflow execution. It establishes OTel context during node execution
+so that automatic instrumentation (HTTP requests, DB queries, etc.) is
+associated with the node span.
+"""
+
+import logging
+from dataclasses import dataclass
+from typing import cast, final
+
+from opentelemetry import context as context_api
+from opentelemetry.trace import Span, SpanKind, Tracer, get_tracer, set_span_in_context
+from typing_extensions import override
+
+from configs import dify_config
+from core.workflow.enums import NodeType
+from core.workflow.graph_engine.layers.base import GraphEngineLayer
+from core.workflow.graph_engine.layers.node_parsers import (
+    DefaultNodeOTelParser,
+    NodeOTelParser,
+    ToolNodeOTelParser,
+)
+from core.workflow.nodes.base.node import Node
+from extensions.otel.runtime import is_instrument_flag_enabled
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass(slots=True)
+class _NodeSpanContext:
+    span: "Span"
+    token: object
+
+
+@final
+class ObservabilityLayer(GraphEngineLayer):
+    """
+    Layer that creates OpenTelemetry spans for node execution.
+
+    This layer:
+    - Creates a span when a node starts execution
+    - Establishes OTel context so automatic instrumentation associates with the span
+    - Sets final attributes and status when node execution ends
+    """
+
+    def __init__(self) -> None:
+        super().__init__()
+        self._node_contexts: dict[str, _NodeSpanContext] = {}
+        self._parsers: dict[NodeType, NodeOTelParser] = {}
+        self._default_parser: NodeOTelParser = cast(NodeOTelParser, DefaultNodeOTelParser())
+        self._is_disabled: bool = False
+        self._tracer: Tracer | None = None
+        self._build_parser_registry()
+        self._init_tracer()
+
+    def _init_tracer(self) -> None:
+        """Initialize the OpenTelemetry tracer in the constructor."""
+        if not (dify_config.ENABLE_OTEL or is_instrument_flag_enabled()):
+            self._is_disabled = True
+            return
+
+        try:
+            self._tracer = get_tracer(__name__)
+        except Exception as e:
+            logger.warning("Failed to get OpenTelemetry tracer: %s", e)
+            self._is_disabled = True
+
+    def _build_parser_registry(self) -> None:
+        """Initialize parser registry for node types."""
+        self._parsers = {
+            NodeType.TOOL: ToolNodeOTelParser(),
+        }
+
+    def _get_parser(self, node: Node) -> NodeOTelParser:
+        node_type = getattr(node, "node_type", None)
+        if isinstance(node_type, NodeType):
+            return self._parsers.get(node_type, self._default_parser)
+        return self._default_parser
+
+    @override
+    def on_graph_start(self) -> None:
+        """Called when graph execution starts."""
+        self._node_contexts.clear()
+
+    @override
+    def on_node_run_start(self, node: Node) -> None:
+        """
+        Called when a node starts execution.
+
+        Creates a span and establishes OTel context for automatic instrumentation.
+        """
+        if self._is_disabled:
+            return
+
+        try:
+            if not self._tracer:
+                return
+
+            execution_id = node.execution_id
+            if not execution_id:
+                return
+
+            parent_context = context_api.get_current()
+            span = self._tracer.start_span(
+                node.title,
+                kind=SpanKind.INTERNAL,
+                context=parent_context,
+            )
+
+            new_context = set_span_in_context(span)
+            token = context_api.attach(new_context)
+
+            self._node_contexts[execution_id] = _NodeSpanContext(span=span, token=token)
+
+        except Exception as e:
+            logger.warning("Failed to create OpenTelemetry span for node %s: %s", node.id, e)
+
+    @override
+    def on_node_run_end(self, node: Node, error: Exception | None) -> None:
+        """
+        Called when a node finishes execution.
+
+        Sets final attributes, records exceptions, and ends the span.
+ """ + if self._is_disabled: + return + + try: + execution_id = node.execution_id + if not execution_id: + return + node_context = self._node_contexts.get(execution_id) + if not node_context: + return + + span = node_context.span + parser = self._get_parser(node) + try: + parser.parse(node=node, span=span, error=error) + span.end() + finally: + token = node_context.token + if token is not None: + try: + context_api.detach(token) + except Exception: + logger.warning("Failed to detach OpenTelemetry token: %s", token) + self._node_contexts.pop(execution_id, None) + + except Exception as e: + logger.warning("Failed to end OpenTelemetry span for node %s: %s", node.id, e) + + @override + def on_event(self, event) -> None: + """Not used in this layer.""" + pass + + @override + def on_graph_end(self, error: Exception | None) -> None: + """Called when graph execution ends.""" + if self._node_contexts: + logger.warning( + "ObservabilityLayer: %d node spans were not properly ended", + len(self._node_contexts), + ) + self._node_contexts.clear() diff --git a/api/core/workflow/graph_engine/layers/persistence.py b/api/core/workflow/graph_engine/layers/persistence.py index b70f36ec9e..e81df4f3b7 100644 --- a/api/core/workflow/graph_engine/layers/persistence.py +++ b/api/core/workflow/graph_engine/layers/persistence.py @@ -337,8 +337,6 @@ class WorkflowPersistenceLayer(GraphEngineLayer): if update_finished: execution.finished_at = naive_utc_now() runtime_state = self.graph_runtime_state - if runtime_state is None: - return execution.total_tokens = runtime_state.total_tokens execution.total_steps = runtime_state.node_run_steps execution.outputs = execution.outputs or runtime_state.outputs @@ -404,6 +402,4 @@ class WorkflowPersistenceLayer(GraphEngineLayer): def _system_variables(self) -> Mapping[str, Any]: runtime_state = self.graph_runtime_state - if runtime_state is None: - return {} return runtime_state.variable_pool.get_by_prefix(SYSTEM_VARIABLE_NODE_ID) diff --git a/api/core/workflow/graph_engine/manager.py b/api/core/workflow/graph_engine/manager.py index 0577ba8f02..d2cfa755d9 100644 --- a/api/core/workflow/graph_engine/manager.py +++ b/api/core/workflow/graph_engine/manager.py @@ -3,14 +3,20 @@ GraphEngine Manager for sending control commands via Redis channel. This module provides a simplified interface for controlling workflow executions using the new Redis command channel, without requiring user permission checks. -Supports stop, pause, and resume operations. """ import logging +from collections.abc import Sequence from typing import final from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel -from core.workflow.graph_engine.entities.commands import AbortCommand, GraphEngineCommand, PauseCommand +from core.workflow.graph_engine.entities.commands import ( + AbortCommand, + GraphEngineCommand, + PauseCommand, + UpdateVariablesCommand, + VariableUpdate, +) from extensions.ext_redis import redis_client logger = logging.getLogger(__name__) @@ -23,7 +29,6 @@ class GraphEngineManager: This class provides a simple interface for controlling workflow executions by sending commands through Redis channels, without user validation. - Supports stop and pause operations. 
""" @staticmethod @@ -45,6 +50,16 @@ class GraphEngineManager: pause_command = PauseCommand(reason=reason or "User requested pause") GraphEngineManager._send_command(task_id, pause_command) + @staticmethod + def send_update_variables_command(task_id: str, updates: Sequence[VariableUpdate]) -> None: + """Send a command to update variables in a running workflow.""" + + if not updates: + return + + update_command = UpdateVariablesCommand(updates=updates) + GraphEngineManager._send_command(task_id, update_command) + @staticmethod def _send_command(task_id: str, command: GraphEngineCommand) -> None: """Send a command to the workflow-specific Redis channel.""" diff --git a/api/core/workflow/graph_engine/orchestration/dispatcher.py b/api/core/workflow/graph_engine/orchestration/dispatcher.py index 334a3f77bf..27439a2412 100644 --- a/api/core/workflow/graph_engine/orchestration/dispatcher.py +++ b/api/core/workflow/graph_engine/orchestration/dispatcher.py @@ -44,6 +44,7 @@ class Dispatcher: event_queue: queue.Queue[GraphNodeEventBase], event_handler: "EventHandler", execution_coordinator: ExecutionCoordinator, + stop_event: threading.Event, event_emitter: EventManager | None = None, ) -> None: """ @@ -61,7 +62,7 @@ class Dispatcher: self._event_emitter = event_emitter self._thread: threading.Thread | None = None - self._stop_event = threading.Event() + self._stop_event = stop_event self._start_time: float | None = None def start(self) -> None: @@ -69,16 +70,14 @@ class Dispatcher: if self._thread and self._thread.is_alive(): return - self._stop_event.clear() self._start_time = time.time() self._thread = threading.Thread(target=self._dispatcher_loop, name="GraphDispatcher", daemon=True) self._thread.start() def stop(self) -> None: """Stop the dispatcher thread.""" - self._stop_event.set() if self._thread and self._thread.is_alive(): - self._thread.join(timeout=10.0) + self._thread.join(timeout=2.0) def _dispatcher_loop(self) -> None: """Main dispatcher loop.""" diff --git a/api/core/workflow/graph_engine/ready_queue/factory.py b/api/core/workflow/graph_engine/ready_queue/factory.py index 1144e1de69..a9d4f470e5 100644 --- a/api/core/workflow/graph_engine/ready_queue/factory.py +++ b/api/core/workflow/graph_engine/ready_queue/factory.py @@ -2,6 +2,8 @@ Factory for creating ReadyQueue instances from serialized state. """ +from __future__ import annotations + from typing import TYPE_CHECKING from .in_memory import InMemoryReadyQueue @@ -11,7 +13,7 @@ if TYPE_CHECKING: from .protocol import ReadyQueue -def create_ready_queue_from_state(state: ReadyQueueState) -> "ReadyQueue": +def create_ready_queue_from_state(state: ReadyQueueState) -> ReadyQueue: """ Create a ReadyQueue instance from a serialized state. 
diff --git a/api/core/workflow/graph_engine/response_coordinator/coordinator.py b/api/core/workflow/graph_engine/response_coordinator/coordinator.py index 98e0ea91ef..c5ea94ba80 100644 --- a/api/core/workflow/graph_engine/response_coordinator/coordinator.py +++ b/api/core/workflow/graph_engine/response_coordinator/coordinator.py @@ -16,7 +16,13 @@ from pydantic import BaseModel, Field from core.workflow.enums import NodeExecutionType, NodeState from core.workflow.graph import Graph -from core.workflow.graph_events import NodeRunStreamChunkEvent, NodeRunSucceededEvent +from core.workflow.graph_events import ( + ChunkType, + NodeRunStreamChunkEvent, + NodeRunSucceededEvent, + ToolCall, + ToolResult, +) from core.workflow.nodes.base.template import TextSegment, VariableSegment from core.workflow.runtime import VariablePool @@ -321,11 +327,24 @@ class ResponseStreamCoordinator: selector: Sequence[str], chunk: str, is_final: bool = False, + chunk_type: ChunkType = ChunkType.TEXT, + tool_call: ToolCall | None = None, + tool_result: ToolResult | None = None, ) -> NodeRunStreamChunkEvent: """Create a stream chunk event with consistent structure. For selectors with special prefixes (sys, env, conversation), we use the active response node's information since these are not actual node IDs. + + Args: + node_id: The node ID to attribute the event to + execution_id: The execution ID for this node + selector: The variable selector + chunk: The chunk content + is_final: Whether this is the final chunk + chunk_type: The semantic type of the chunk being streamed + tool_call: Structured data for tool_call chunks + tool_result: Structured data for tool_result chunks """ # Check if this is a special selector that doesn't correspond to a node if selector and selector[0] not in self._graph.nodes and self._active_session: @@ -338,6 +357,9 @@ class ResponseStreamCoordinator: selector=selector, chunk=chunk, is_final=is_final, + chunk_type=chunk_type, + tool_call=tool_call, + tool_result=tool_result, ) # Standard case: selector refers to an actual node @@ -349,6 +371,9 @@ class ResponseStreamCoordinator: selector=selector, chunk=chunk, is_final=is_final, + chunk_type=chunk_type, + tool_call=tool_call, + tool_result=tool_result, ) def _process_variable_segment(self, segment: VariableSegment) -> tuple[Sequence[NodeRunStreamChunkEvent], bool]: @@ -356,6 +381,8 @@ class ResponseStreamCoordinator: Handles both regular node selectors and special system selectors (sys, env, conversation). For special selectors, we attribute the output to the active response node. + + For object-type variables, automatically streams all child fields that have stream events. 
""" events: list[NodeRunStreamChunkEvent] = [] source_selector_prefix = segment.selector[0] if segment.selector else "" @@ -364,60 +391,81 @@ class ResponseStreamCoordinator: # Determine which node to attribute the output to # For special selectors (sys, env, conversation), use the active response node # For regular selectors, use the source node - if self._active_session and source_selector_prefix not in self._graph.nodes: - # Special selector - use active response node - output_node_id = self._active_session.node_id - else: - # Regular node selector - output_node_id = source_selector_prefix + active_session = self._active_session + special_selector = bool(active_session and source_selector_prefix not in self._graph.nodes) + output_node_id = active_session.node_id if special_selector and active_session else source_selector_prefix execution_id = self._get_or_create_execution_id(output_node_id) - # Stream all available chunks - while self._has_unread_stream(segment.selector): - if event := self._pop_stream_chunk(segment.selector): - # For special selectors, we need to update the event to use - # the active response node's information - if self._active_session and source_selector_prefix not in self._graph.nodes: - response_node = self._graph.nodes[self._active_session.node_id] - # Create a new event with the response node's information - # but keep the original selector - updated_event = NodeRunStreamChunkEvent( - id=execution_id, - node_id=response_node.id, - node_type=response_node.node_type, - selector=event.selector, # Keep original selector - chunk=event.chunk, - is_final=event.is_final, - ) - events.append(updated_event) - else: - # Regular node selector - use event as is - events.append(event) + # Check if there's a direct stream for this selector + has_direct_stream = ( + tuple(segment.selector) in self._stream_buffers or tuple(segment.selector) in self._closed_streams + ) - # Check if this is the last chunk by looking ahead - stream_closed = self._is_stream_closed(segment.selector) - # Check if stream is closed to determine if segment is complete - if stream_closed: - is_complete = True + stream_targets = [segment.selector] if has_direct_stream else sorted(self._find_child_streams(segment.selector)) - elif value := self._variable_pool.get(segment.selector): - # Process scalar value - is_last_segment = bool( - self._active_session and self._active_session.index == len(self._active_session.template.segments) - 1 - ) - events.append( - self._create_stream_chunk_event( - node_id=output_node_id, - execution_id=execution_id, - selector=segment.selector, - chunk=value.markdown, - is_final=is_last_segment, + if stream_targets: + all_complete = True + + for target_selector in stream_targets: + while self._has_unread_stream(target_selector): + if event := self._pop_stream_chunk(target_selector): + events.append( + self._rewrite_stream_event( + event=event, + output_node_id=output_node_id, + execution_id=execution_id, + special_selector=bool(special_selector), + ) + ) + + if not self._is_stream_closed(target_selector): + all_complete = False + + is_complete = all_complete + + # Fallback: check if scalar value exists in variable pool + if not is_complete and not has_direct_stream: + if value := self._variable_pool.get(segment.selector): + # Process scalar value + is_last_segment = bool( + self._active_session + and self._active_session.index == len(self._active_session.template.segments) - 1 ) - ) - is_complete = True + events.append( + self._create_stream_chunk_event( + node_id=output_node_id, 
+ execution_id=execution_id, + selector=segment.selector, + chunk=value.markdown, + is_final=is_last_segment, + ) + ) + is_complete = True return events, is_complete + def _rewrite_stream_event( + self, + event: NodeRunStreamChunkEvent, + output_node_id: str, + execution_id: str, + special_selector: bool, + ) -> NodeRunStreamChunkEvent: + """Rewrite event to attribute to active response node when selector is special.""" + if not special_selector: + return event + + return self._create_stream_chunk_event( + node_id=output_node_id, + execution_id=execution_id, + selector=event.selector, + chunk=event.chunk, + is_final=event.is_final, + chunk_type=event.chunk_type, + tool_call=event.tool_call, + tool_result=event.tool_result, + ) + def _process_text_segment(self, segment: TextSegment) -> Sequence[NodeRunStreamChunkEvent]: """Process a text segment. Returns (events, is_complete).""" assert self._active_session is not None @@ -513,6 +561,36 @@ class ResponseStreamCoordinator: # ============= Internal Stream Management Methods ============= + def _find_child_streams(self, parent_selector: Sequence[str]) -> list[tuple[str, ...]]: + """Find all child stream selectors that are descendants of the parent selector. + + For example, if parent_selector is ['llm', 'generation'], this will find: + - ['llm', 'generation', 'content'] + - ['llm', 'generation', 'tool_calls'] + - ['llm', 'generation', 'tool_results'] + - ['llm', 'generation', 'thought'] + + Args: + parent_selector: The parent selector to search for children + + Returns: + List of child selector tuples found in stream buffers or closed streams + """ + parent_key = tuple(parent_selector) + parent_len = len(parent_key) + child_streams: set[tuple[str, ...]] = set() + + # Search in both active buffers and closed streams + all_selectors = set(self._stream_buffers.keys()) | self._closed_streams + + for selector_key in all_selectors: + # Check if this selector is a direct child of the parent + # Direct child means: len(child) == len(parent) + 1 and child starts with parent + if len(selector_key) == parent_len + 1 and selector_key[:parent_len] == parent_key: + child_streams.add(selector_key) + + return sorted(child_streams) + def _append_stream_chunk(self, selector: Sequence[str], event: NodeRunStreamChunkEvent) -> None: """ Append a stream chunk to the internal buffer. diff --git a/api/core/workflow/graph_engine/response_coordinator/session.py b/api/core/workflow/graph_engine/response_coordinator/session.py index 8b7c2e441e..8ceaa428c3 100644 --- a/api/core/workflow/graph_engine/response_coordinator/session.py +++ b/api/core/workflow/graph_engine/response_coordinator/session.py @@ -5,6 +5,8 @@ This module contains the private ResponseSession class used internally by ResponseStreamCoordinator to manage streaming sessions. """ +from __future__ import annotations + from dataclasses import dataclass from core.workflow.nodes.answer.answer_node import AnswerNode @@ -27,7 +29,7 @@ class ResponseSession: index: int = 0 # Current position in the template segments @classmethod - def from_node(cls, node: Node) -> "ResponseSession": + def from_node(cls, node: Node) -> ResponseSession: """ Create a ResponseSession from an AnswerNode or EndNode. 
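A quick aside on the child-stream fan-out added above: a selector only picks up *direct* children (exactly one segment longer), so grandchild streams and unrelated selectors never get pulled into a parent's stream targets. The snippet below is a minimal, self-contained sketch of that matching rule; the free function and the sample selectors are illustrative stand-ins, not part of the patch.

```python
# Sketch of the direct-child rule used by _find_child_streams: a selector is a
# direct child of the parent when it is exactly one segment longer and shares
# the parent as its prefix.
def find_child_streams(
    parent_selector: tuple[str, ...],
    known_selectors: set[tuple[str, ...]],
) -> list[tuple[str, ...]]:
    parent_len = len(parent_selector)
    return sorted(
        selector
        for selector in known_selectors
        if len(selector) == parent_len + 1 and selector[:parent_len] == parent_selector
    )


known = {
    ("llm", "generation", "content"),
    ("llm", "generation", "tool_calls"),
    ("llm", "generation", "content", "extra"),  # grandchild: not a direct child
    ("other", "text"),  # unrelated selector
}
assert find_child_streams(("llm", "generation"), known) == [
    ("llm", "generation", "content"),
    ("llm", "generation", "tool_calls"),
]
```

Restricting the rule to direct children is what lets the coordinator treat a parent selector like `['llm', 'generation']` as complete only once every child stream (`content`, `tool_calls`, ...) is closed.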
diff --git a/api/core/workflow/graph_engine/worker.py b/api/core/workflow/graph_engine/worker.py
index 73e59ee298..83419830b6 100644
--- a/api/core/workflow/graph_engine/worker.py
+++ b/api/core/workflow/graph_engine/worker.py
@@ -9,6 +9,7 @@ import contextvars
 import queue
 import threading
 import time
+from collections.abc import Sequence
 from datetime import datetime
 from typing import final
 from uuid import uuid4
@@ -17,6 +18,7 @@ from flask import Flask
 from typing_extensions import override
 
 from core.workflow.graph import Graph
+from core.workflow.graph_engine.layers.base import GraphEngineLayer
 from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent
 from core.workflow.nodes.base.node import Node
 from libs.flask_utils import preserve_flask_contexts
@@ -39,6 +41,8 @@ class Worker(threading.Thread):
         ready_queue: ReadyQueue,
         event_queue: queue.Queue[GraphNodeEventBase],
         graph: Graph,
+        layers: Sequence[GraphEngineLayer],
+        stop_event: threading.Event,
         worker_id: int = 0,
         flask_app: Flask | None = None,
         context_vars: contextvars.Context | None = None,
@@ -50,6 +54,8 @@
             ready_queue: Ready queue containing node IDs ready for execution
             event_queue: Queue for pushing execution events
             graph: Graph containing nodes to execute
+            layers: Graph engine layers for node execution hooks
+            stop_event: Shared event from GraphEngine used to signal this worker to stop
             worker_id: Unique identifier for this worker
             flask_app: Optional Flask application for context preservation
             context_vars: Optional context variables to preserve in worker thread
@@ -61,12 +67,16 @@
         self._worker_id = worker_id
         self._flask_app = flask_app
         self._context_vars = context_vars
-        self._stop_event = threading.Event()
         self._last_task_time = time.time()
+        self._stop_event = stop_event
+        self._layers = layers if layers is not None else []
 
     def stop(self) -> None:
-        """Signal the worker to stop processing."""
-        self._stop_event.set()
+        """Worker is controlled via shared stop_event from GraphEngine.
+
+        This method is a no-op retained for backward compatibility.
+ """ + pass @property def is_idle(self) -> bool: @@ -122,20 +131,51 @@ class Worker(threading.Thread): Args: node: The node instance to execute """ - # Execute the node with preserved context if Flask app is provided + node.ensure_execution_id() + + error: Exception | None = None + if self._flask_app and self._context_vars: with preserve_flask_contexts( flask_app=self._flask_app, context_vars=self._context_vars, ): - # Execute the node + self._invoke_node_run_start_hooks(node) + try: + node_events = node.run() + for event in node_events: + self._event_queue.put(event) + except Exception as exc: + error = exc + raise + finally: + self._invoke_node_run_end_hooks(node, error) + else: + self._invoke_node_run_start_hooks(node) + try: node_events = node.run() for event in node_events: - # Forward event to dispatcher immediately for streaming self._event_queue.put(event) - else: - # Execute without context preservation - node_events = node.run() - for event in node_events: - # Forward event to dispatcher immediately for streaming - self._event_queue.put(event) + except Exception as exc: + error = exc + raise + finally: + self._invoke_node_run_end_hooks(node, error) + + def _invoke_node_run_start_hooks(self, node: Node) -> None: + """Invoke on_node_run_start hooks for all layers.""" + for layer in self._layers: + try: + layer.on_node_run_start(node) + except Exception: + # Silently ignore layer errors to prevent disrupting node execution + continue + + def _invoke_node_run_end_hooks(self, node: Node, error: Exception | None) -> None: + """Invoke on_node_run_end hooks for all layers.""" + for layer in self._layers: + try: + layer.on_node_run_end(node, error) + except Exception: + # Silently ignore layer errors to prevent disrupting node execution + continue diff --git a/api/core/workflow/graph_engine/worker_management/worker_pool.py b/api/core/workflow/graph_engine/worker_management/worker_pool.py index a9aada9ea5..df76ebe882 100644 --- a/api/core/workflow/graph_engine/worker_management/worker_pool.py +++ b/api/core/workflow/graph_engine/worker_management/worker_pool.py @@ -14,6 +14,7 @@ from configs import dify_config from core.workflow.graph import Graph from core.workflow.graph_events import GraphNodeEventBase +from ..layers.base import GraphEngineLayer from ..ready_queue import ReadyQueue from ..worker import Worker @@ -39,6 +40,8 @@ class WorkerPool: ready_queue: ReadyQueue, event_queue: queue.Queue[GraphNodeEventBase], graph: Graph, + layers: list[GraphEngineLayer], + stop_event: threading.Event, flask_app: "Flask | None" = None, context_vars: "Context | None" = None, min_workers: int | None = None, @@ -53,6 +56,7 @@ class WorkerPool: ready_queue: Ready queue for nodes ready for execution event_queue: Queue for worker events graph: The workflow graph + layers: Graph engine layers for node execution hooks flask_app: Optional Flask app for context preservation context_vars: Optional context variables min_workers: Minimum number of workers @@ -65,6 +69,7 @@ class WorkerPool: self._graph = graph self._flask_app = flask_app self._context_vars = context_vars + self._layers = layers # Scaling parameters with defaults self._min_workers = min_workers or dify_config.GRAPH_ENGINE_MIN_WORKERS @@ -77,6 +82,7 @@ class WorkerPool: self._worker_counter = 0 self._lock = threading.RLock() self._running = False + self._stop_event = stop_event # No longer tracking worker states with callbacks to avoid lock contention @@ -131,7 +137,7 @@ class WorkerPool: # Wait for workers to finish for worker in self._workers: 
if worker.is_alive(): - worker.join(timeout=10.0) + worker.join(timeout=2.0) self._workers.clear() @@ -144,9 +150,11 @@ class WorkerPool: ready_queue=self._ready_queue, event_queue=self._event_queue, graph=self._graph, + layers=self._layers, worker_id=worker_id, flask_app=self._flask_app, context_vars=self._context_vars, + stop_event=self._stop_event, ) worker.start() diff --git a/api/core/workflow/graph_events/__init__.py b/api/core/workflow/graph_events/__init__.py index 7a5edbb331..4ee0ec94d2 100644 --- a/api/core/workflow/graph_events/__init__.py +++ b/api/core/workflow/graph_events/__init__.py @@ -36,6 +36,7 @@ from .loop import ( # Node events from .node import ( + ChunkType, NodeRunExceptionEvent, NodeRunFailedEvent, NodeRunPauseRequestedEvent, @@ -44,10 +45,13 @@ from .node import ( NodeRunStartedEvent, NodeRunStreamChunkEvent, NodeRunSucceededEvent, + ToolCall, + ToolResult, ) __all__ = [ "BaseGraphEvent", + "ChunkType", "GraphEngineEvent", "GraphNodeEventBase", "GraphRunAbortedEvent", @@ -73,4 +77,6 @@ __all__ = [ "NodeRunStartedEvent", "NodeRunStreamChunkEvent", "NodeRunSucceededEvent", + "ToolCall", + "ToolResult", ] diff --git a/api/core/workflow/graph_events/node.py b/api/core/workflow/graph_events/node.py index f225798d41..2ae4fcd919 100644 --- a/api/core/workflow/graph_events/node.py +++ b/api/core/workflow/graph_events/node.py @@ -1,10 +1,11 @@ from collections.abc import Sequence from datetime import datetime +from enum import StrEnum from pydantic import Field from core.rag.entities.citation_metadata import RetrievalSourceMetadata -from core.workflow.entities import AgentNodeStrategyInit +from core.workflow.entities import AgentNodeStrategyInit, ToolCall, ToolResult from core.workflow.entities.pause_reason import PauseReason from .base import GraphNodeEventBase @@ -21,13 +22,39 @@ class NodeRunStartedEvent(GraphNodeEventBase): provider_id: str = "" +class ChunkType(StrEnum): + """Stream chunk type for LLM-related events.""" + + TEXT = "text" # Normal text streaming + TOOL_CALL = "tool_call" # Tool call arguments streaming + TOOL_RESULT = "tool_result" # Tool execution result + THOUGHT = "thought" # Agent thinking process (ReAct) + THOUGHT_START = "thought_start" # Agent thought start + THOUGHT_END = "thought_end" # Agent thought end + + class NodeRunStreamChunkEvent(GraphNodeEventBase): - # Spec-compliant fields + """Stream chunk event for workflow node execution.""" + + # Base fields selector: Sequence[str] = Field( ..., description="selector identifying the output location (e.g., ['nodeA', 'text'])" ) chunk: str = Field(..., description="the actual chunk content") is_final: bool = Field(default=False, description="indicates if this is the last chunk") + chunk_type: ChunkType = Field(default=ChunkType.TEXT, description="type of the chunk") + + # Tool call fields (when chunk_type == TOOL_CALL) + tool_call: ToolCall | None = Field( + default=None, + description="structured payload for tool_call chunks", + ) + + # Tool result fields (when chunk_type == TOOL_RESULT) + tool_result: ToolResult | None = Field( + default=None, + description="structured payload for tool_result chunks", + ) class NodeRunRetrieverResourceEvent(GraphNodeEventBase): diff --git a/api/core/workflow/node_events/__init__.py b/api/core/workflow/node_events/__init__.py index f14a594c85..67263311b9 100644 --- a/api/core/workflow/node_events/__init__.py +++ b/api/core/workflow/node_events/__init__.py @@ -13,16 +13,21 @@ from .loop import ( LoopSucceededEvent, ) from .node import ( + ChunkType, 
ModelInvokeCompletedEvent, PauseRequestedEvent, RunRetrieverResourceEvent, RunRetryEvent, StreamChunkEvent, StreamCompletedEvent, + ThoughtChunkEvent, + ToolCallChunkEvent, + ToolResultChunkEvent, ) __all__ = [ "AgentLogEvent", + "ChunkType", "IterationFailedEvent", "IterationNextEvent", "IterationStartedEvent", @@ -39,4 +44,7 @@ __all__ = [ "RunRetryEvent", "StreamChunkEvent", "StreamCompletedEvent", + "ThoughtChunkEvent", + "ToolCallChunkEvent", + "ToolResultChunkEvent", ] diff --git a/api/core/workflow/node_events/node.py b/api/core/workflow/node_events/node.py index e4fa52f444..f0bbc4d96f 100644 --- a/api/core/workflow/node_events/node.py +++ b/api/core/workflow/node_events/node.py @@ -1,11 +1,13 @@ from collections.abc import Sequence from datetime import datetime +from enum import StrEnum from pydantic import Field from core.file import File from core.model_runtime.entities.llm_entities import LLMUsage from core.rag.entities.citation_metadata import RetrievalSourceMetadata +from core.workflow.entities import ToolCall, ToolResult from core.workflow.entities.pause_reason import PauseReason from core.workflow.node_events import NodeRunResult @@ -32,13 +34,60 @@ class RunRetryEvent(NodeEventBase): start_at: datetime = Field(..., description="Retry start time") +class ChunkType(StrEnum): + """Stream chunk type for LLM-related events.""" + + TEXT = "text" # Normal text streaming + TOOL_CALL = "tool_call" # Tool call arguments streaming + TOOL_RESULT = "tool_result" # Tool execution result + THOUGHT = "thought" # Agent thinking process (ReAct) + THOUGHT_START = "thought_start" # Agent thought start + THOUGHT_END = "thought_end" # Agent thought end + + class StreamChunkEvent(NodeEventBase): - # Spec-compliant fields + """Base stream chunk event - normal text streaming output.""" + selector: Sequence[str] = Field( ..., description="selector identifying the output location (e.g., ['nodeA', 'text'])" ) chunk: str = Field(..., description="the actual chunk content") is_final: bool = Field(default=False, description="indicates if this is the last chunk") + chunk_type: ChunkType = Field(default=ChunkType.TEXT, description="type of the chunk") + tool_call: ToolCall | None = Field(default=None, description="structured payload for tool_call chunks") + tool_result: ToolResult | None = Field(default=None, description="structured payload for tool_result chunks") + + +class ToolCallChunkEvent(StreamChunkEvent): + """Tool call streaming event - tool call arguments streaming output.""" + + chunk_type: ChunkType = Field(default=ChunkType.TOOL_CALL, frozen=True) + tool_call: ToolCall | None = Field(default=None, description="structured tool call payload") + + +class ToolResultChunkEvent(StreamChunkEvent): + """Tool result event - tool execution result.""" + + chunk_type: ChunkType = Field(default=ChunkType.TOOL_RESULT, frozen=True) + tool_result: ToolResult | None = Field(default=None, description="structured tool result payload") + + +class ThoughtStartChunkEvent(StreamChunkEvent): + """Agent thought start streaming event - Agent thinking process (ReAct).""" + + chunk_type: ChunkType = Field(default=ChunkType.THOUGHT_START, frozen=True) + + +class ThoughtEndChunkEvent(StreamChunkEvent): + """Agent thought end streaming event - Agent thinking process (ReAct).""" + + chunk_type: ChunkType = Field(default=ChunkType.THOUGHT_END, frozen=True) + + +class ThoughtChunkEvent(StreamChunkEvent): + """Agent thought streaming event - Agent thinking process (ReAct).""" + + chunk_type: ChunkType = 
Field(default=ChunkType.THOUGHT, frozen=True) class StreamCompletedEvent(NodeEventBase): diff --git a/api/core/workflow/nodes/agent/agent_node.py b/api/core/workflow/nodes/agent/agent_node.py index 4be006de11..234651ce96 100644 --- a/api/core/workflow/nodes/agent/agent_node.py +++ b/api/core/workflow/nodes/agent/agent_node.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from collections.abc import Generator, Mapping, Sequence from typing import TYPE_CHECKING, Any, cast @@ -167,7 +169,7 @@ class AgentNode(Node[AgentNodeData]): variable_pool: VariablePool, node_data: AgentNodeData, for_log: bool = False, - strategy: "PluginAgentStrategy", + strategy: PluginAgentStrategy, ) -> dict[str, Any]: """ Generate parameters based on the given tool parameters, variable pool, and node data. @@ -328,7 +330,7 @@ class AgentNode(Node[AgentNodeData]): def _generate_credentials( self, parameters: dict[str, Any], - ) -> "InvokeCredentials": + ) -> InvokeCredentials: """ Generate credentials based on the given agent parameters. """ @@ -442,9 +444,7 @@ class AgentNode(Node[AgentNodeData]): model_schema.features.remove(feature) return model_schema - def _filter_mcp_type_tool( - self, strategy: "PluginAgentStrategy", tools: list[dict[str, Any]] - ) -> list[dict[str, Any]]: + def _filter_mcp_type_tool(self, strategy: PluginAgentStrategy, tools: list[dict[str, Any]]) -> list[dict[str, Any]]: """ Filter MCP type tool :param strategy: plugin agent strategy diff --git a/api/core/workflow/nodes/agent/exc.py b/api/core/workflow/nodes/agent/exc.py index 944f5f0b20..ba2c83d8a6 100644 --- a/api/core/workflow/nodes/agent/exc.py +++ b/api/core/workflow/nodes/agent/exc.py @@ -119,3 +119,14 @@ class AgentVariableTypeError(AgentNodeError): self.expected_type = expected_type self.actual_type = actual_type super().__init__(message) + + +class AgentMaxIterationError(AgentNodeError): + """Exception raised when the agent exceeds the maximum iteration limit.""" + + def __init__(self, max_iteration: int): + self.max_iteration = max_iteration + super().__init__( + f"Agent exceeded the maximum iteration limit of {max_iteration}. " + f"The agent was unable to complete the task within the allowed number of iterations." 
+ ) diff --git a/api/core/workflow/nodes/base/entities.py b/api/core/workflow/nodes/base/entities.py index 5aab6bbde4..e5a20c8e91 100644 --- a/api/core/workflow/nodes/base/entities.py +++ b/api/core/workflow/nodes/base/entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from abc import ABC from builtins import type as type_ @@ -111,7 +113,7 @@ class DefaultValue(BaseModel): raise DefaultValueTypeError(f"Cannot convert to number: {value}") @model_validator(mode="after") - def validate_value_type(self) -> "DefaultValue": + def validate_value_type(self) -> DefaultValue: # Type validation configuration type_validators = { DefaultValueType.STRING: { diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py index c2e1105971..06e4c0440d 100644 --- a/api/core/workflow/nodes/base/node.py +++ b/api/core/workflow/nodes/base/node.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import importlib import logging import operator @@ -46,6 +48,9 @@ from core.workflow.node_events import ( RunRetrieverResourceEvent, StreamChunkEvent, StreamCompletedEvent, + ThoughtChunkEvent, + ToolCallChunkEvent, + ToolResultChunkEvent, ) from core.workflow.runtime import GraphRuntimeState from libs.datetime_utils import naive_utc_now @@ -59,7 +64,7 @@ logger = logging.getLogger(__name__) class Node(Generic[NodeDataT]): - node_type: ClassVar["NodeType"] + node_type: ClassVar[NodeType] execution_type: NodeExecutionType = NodeExecutionType.EXECUTABLE _node_data_type: ClassVar[type[BaseNodeData]] = BaseNodeData @@ -198,14 +203,14 @@ class Node(Generic[NodeDataT]): return None # Global registry populated via __init_subclass__ - _registry: ClassVar[dict["NodeType", dict[str, type["Node"]]]] = {} + _registry: ClassVar[dict[NodeType, dict[str, type[Node]]]] = {} def __init__( self, id: str, config: Mapping[str, Any], - graph_init_params: "GraphInitParams", - graph_runtime_state: "GraphRuntimeState", + graph_init_params: GraphInitParams, + graph_runtime_state: GraphRuntimeState, ) -> None: self._graph_init_params = graph_init_params self.id = id @@ -241,9 +246,18 @@ class Node(Generic[NodeDataT]): return @property - def graph_init_params(self) -> "GraphInitParams": + def graph_init_params(self) -> GraphInitParams: return self._graph_init_params + @property + def execution_id(self) -> str: + return self._node_execution_id + + def ensure_execution_id(self) -> str: + if not self._node_execution_id: + self._node_execution_id = str(uuid4()) + return self._node_execution_id + def _hydrate_node_data(self, data: Mapping[str, Any]) -> NodeDataT: return cast(NodeDataT, self._node_data_type.model_validate(data)) @@ -255,15 +269,17 @@ class Node(Generic[NodeDataT]): """ raise NotImplementedError + def _should_stop(self) -> bool: + """Check if execution should be stopped.""" + return self.graph_runtime_state.stop_event.is_set() + def run(self) -> Generator[GraphNodeEventBase, None, None]: - # Generate a single node execution ID to use for all events - if not self._node_execution_id: - self._node_execution_id = str(uuid4()) + execution_id = self.ensure_execution_id() self._start_at = naive_utc_now() # Create and push start event with required fields start_event = NodeRunStartedEvent( - id=self._node_execution_id, + id=execution_id, node_id=self._node_id, node_type=self.node_type, node_title=self.title, @@ -321,10 +337,25 @@ class Node(Generic[NodeDataT]): if isinstance(event, NodeEventBase): # pyright: ignore[reportUnnecessaryIsInstance] yield self._dispatch(event) elif 
isinstance(event, GraphNodeEventBase) and not event.in_iteration_id and not event.in_loop_id: # pyright: ignore[reportUnnecessaryIsInstance] - event.id = self._node_execution_id + event.id = self.execution_id yield event else: yield event + + if self._should_stop(): + error_message = "Execution cancelled" + yield NodeRunFailedEvent( + id=self.execution_id, + node_id=self._node_id, + node_type=self.node_type, + start_at=self._start_at, + node_run_result=NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + error=error_message, + ), + error=error_message, + ) + return except Exception as e: logger.exception("Node %s failed to run", self._node_id) result = NodeRunResult( @@ -333,7 +364,7 @@ class Node(Generic[NodeDataT]): error_type="WorkflowNodeError", ) yield NodeRunFailedEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, start_at=self._start_at, @@ -431,7 +462,7 @@ class Node(Generic[NodeDataT]): raise NotImplementedError("subclasses of BaseNode must implement `version` method.") @classmethod - def get_node_type_classes_mapping(cls) -> Mapping["NodeType", Mapping[str, type["Node"]]]: + def get_node_type_classes_mapping(cls) -> Mapping[NodeType, Mapping[str, type[Node]]]: """Return mapping of NodeType -> {version -> Node subclass} using __init_subclass__ registry. Import all modules under core.workflow.nodes so subclasses register themselves on import. @@ -512,7 +543,7 @@ class Node(Generic[NodeDataT]): match result.status: case WorkflowNodeExecutionStatus.FAILED: return NodeRunFailedEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self.id, node_type=self.node_type, start_at=self._start_at, @@ -521,7 +552,7 @@ class Node(Generic[NodeDataT]): ) case WorkflowNodeExecutionStatus.SUCCEEDED: return NodeRunSucceededEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self.id, node_type=self.node_type, start_at=self._start_at, @@ -536,6 +567,24 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: StreamChunkEvent) -> NodeRunStreamChunkEvent: + from core.workflow.graph_events import ChunkType + + return NodeRunStreamChunkEvent( + id=self.execution_id, + node_id=self._node_id, + node_type=self.node_type, + selector=event.selector, + chunk=event.chunk, + is_final=event.is_final, + chunk_type=ChunkType(event.chunk_type.value), + tool_call=event.tool_call, + tool_result=event.tool_result, + ) + + @_dispatch.register + def _(self, event: ToolCallChunkEvent) -> NodeRunStreamChunkEvent: + from core.workflow.graph_events import ChunkType + return NodeRunStreamChunkEvent( id=self._node_execution_id, node_id=self._node_id, @@ -543,6 +592,44 @@ class Node(Generic[NodeDataT]): selector=event.selector, chunk=event.chunk, is_final=event.is_final, + chunk_type=ChunkType.TOOL_CALL, + tool_call=event.tool_call, + ) + + @_dispatch.register + def _(self, event: ToolResultChunkEvent) -> NodeRunStreamChunkEvent: + from core.workflow.entities import ToolResult, ToolResultStatus + from core.workflow.graph_events import ChunkType + + tool_result = event.tool_result or ToolResult() + status: ToolResultStatus = tool_result.status or ToolResultStatus.SUCCESS + tool_result = tool_result.model_copy( + update={"status": status, "files": tool_result.files or []}, + ) + + return NodeRunStreamChunkEvent( + id=self._node_execution_id, + node_id=self._node_id, + node_type=self.node_type, + selector=event.selector, + chunk=event.chunk, + is_final=event.is_final, + chunk_type=ChunkType.TOOL_RESULT, + 
tool_result=tool_result, + ) + + @_dispatch.register + def _(self, event: ThoughtChunkEvent) -> NodeRunStreamChunkEvent: + from core.workflow.graph_events import ChunkType + + return NodeRunStreamChunkEvent( + id=self._node_execution_id, + node_id=self._node_id, + node_type=self.node_type, + selector=event.selector, + chunk=event.chunk, + is_final=event.is_final, + chunk_type=ChunkType.THOUGHT, ) @_dispatch.register @@ -550,7 +637,7 @@ class Node(Generic[NodeDataT]): match event.node_run_result.status: case WorkflowNodeExecutionStatus.SUCCEEDED: return NodeRunSucceededEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, start_at=self._start_at, @@ -558,7 +645,7 @@ class Node(Generic[NodeDataT]): ) case WorkflowNodeExecutionStatus.FAILED: return NodeRunFailedEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, start_at=self._start_at, @@ -573,7 +660,7 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: PauseRequestedEvent) -> NodeRunPauseRequestedEvent: return NodeRunPauseRequestedEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.PAUSED), @@ -583,7 +670,7 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: AgentLogEvent) -> NodeRunAgentLogEvent: return NodeRunAgentLogEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, message_id=event.message_id, @@ -599,7 +686,7 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: LoopStartedEvent) -> NodeRunLoopStartedEvent: return NodeRunLoopStartedEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, node_title=self.node_data.title, @@ -612,7 +699,7 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: LoopNextEvent) -> NodeRunLoopNextEvent: return NodeRunLoopNextEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, node_title=self.node_data.title, @@ -623,7 +710,7 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: LoopSucceededEvent) -> NodeRunLoopSucceededEvent: return NodeRunLoopSucceededEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, node_title=self.node_data.title, @@ -637,7 +724,7 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: LoopFailedEvent) -> NodeRunLoopFailedEvent: return NodeRunLoopFailedEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, node_title=self.node_data.title, @@ -652,7 +739,7 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: IterationStartedEvent) -> NodeRunIterationStartedEvent: return NodeRunIterationStartedEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, node_title=self.node_data.title, @@ -665,7 +752,7 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: IterationNextEvent) -> NodeRunIterationNextEvent: return NodeRunIterationNextEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, node_title=self.node_data.title, @@ -676,7 +763,7 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: 
IterationSucceededEvent) -> NodeRunIterationSucceededEvent: return NodeRunIterationSucceededEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, node_title=self.node_data.title, @@ -690,7 +777,7 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: IterationFailedEvent) -> NodeRunIterationFailedEvent: return NodeRunIterationFailedEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, node_title=self.node_data.title, @@ -705,7 +792,7 @@ class Node(Generic[NodeDataT]): @_dispatch.register def _(self, event: RunRetrieverResourceEvent) -> NodeRunRetrieverResourceEvent: return NodeRunRetrieverResourceEvent( - id=self._node_execution_id, + id=self.execution_id, node_id=self._node_id, node_type=self.node_type, retriever_resources=event.retriever_resources, diff --git a/api/core/workflow/nodes/base/template.py b/api/core/workflow/nodes/base/template.py index ba3e2058cf..81f4b9f6fb 100644 --- a/api/core/workflow/nodes/base/template.py +++ b/api/core/workflow/nodes/base/template.py @@ -4,6 +4,8 @@ This module provides a unified template structure for both Answer and End nodes, similar to SegmentGroup but focused on template representation without values. """ +from __future__ import annotations + from abc import ABC, abstractmethod from collections.abc import Sequence from dataclasses import dataclass @@ -58,7 +60,7 @@ class Template: segments: list[TemplateSegmentUnion] @classmethod - def from_answer_template(cls, template_str: str) -> "Template": + def from_answer_template(cls, template_str: str) -> Template: """Create a Template from an Answer node template string. Example: @@ -107,7 +109,7 @@ class Template: return cls(segments=segments) @classmethod - def from_end_outputs(cls, outputs_config: list[dict[str, Any]]) -> "Template": + def from_end_outputs(cls, outputs_config: list[dict[str, Any]]) -> Template: """Create a Template from an End node outputs configuration. End nodes are treated as templates of concatenated variables with newlines. 
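Note on the dispatch changes in `base/node.py` above: node-level chunk events are converted to graph-level ones with `ChunkType(event.chunk_type.value)`, i.e. the graph-side enum is re-instantiated from the node-side member's string value. That round-trip is only safe while the two `StrEnum`s stay member-for-member identical. Below is a small runnable sketch under that assumption; both classes are stand-ins for the real enums, not imports from the codebase.

```python
from enum import StrEnum


# Stand-ins mirroring the two ChunkType enums declared in node_events and
# graph_events; both must define the same members with the same values.
class NodeChunkType(StrEnum):
    TEXT = "text"
    TOOL_CALL = "tool_call"
    TOOL_RESULT = "tool_result"
    THOUGHT = "thought"
    THOUGHT_START = "thought_start"
    THOUGHT_END = "thought_end"


class GraphChunkType(StrEnum):
    TEXT = "text"
    TOOL_CALL = "tool_call"
    TOOL_RESULT = "tool_result"
    THOUGHT = "thought"
    THOUGHT_START = "thought_start"
    THOUGHT_END = "thought_end"


# The conversion the dispatch handlers rely on: construct one enum from the
# other's value. A member missing on the graph side raises ValueError here
# instead of silently mislabeling the chunk.
node_side = NodeChunkType.TOOL_CALL
graph_side = GraphChunkType(node_side.value)
assert graph_side is GraphChunkType.TOOL_CALL
```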
diff --git a/api/core/workflow/nodes/code/code_node.py b/api/core/workflow/nodes/code/code_node.py index a38e10030a..e3035d3bf0 100644 --- a/api/core/workflow/nodes/code/code_node.py +++ b/api/core/workflow/nodes/code/code_node.py @@ -1,8 +1,7 @@ from collections.abc import Mapping, Sequence from decimal import Decimal -from typing import Any, cast +from typing import TYPE_CHECKING, Any, ClassVar, cast -from configs import dify_config from core.helper.code_executor.code_executor import CodeExecutionError, CodeExecutor, CodeLanguage from core.helper.code_executor.code_node_provider import CodeNodeProvider from core.helper.code_executor.javascript.javascript_code_provider import JavascriptCodeProvider @@ -13,6 +12,7 @@ from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus from core.workflow.node_events import NodeRunResult from core.workflow.nodes.base.node import Node from core.workflow.nodes.code.entities import CodeNodeData +from core.workflow.nodes.code.limits import CodeNodeLimits from .exc import ( CodeNodeError, @@ -20,9 +20,41 @@ from .exc import ( OutputValidationError, ) +if TYPE_CHECKING: + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState + class CodeNode(Node[CodeNodeData]): node_type = NodeType.CODE + _DEFAULT_CODE_PROVIDERS: ClassVar[tuple[type[CodeNodeProvider], ...]] = ( + Python3CodeProvider, + JavascriptCodeProvider, + ) + _limits: CodeNodeLimits + + def __init__( + self, + id: str, + config: Mapping[str, Any], + graph_init_params: "GraphInitParams", + graph_runtime_state: "GraphRuntimeState", + *, + code_executor: type[CodeExecutor] | None = None, + code_providers: Sequence[type[CodeNodeProvider]] | None = None, + code_limits: CodeNodeLimits, + ) -> None: + super().__init__( + id=id, + config=config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + self._code_executor: type[CodeExecutor] = code_executor or CodeExecutor + self._code_providers: tuple[type[CodeNodeProvider], ...] 
= ( + tuple(code_providers) if code_providers else self._DEFAULT_CODE_PROVIDERS + ) + self._limits = code_limits @classmethod def get_default_config(cls, filters: Mapping[str, object] | None = None) -> Mapping[str, object]: @@ -35,11 +67,16 @@ class CodeNode(Node[CodeNodeData]): if filters: code_language = cast(CodeLanguage, filters.get("code_language", CodeLanguage.PYTHON3)) - providers: list[type[CodeNodeProvider]] = [Python3CodeProvider, JavascriptCodeProvider] - code_provider: type[CodeNodeProvider] = next(p for p in providers if p.is_accept_language(code_language)) + code_provider: type[CodeNodeProvider] = next( + provider for provider in cls._DEFAULT_CODE_PROVIDERS if provider.is_accept_language(code_language) + ) return code_provider.get_default_config() + @classmethod + def default_code_providers(cls) -> tuple[type[CodeNodeProvider], ...]: + return cls._DEFAULT_CODE_PROVIDERS + @classmethod def version(cls) -> str: return "1" @@ -60,7 +97,8 @@ class CodeNode(Node[CodeNodeData]): variables[variable_name] = variable.to_object() if variable else None # Run code try: - result = CodeExecutor.execute_workflow_code_template( + _ = self._select_code_provider(code_language) + result = self._code_executor.execute_workflow_code_template( language=code_language, code=code, inputs=variables, @@ -75,6 +113,12 @@ class CodeNode(Node[CodeNodeData]): return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=variables, outputs=result) + def _select_code_provider(self, code_language: CodeLanguage) -> type[CodeNodeProvider]: + for provider in self._code_providers: + if provider.is_accept_language(code_language): + return provider + raise CodeNodeError(f"Unsupported code language: {code_language}") + def _check_string(self, value: str | None, variable: str) -> str | None: """ Check string @@ -85,10 +129,10 @@ class CodeNode(Node[CodeNodeData]): if value is None: return None - if len(value) > dify_config.CODE_MAX_STRING_LENGTH: + if len(value) > self._limits.max_string_length: raise OutputValidationError( f"The length of output variable `{variable}` must be" - f" less than {dify_config.CODE_MAX_STRING_LENGTH} characters" + f" less than {self._limits.max_string_length} characters" ) return value.replace("\x00", "") @@ -109,20 +153,20 @@ class CodeNode(Node[CodeNodeData]): if value is None: return None - if value > dify_config.CODE_MAX_NUMBER or value < dify_config.CODE_MIN_NUMBER: + if value > self._limits.max_number or value < self._limits.min_number: raise OutputValidationError( f"Output variable `{variable}` is out of range," - f" it must be between {dify_config.CODE_MIN_NUMBER} and {dify_config.CODE_MAX_NUMBER}." + f" it must be between {self._limits.min_number} and {self._limits.max_number}." ) if isinstance(value, float): decimal_value = Decimal(str(value)).normalize() precision = -decimal_value.as_tuple().exponent if decimal_value.as_tuple().exponent < 0 else 0 # type: ignore[operator] # raise error if precision is too high - if precision > dify_config.CODE_MAX_PRECISION: + if precision > self._limits.max_precision: raise OutputValidationError( f"Output variable `{variable}` has too high precision," - f" it must be less than {dify_config.CODE_MAX_PRECISION} digits." + f" it must be less than {self._limits.max_precision} digits." ) return value @@ -137,8 +181,8 @@ class CodeNode(Node[CodeNodeData]): # TODO(QuantumGhost): Replace native Python lists with `Array*Segment` classes. 
# Note that `_transform_result` may produce lists containing `None` values, # which don't conform to the type requirements of `Array*Segment` classes. - if depth > dify_config.CODE_MAX_DEPTH: - raise DepthLimitError(f"Depth limit {dify_config.CODE_MAX_DEPTH} reached, object too deep.") + if depth > self._limits.max_depth: + raise DepthLimitError(f"Depth limit {self._limits.max_depth} reached, object too deep.") transformed_result: dict[str, Any] = {} if output_schema is None: @@ -272,10 +316,10 @@ class CodeNode(Node[CodeNodeData]): f"Output {prefix}{dot}{output_name} is not an array, got {type(value)} instead." ) else: - if len(value) > dify_config.CODE_MAX_NUMBER_ARRAY_LENGTH: + if len(value) > self._limits.max_number_array_length: raise OutputValidationError( f"The length of output variable `{prefix}{dot}{output_name}` must be" - f" less than {dify_config.CODE_MAX_NUMBER_ARRAY_LENGTH} elements." + f" less than {self._limits.max_number_array_length} elements." ) for i, inner_value in enumerate(value): @@ -305,10 +349,10 @@ class CodeNode(Node[CodeNodeData]): f" got {type(result.get(output_name))} instead." ) else: - if len(result[output_name]) > dify_config.CODE_MAX_STRING_ARRAY_LENGTH: + if len(result[output_name]) > self._limits.max_string_array_length: raise OutputValidationError( f"The length of output variable `{prefix}{dot}{output_name}` must be" - f" less than {dify_config.CODE_MAX_STRING_ARRAY_LENGTH} elements." + f" less than {self._limits.max_string_array_length} elements." ) transformed_result[output_name] = [ @@ -326,10 +370,10 @@ class CodeNode(Node[CodeNodeData]): f" got {type(result.get(output_name))} instead." ) else: - if len(result[output_name]) > dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH: + if len(result[output_name]) > self._limits.max_object_array_length: raise OutputValidationError( f"The length of output variable `{prefix}{dot}{output_name}` must be" - f" less than {dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH} elements." + f" less than {self._limits.max_object_array_length} elements." ) for i, value in enumerate(result[output_name]): diff --git a/api/core/workflow/nodes/code/limits.py b/api/core/workflow/nodes/code/limits.py new file mode 100644 index 0000000000..a6b9e9e68e --- /dev/null +++ b/api/core/workflow/nodes/code/limits.py @@ -0,0 +1,13 @@ +from dataclasses import dataclass + + +@dataclass(frozen=True) +class CodeNodeLimits: + max_string_length: int + max_number: int | float + min_number: int | float + max_precision: int + max_depth: int + max_number_array_length: int + max_string_array_length: int + max_object_array_length: int diff --git a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py index f0c84872fb..931c6113a7 100644 --- a/api/core/workflow/nodes/http_request/executor.py +++ b/api/core/workflow/nodes/http_request/executor.py @@ -86,6 +86,11 @@ class Executor: node_data.authorization.config.api_key = variable_pool.convert_template( node_data.authorization.config.api_key ).text + # Validate that API key is not empty after template conversion + if not node_data.authorization.config.api_key or not node_data.authorization.config.api_key.strip(): + raise AuthorizationConfigError( + "API key is required for authorization but was empty. Please provide a valid API key." 
+ ) self.url = node_data.url self.method = node_data.method diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index adc474bd60..8670a71aa3 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -6,7 +6,7 @@ from collections import defaultdict from collections.abc import Mapping, Sequence from typing import TYPE_CHECKING, Any, cast -from sqlalchemy import and_, func, literal, or_, select +from sqlalchemy import and_, func, or_, select from sqlalchemy.orm import sessionmaker from core.app.app_config.entities import DatasetRetrieveConfigEntity @@ -460,7 +460,7 @@ class KnowledgeRetrievalNode(LLMUsageTrackingMixin, Node[KnowledgeRetrievalNodeD if automatic_metadata_filters: conditions = [] for sequence, filter in enumerate(automatic_metadata_filters): - self._process_metadata_filter_func( + DatasetRetrieval.process_metadata_filter_func( sequence, filter.get("condition", ""), filter.get("metadata_name", ""), @@ -504,7 +504,7 @@ class KnowledgeRetrievalNode(LLMUsageTrackingMixin, Node[KnowledgeRetrievalNodeD value=expected_value, ) ) - filters = self._process_metadata_filter_func( + filters = DatasetRetrieval.process_metadata_filter_func( sequence, condition.comparison_operator, metadata_name, @@ -603,87 +603,6 @@ class KnowledgeRetrievalNode(LLMUsageTrackingMixin, Node[KnowledgeRetrievalNodeD return [], usage return automatic_metadata_filters, usage - def _process_metadata_filter_func( - self, sequence: int, condition: str, metadata_name: str, value: Any, filters: list[Any] - ) -> list[Any]: - if value is None and condition not in ("empty", "not empty"): - return filters - - json_field = Document.doc_metadata[metadata_name].as_string() - - match condition: - case "contains": - filters.append(json_field.like(f"%{value}%")) - - case "not contains": - filters.append(json_field.notlike(f"%{value}%")) - - case "start with": - filters.append(json_field.like(f"{value}%")) - - case "end with": - filters.append(json_field.like(f"%{value}")) - case "in": - if isinstance(value, str): - value_list = [v.strip() for v in value.split(",") if v.strip()] - elif isinstance(value, (list, tuple)): - value_list = [str(v) for v in value if v is not None] - else: - value_list = [str(value)] if value is not None else [] - - if not value_list: - filters.append(literal(False)) - else: - filters.append(json_field.in_(value_list)) - - case "not in": - if isinstance(value, str): - value_list = [v.strip() for v in value.split(",") if v.strip()] - elif isinstance(value, (list, tuple)): - value_list = [str(v) for v in value if v is not None] - else: - value_list = [str(value)] if value is not None else [] - - if not value_list: - filters.append(literal(True)) - else: - filters.append(json_field.notin_(value_list)) - - case "is" | "=": - if isinstance(value, str): - filters.append(json_field == value) - elif isinstance(value, (int, float)): - filters.append(Document.doc_metadata[metadata_name].as_float() == value) - - case "is not" | "≠": - if isinstance(value, str): - filters.append(json_field != value) - elif isinstance(value, (int, float)): - filters.append(Document.doc_metadata[metadata_name].as_float() != value) - - case "empty": - filters.append(Document.doc_metadata[metadata_name].is_(None)) - - case "not empty": - filters.append(Document.doc_metadata[metadata_name].isnot(None)) - - case "before" | "<": 
- filters.append(Document.doc_metadata[metadata_name].as_float() < value) - - case "after" | ">": - filters.append(Document.doc_metadata[metadata_name].as_float() > value) - - case "≤" | "<=": - filters.append(Document.doc_metadata[metadata_name].as_float() <= value) - - case "≥" | ">=": - filters.append(Document.doc_metadata[metadata_name].as_float() >= value) - - case _: - pass - - return filters - @classmethod def _extract_variable_selector_to_variable_mapping( cls, diff --git a/api/core/workflow/nodes/llm/__init__.py b/api/core/workflow/nodes/llm/__init__.py index f7bc713f63..edd0d3d581 100644 --- a/api/core/workflow/nodes/llm/__init__.py +++ b/api/core/workflow/nodes/llm/__init__.py @@ -3,6 +3,7 @@ from .entities import ( LLMNodeCompletionModelPromptTemplate, LLMNodeData, ModelConfig, + ToolMetadata, VisionConfig, ) from .node import LLMNode @@ -13,5 +14,6 @@ __all__ = [ "LLMNodeCompletionModelPromptTemplate", "LLMNodeData", "ModelConfig", + "ToolMetadata", "VisionConfig", ] diff --git a/api/core/workflow/nodes/llm/entities.py b/api/core/workflow/nodes/llm/entities.py index fe6f2290aa..0d61e45003 100644 --- a/api/core/workflow/nodes/llm/entities.py +++ b/api/core/workflow/nodes/llm/entities.py @@ -1,10 +1,17 @@ +import re from collections.abc import Mapping, Sequence from typing import Any, Literal -from pydantic import BaseModel, Field, field_validator +from pydantic import BaseModel, ConfigDict, Field, field_validator +from core.agent.entities import AgentLog, AgentResult +from core.file import File from core.model_runtime.entities import ImagePromptMessageContent, LLMMode +from core.model_runtime.entities.llm_entities import LLMUsage from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate, MemoryConfig +from core.tools.entities.tool_entities import ToolProviderType +from core.workflow.entities import ToolCall, ToolCallResult +from core.workflow.node_events import AgentLogEvent from core.workflow.nodes.base import BaseNodeData from core.workflow.nodes.base.entities import VariableSelector @@ -58,6 +65,268 @@ class LLMNodeCompletionModelPromptTemplate(CompletionModelPromptTemplate): jinja2_text: str | None = None +class ToolMetadata(BaseModel): + """ + Tool metadata for LLM node with tool support. + + Defines the essential fields needed for tool configuration, + particularly the 'type' field to identify tool provider type. 
+ """ + + # Core fields + enabled: bool = True + type: ToolProviderType = Field(..., description="Tool provider type: builtin, api, mcp, workflow") + provider_name: str = Field(..., description="Tool provider name/identifier") + tool_name: str = Field(..., description="Tool name") + + # Optional fields + plugin_unique_identifier: str | None = Field(None, description="Plugin unique identifier for plugin tools") + credential_id: str | None = Field(None, description="Credential ID for tools requiring authentication") + + # Configuration fields + parameters: dict[str, Any] = Field(default_factory=dict, description="Tool parameters") + settings: dict[str, Any] = Field(default_factory=dict, description="Tool settings configuration") + extra: dict[str, Any] = Field(default_factory=dict, description="Extra tool configuration like custom description") + + +class ModelTraceSegment(BaseModel): + """Model invocation trace segment with token usage and output.""" + + text: str | None = Field(None, description="Model output text content") + reasoning: str | None = Field(None, description="Reasoning/thought content from model") + tool_calls: list[ToolCall] = Field(default_factory=list, description="Tool calls made by the model") + + +class ToolTraceSegment(BaseModel): + """Tool invocation trace segment with call details and result.""" + + id: str | None = Field(default=None, description="Unique identifier for this tool call") + name: str | None = Field(default=None, description="Name of the tool being called") + arguments: str | None = Field(default=None, description="Accumulated tool arguments JSON") + output: str | None = Field(default=None, description="Tool call result") + + +class LLMTraceSegment(BaseModel): + """ + Streaming trace segment for LLM tool-enabled runs. + + Represents alternating model and tool invocations in sequence: + model -> tool -> model -> tool -> ... + + Each segment records its execution duration. + """ + + type: Literal["model", "tool"] + duration: float = Field(..., description="Execution duration in seconds") + usage: LLMUsage | None = Field(default=None, description="Token usage statistics for this model call") + output: ModelTraceSegment | ToolTraceSegment = Field(..., description="Output of the segment") + + # Common metadata for both model and tool segments + provider: str | None = Field(default=None, description="Model or tool provider identifier") + name: str | None = Field(default=None, description="Name of the model or tool") + icon: str | None = Field(default=None, description="Icon for the provider") + icon_dark: str | None = Field(default=None, description="Dark theme icon for the provider") + error: str | None = Field(default=None, description="Error message if segment failed") + status: Literal["success", "error"] | None = Field(default=None, description="Tool execution status") + + +class LLMGenerationData(BaseModel): + """Generation data from LLM invocation with tools. + + For multi-turn tool calls like: thought1 -> text1 -> tool_call1 -> thought2 -> text2 -> tool_call2 + - reasoning_contents: [thought1, thought2, ...] - one element per turn + - tool_calls: [{id, name, arguments, result}, ...] 
- all tool calls with results
+    """
+
+    text: str = Field(..., description="Accumulated text content from all turns")
+    reasoning_contents: list[str] = Field(default_factory=list, description="Reasoning content per turn")
+    tool_calls: list[ToolCallResult] = Field(default_factory=list, description="Tool calls with results")
+    sequence: list[dict[str, Any]] = Field(default_factory=list, description="Ordered segments for rendering")
+    usage: LLMUsage = Field(..., description="LLM usage statistics")
+    finish_reason: str | None = Field(None, description="Finish reason from LLM")
+    files: list[File] = Field(default_factory=list, description="Generated files")
+    trace: list[LLMTraceSegment] = Field(default_factory=list, description="Streaming trace in emitted order")
+
+
+class ThinkTagStreamParser:
+    """Lightweight state machine to split streaming chunks by <think> tags."""
+
+    _START_PATTERN = re.compile(r"<think(\s[^>]*)?>", re.IGNORECASE)
+    _END_PATTERN = re.compile(r"</think>", re.IGNORECASE)
+    _START_PREFIX = "<think>"
+    _END_PREFIX = "</think>"
+
+    def __init__(self):
+        self._buffer = ""
+        self._in_think = False
+
+    def _suffix_prefix_len(self, text: str, prefix: str) -> int:
+        """Return length of the longest suffix of `text` that is a prefix of `prefix`."""
+        max_len = min(len(text), len(prefix) - 1)
+        for i in range(max_len, 0, -1):
+            if text[-i:].lower() == prefix[:i].lower():
+                return i
+        return 0
+
+    def process(self, chunk: str) -> list[tuple[str, str]]:
+        """
+        Split incoming chunk into ('thought' | 'text', content) tuples.
+        Content excludes the <think> tags themselves and handles split tags across chunks.
+        """
+        parts: list[tuple[str, str]] = []
+        self._buffer += chunk
+
+        while self._buffer:
+            if self._in_think:
+                end_match = self._END_PATTERN.search(self._buffer)
+                if end_match:
+                    thought_text = self._buffer[: end_match.start()]
+                    if thought_text:
+                        parts.append(("thought", thought_text))
+                    parts.append(("thought_end", ""))
+                    self._buffer = self._buffer[end_match.end() :]
+                    self._in_think = False
+                    continue
+
+                hold_len = self._suffix_prefix_len(self._buffer, self._END_PREFIX)
+                emit = self._buffer[: len(self._buffer) - hold_len]
+                if emit:
+                    parts.append(("thought", emit))
+                self._buffer = self._buffer[-hold_len:] if hold_len > 0 else ""
+                break
+
+            start_match = self._START_PATTERN.search(self._buffer)
+            if start_match:
+                prefix = self._buffer[: start_match.start()]
+                if prefix:
+                    parts.append(("text", prefix))
+                self._buffer = self._buffer[start_match.end() :]
+                parts.append(("thought_start", ""))
+                self._in_think = True
+                continue
+
+            hold_len = self._suffix_prefix_len(self._buffer, self._START_PREFIX)
+            emit = self._buffer[: len(self._buffer) - hold_len]
+            if emit:
+                parts.append(("text", emit))
+            self._buffer = self._buffer[-hold_len:] if hold_len > 0 else ""
+            break
+
+        cleaned_parts: list[tuple[str, str]] = []
+        for kind, content in parts:
+            # Extra safeguard: strip any stray <think> tags that slipped through.
+            content = self._START_PATTERN.sub("", content)
+            content = self._END_PATTERN.sub("", content)
+            if content or kind in {"thought_start", "thought_end"}:
+                cleaned_parts.append((kind, content))
+
+        return cleaned_parts
+
+    def flush(self) -> list[tuple[str, str]]:
+        """Flush remaining buffer when the stream ends."""
+        if not self._buffer:
+            return []
+        kind = "thought" if self._in_think else "text"
+        content = self._buffer
+        # Drop dangling partial <think> tags instead of emitting them
+        if content.lower().startswith(self._START_PREFIX) or content.lower().startswith(self._END_PREFIX):
+            content = ""
+        self._buffer = ""
+        if not content and not self._in_think:
+            return []
+        # Strip any complete <think> tags that might still be present.
+ content = self._START_PATTERN.sub("", content) + content = self._END_PATTERN.sub("", content) + + result: list[tuple[str, str]] = [] + if content: + result.append((kind, content)) + if self._in_think: + result.append(("thought_end", "")) + self._in_think = False + return result + + +class StreamBuffers(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + think_parser: ThinkTagStreamParser = Field(default_factory=ThinkTagStreamParser) + pending_thought: list[str] = Field(default_factory=list) + pending_content: list[str] = Field(default_factory=list) + pending_tool_calls: list[ToolCall] = Field(default_factory=list) + current_turn_reasoning: list[str] = Field(default_factory=list) + reasoning_per_turn: list[str] = Field(default_factory=list) + + +class TraceState(BaseModel): + trace_segments: list[LLMTraceSegment] = Field(default_factory=list) + tool_trace_map: dict[str, LLMTraceSegment] = Field(default_factory=dict) + tool_call_index_map: dict[str, int] = Field(default_factory=dict) + model_segment_start_time: float | None = Field(default=None, description="Start time for current model segment") + pending_usage: LLMUsage | None = Field(default=None, description="Pending usage for current model segment") + + +class AggregatedResult(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + text: str = "" + files: list[File] = Field(default_factory=list) + usage: LLMUsage = Field(default_factory=LLMUsage.empty_usage) + finish_reason: str | None = None + + +class AgentContext(BaseModel): + agent_logs: list[AgentLogEvent] = Field(default_factory=list) + agent_result: AgentResult | None = None + + +class ToolOutputState(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + stream: StreamBuffers = Field(default_factory=StreamBuffers) + trace: TraceState = Field(default_factory=TraceState) + aggregate: AggregatedResult = Field(default_factory=AggregatedResult) + agent: AgentContext = Field(default_factory=AgentContext) + + +class ToolLogPayload(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + tool_name: str = "" + tool_call_id: str = "" + tool_args: dict[str, Any] = Field(default_factory=dict) + tool_output: Any = None + tool_error: Any = None + files: list[Any] = Field(default_factory=list) + meta: dict[str, Any] = Field(default_factory=dict) + + @classmethod + def from_log(cls, log: AgentLog) -> "ToolLogPayload": + data = log.data or {} + return cls( + tool_name=data.get("tool_name", ""), + tool_call_id=data.get("tool_call_id", ""), + tool_args=data.get("tool_args") or {}, + tool_output=data.get("output"), + tool_error=data.get("error"), + files=data.get("files") or [], + meta=data.get("meta") or {}, + ) + + @classmethod + def from_mapping(cls, data: Mapping[str, Any]) -> "ToolLogPayload": + return cls( + tool_name=data.get("tool_name", ""), + tool_call_id=data.get("tool_call_id", ""), + tool_args=data.get("tool_args") or {}, + tool_output=data.get("output"), + tool_error=data.get("error"), + files=data.get("files") or [], + meta=data.get("meta") or {}, + ) + + class LLMNodeData(BaseNodeData): model: ModelConfig prompt_template: Sequence[LLMNodeChatModelMessage] | LLMNodeCompletionModelPromptTemplate @@ -86,6 +355,10 @@ class LLMNodeData(BaseNodeData): ), ) + # Tool support + tools: Sequence[ToolMetadata] = Field(default_factory=list) + max_iterations: int | None = Field(default=None, description="Maximum number of iterations for the LLM node") + @field_validator("prompt_config", mode="before") @classmethod def 
convert_none_prompt_config(cls, v: Any): diff --git a/api/core/workflow/nodes/llm/llm_utils.py b/api/core/workflow/nodes/llm/llm_utils.py index 0c545469bc..01e25cbf5c 100644 --- a/api/core/workflow/nodes/llm/llm_utils.py +++ b/api/core/workflow/nodes/llm/llm_utils.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import Session from configs import dify_config from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity -from core.entities.provider_entities import QuotaUnit +from core.entities.provider_entities import ProviderQuotaType, QuotaUnit from core.file.models import File from core.memory.token_buffer_memory import TokenBufferMemory from core.model_manager import ModelInstance, ModelManager @@ -136,21 +136,37 @@ def deduct_llm_quota(tenant_id: str, model_instance: ModelInstance, usage: LLMUs used_quota = 1 if used_quota is not None and system_configuration.current_quota_type is not None: - with Session(db.engine) as session: - stmt = ( - update(Provider) - .where( - Provider.tenant_id == tenant_id, - # TODO: Use provider name with prefix after the data migration. - Provider.provider_name == ModelProviderID(model_instance.provider).provider_name, - Provider.provider_type == ProviderType.SYSTEM, - Provider.quota_type == system_configuration.current_quota_type.value, - Provider.quota_limit > Provider.quota_used, - ) - .values( - quota_used=Provider.quota_used + used_quota, - last_used=naive_utc_now(), - ) + if system_configuration.current_quota_type == ProviderQuotaType.TRIAL: + from services.credit_pool_service import CreditPoolService + + CreditPoolService.check_and_deduct_credits( + tenant_id=tenant_id, + credits_required=used_quota, ) - session.execute(stmt) - session.commit() + elif system_configuration.current_quota_type == ProviderQuotaType.PAID: + from services.credit_pool_service import CreditPoolService + + CreditPoolService.check_and_deduct_credits( + tenant_id=tenant_id, + credits_required=used_quota, + pool_type="paid", + ) + else: + with Session(db.engine) as session: + stmt = ( + update(Provider) + .where( + Provider.tenant_id == tenant_id, + # TODO: Use provider name with prefix after the data migration. 
+ Provider.provider_name == ModelProviderID(model_instance.provider).provider_name, + Provider.provider_type == ProviderType.SYSTEM.value, + Provider.quota_type == system_configuration.current_quota_type.value, + Provider.quota_limit > Provider.quota_used, + ) + .values( + quota_used=Provider.quota_used + used_quota, + last_used=naive_utc_now(), + ) + ) + session.execute(stmt) + session.commit() diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index 04e2802191..b4caa28ee0 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import base64 import io import json @@ -9,6 +11,8 @@ from typing import TYPE_CHECKING, Any, Literal from sqlalchemy import select +from core.agent.entities import AgentLog, AgentResult, AgentToolEntity, ExecutionContext +from core.agent.patterns import StrategyFactory from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity from core.file import File, FileTransferMethod, FileType, file_manager from core.helper.code_executor import CodeExecutor, CodeLanguage @@ -46,7 +50,9 @@ from core.model_runtime.utils.encoders import jsonable_encoder from core.prompt.entities.advanced_prompt_entities import CompletionModelPromptTemplate, MemoryConfig from core.prompt.utils.prompt_message_util import PromptMessageUtil from core.rag.entities.citation_metadata import RetrievalSourceMetadata +from core.tools.__base.tool import Tool from core.tools.signature import sign_upload_file +from core.tools.tool_manager import ToolManager from core.variables import ( ArrayFileSegment, ArraySegment, @@ -56,7 +62,8 @@ from core.variables import ( StringSegment, ) from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID -from core.workflow.entities import GraphInitParams +from core.workflow.entities import GraphInitParams, ToolCall, ToolResult, ToolResultStatus +from core.workflow.entities.tool_entities import ToolCallResult from core.workflow.enums import ( NodeType, SystemVariableKey, @@ -64,13 +71,18 @@ from core.workflow.enums import ( WorkflowNodeExecutionStatus, ) from core.workflow.node_events import ( + AgentLogEvent, ModelInvokeCompletedEvent, NodeEventBase, NodeRunResult, RunRetrieverResourceEvent, StreamChunkEvent, StreamCompletedEvent, + ThoughtChunkEvent, + ToolCallChunkEvent, + ToolResultChunkEvent, ) +from core.workflow.node_events.node import ThoughtEndChunkEvent, ThoughtStartChunkEvent from core.workflow.nodes.base.entities import VariableSelector from core.workflow.nodes.base.node import Node from core.workflow.nodes.base.variable_template_parser import VariableTemplateParser @@ -81,10 +93,21 @@ from models.model import UploadFile from . import llm_utils from .entities import ( + AgentContext, + AggregatedResult, + LLMGenerationData, LLMNodeChatModelMessage, LLMNodeCompletionModelPromptTemplate, LLMNodeData, + LLMTraceSegment, ModelConfig, + ModelTraceSegment, + StreamBuffers, + ThinkTagStreamParser, + ToolLogPayload, + ToolOutputState, + ToolTraceSegment, + TraceState, ) from .exc import ( InvalidContextStructureError, @@ -113,7 +136,7 @@ class LLMNode(Node[LLMNodeData]): # Instance attributes specific to LLMNode. 
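`LLMNode` below consumes the `ThinkTagStreamParser` added in `entities.py` above as a stream of `(kind, segment)` pairs (`content`, `thought_start`, `thought`, `thought_end`). A minimal, self-contained sketch of that contract — simplified to assume a `<think>` tag never splits across chunks, which the real parser additionally handles by buffering partial tags:

```python
import re
from collections.abc import Iterator

_TAG = re.compile(r"<think>(.*?)</think>", re.DOTALL)

def split_think(chunk: str) -> Iterator[tuple[str, str]]:
    """Yield (kind, segment) pairs: content, thought_start, thought, thought_end."""
    pos = 0
    for m in _TAG.finditer(chunk):
        if m.start() > pos:
            yield ("content", chunk[pos:m.start()])
        yield ("thought_start", "")
        if m.group(1):
            yield ("thought", m.group(1))
        yield ("thought_end", "")
        pos = m.end()
    if pos < len(chunk):
        yield ("content", chunk[pos:])

assert list(split_think("Hi <think>plan step</think> done")) == [
    ("content", "Hi "),
    ("thought_start", ""),
    ("thought", "plan step"),
    ("thought_end", ""),
    ("content", " done"),
]
```

The empty `thought_start`/`thought_end` markers are what let the node emit paired `ThoughtStartChunkEvent`/`ThoughtEndChunkEvent` events even when a single thought spans many streamed chunks.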
# Output variable for file - _file_outputs: list["File"] + _file_outputs: list[File] _llm_file_saver: LLMFileSaver @@ -121,8 +144,8 @@ class LLMNode(Node[LLMNodeData]): self, id: str, config: Mapping[str, Any], - graph_init_params: "GraphInitParams", - graph_runtime_state: "GraphRuntimeState", + graph_init_params: GraphInitParams, + graph_runtime_state: GraphRuntimeState, *, llm_file_saver: LLMFileSaver | None = None, ): @@ -149,11 +172,11 @@ class LLMNode(Node[LLMNodeData]): def _run(self) -> Generator: node_inputs: dict[str, Any] = {} process_data: dict[str, Any] = {} - result_text = "" clean_text = "" usage = LLMUsage.empty_usage() finish_reason = None - reasoning_content = None + reasoning_content = "" # Initialize as empty string for consistency + clean_text = "" # Initialize clean_text to avoid UnboundLocalError variable_pool = self.graph_runtime_state.variable_pool try: @@ -234,55 +257,58 @@ class LLMNode(Node[LLMNodeData]): context_files=context_files, ) - # handle invoke result - generator = LLMNode.invoke_llm( - node_data_model=self.node_data.model, - model_instance=model_instance, - prompt_messages=prompt_messages, - stop=stop, - user_id=self.user_id, - structured_output_enabled=self.node_data.structured_output_enabled, - structured_output=self.node_data.structured_output, - file_saver=self._llm_file_saver, - file_outputs=self._file_outputs, - node_id=self._node_id, - node_type=self.node_type, - reasoning_format=self.node_data.reasoning_format, - ) - + # Variables for outputs + generation_data: LLMGenerationData | None = None structured_output: LLMStructuredOutput | None = None - for event in generator: - if isinstance(event, StreamChunkEvent): - yield event - elif isinstance(event, ModelInvokeCompletedEvent): - # Raw text - result_text = event.text - usage = event.usage - finish_reason = event.finish_reason - reasoning_content = event.reasoning_content or "" + # Check if tools are configured + if self.tool_call_enabled: + # Use tool-enabled invocation (Agent V2 style) + generator = self._invoke_llm_with_tools( + model_instance=model_instance, + prompt_messages=prompt_messages, + stop=stop, + files=files, + variable_pool=variable_pool, + node_inputs=node_inputs, + process_data=process_data, + ) + else: + # Use traditional LLM invocation + generator = LLMNode.invoke_llm( + node_data_model=self._node_data.model, + model_instance=model_instance, + prompt_messages=prompt_messages, + stop=stop, + user_id=self.user_id, + structured_output_enabled=self._node_data.structured_output_enabled, + structured_output=self._node_data.structured_output, + file_saver=self._llm_file_saver, + file_outputs=self._file_outputs, + node_id=self._node_id, + node_type=self.node_type, + reasoning_format=self._node_data.reasoning_format, + ) - # For downstream nodes, determine clean text based on reasoning_format - if self.node_data.reasoning_format == "tagged": - # Keep tags for backward compatibility - clean_text = result_text - else: - # Extract clean text from tags - clean_text, _ = LLMNode._split_reasoning(result_text, self.node_data.reasoning_format) + ( + clean_text, + reasoning_content, + generation_reasoning_content, + generation_clean_content, + usage, + finish_reason, + structured_output, + generation_data, + ) = yield from self._stream_llm_events(generator, model_instance=model_instance) - # Process structured output if available from the event. 
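The tuple unpack just above works because of PEP 380: `yield from` forwards every event to the caller and then evaluates to the sub-generator's `return` value. `_stream_llm_events` itself recovers that value the manual way, by calling `next()` and reading `StopIteration.value`. A minimal illustration with hypothetical names:

```python
from collections.abc import Generator

def stream_events() -> Generator[str, None, tuple[str, int]]:
    yield "chunk-1"
    yield "chunk-2"
    return ("final text", 42)  # carried to the caller on StopIteration.value

def run() -> Generator[str, None, None]:
    # `yield from` re-yields every chunk, then evaluates to the return value.
    text, tokens = yield from stream_events()
    yield f"done: {text!r} ({tokens} tokens)"

print(list(run()))
# ['chunk-1', 'chunk-2', "done: 'final text' (42 tokens)"]
```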
- structured_output = ( - LLMStructuredOutput(structured_output=event.structured_output) - if event.structured_output - else None - ) - - # deduct quota - llm_utils.deduct_llm_quota(tenant_id=self.tenant_id, model_instance=model_instance, usage=usage) - break - elif isinstance(event, LLMStructuredOutput): - structured_output = event + # Extract variables from generation_data if available + if generation_data: + clean_text = generation_data.text + reasoning_content = "" + usage = generation_data.usage + finish_reason = generation_data.finish_reason + # Unified process_data building process_data = { "model_mode": model_config.mode, "prompts": PromptMessageUtil.prompt_messages_to_prompt_for_saving( @@ -293,24 +319,88 @@ class LLMNode(Node[LLMNodeData]): "model_provider": model_config.provider, "model_name": model_config.model, } + if self.tool_call_enabled and self._node_data.tools: + process_data["tools"] = [ + { + "type": tool.type.value if hasattr(tool.type, "value") else tool.type, + "provider_name": tool.provider_name, + "tool_name": tool.tool_name, + } + for tool in self._node_data.tools + if tool.enabled + ] + # Unified outputs building outputs = { "text": clean_text, "reasoning_content": reasoning_content, "usage": jsonable_encoder(usage), "finish_reason": finish_reason, } + + # Build generation field + if generation_data: + # Use generation_data from tool invocation (supports multi-turn) + generation = { + "content": generation_data.text, + "reasoning_content": generation_data.reasoning_contents, # [thought1, thought2, ...] + "tool_calls": [self._serialize_tool_call(item) for item in generation_data.tool_calls], + "sequence": generation_data.sequence, + } + files_to_output = generation_data.files + else: + # Traditional LLM invocation + generation_reasoning = generation_reasoning_content or reasoning_content + generation_content = generation_clean_content or clean_text + sequence: list[dict[str, Any]] = [] + if generation_reasoning: + sequence = [ + {"type": "reasoning", "index": 0}, + {"type": "content", "start": 0, "end": len(generation_content)}, + ] + generation = { + "content": generation_content, + "reasoning_content": [generation_reasoning] if generation_reasoning else [], + "tool_calls": [], + "sequence": sequence, + } + files_to_output = self._file_outputs + + outputs["generation"] = generation + if files_to_output: + outputs["files"] = ArrayFileSegment(value=files_to_output) if structured_output: outputs["structured_output"] = structured_output.structured_output - if self._file_outputs: - outputs["files"] = ArrayFileSegment(value=self._file_outputs) # Send final chunk event to indicate streaming is complete - yield StreamChunkEvent( - selector=[self._node_id, "text"], - chunk="", - is_final=True, - ) + if not self.tool_call_enabled: + # For tool calls, final events are already sent in _process_tool_outputs + yield StreamChunkEvent( + selector=[self._node_id, "text"], + chunk="", + is_final=True, + ) + yield StreamChunkEvent( + selector=[self._node_id, "generation", "content"], + chunk="", + is_final=True, + ) + yield ThoughtChunkEvent( + selector=[self._node_id, "generation", "thought"], + chunk="", + is_final=True, + ) + + metadata: dict[WorkflowNodeExecutionMetadataKey, Any] = { + WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: usage.total_tokens, + WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: usage.total_price, + WorkflowNodeExecutionMetadataKey.CURRENCY: usage.currency, + } + + if generation_data and generation_data.trace: + 
metadata[WorkflowNodeExecutionMetadataKey.LLM_TRACE] = [ + segment.model_dump() for segment in generation_data.trace + ] yield StreamCompletedEvent( node_run_result=NodeRunResult( @@ -318,11 +408,7 @@ class LLMNode(Node[LLMNodeData]): inputs=node_inputs, process_data=process_data, outputs=outputs, - metadata={ - WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: usage.total_tokens, - WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: usage.total_price, - WorkflowNodeExecutionMetadataKey.CURRENCY: usage.currency, - }, + metadata=metadata, llm_usage=usage, ) ) @@ -361,7 +447,7 @@ class LLMNode(Node[LLMNodeData]): structured_output_enabled: bool, structured_output: Mapping[str, Any] | None = None, file_saver: LLMFileSaver, - file_outputs: list["File"], + file_outputs: list[File], node_id: str, node_type: NodeType, reasoning_format: Literal["separated", "tagged"] = "tagged", @@ -415,7 +501,7 @@ class LLMNode(Node[LLMNodeData]): *, invoke_result: LLMResult | Generator[LLMResultChunk | LLMStructuredOutput, None, None], file_saver: LLMFileSaver, - file_outputs: list["File"], + file_outputs: list[File], node_id: str, node_type: NodeType, reasoning_format: Literal["separated", "tagged"] = "tagged", @@ -444,6 +530,8 @@ class LLMNode(Node[LLMNodeData]): usage = LLMUsage.empty_usage() finish_reason = None full_text_buffer = io.StringIO() + think_parser = ThinkTagStreamParser() + reasoning_chunks: list[str] = [] # Initialize streaming metrics tracking start_time = request_start_time if request_start_time is not None else time.perf_counter() @@ -472,12 +560,45 @@ class LLMNode(Node[LLMNodeData]): has_content = True full_text_buffer.write(text_part) + # Text output: always forward raw chunk (keep tags intact) yield StreamChunkEvent( selector=[node_id, "text"], chunk=text_part, is_final=False, ) + # Generation output: split out thoughts, forward only non-thought content chunks + for kind, segment in think_parser.process(text_part): + if not segment: + if kind not in {"thought_start", "thought_end"}: + continue + + if kind == "thought_start": + yield ThoughtStartChunkEvent( + selector=[node_id, "generation", "thought"], + chunk="", + is_final=False, + ) + elif kind == "thought": + reasoning_chunks.append(segment) + yield ThoughtChunkEvent( + selector=[node_id, "generation", "thought"], + chunk=segment, + is_final=False, + ) + elif kind == "thought_end": + yield ThoughtEndChunkEvent( + selector=[node_id, "generation", "thought"], + chunk="", + is_final=False, + ) + else: + yield StreamChunkEvent( + selector=[node_id, "generation", "content"], + chunk=segment, + is_final=False, + ) + # Update the whole metadata if not model and result.model: model = result.model @@ -492,16 +613,47 @@ class LLMNode(Node[LLMNodeData]): except OutputParserError as e: raise LLMNodeError(f"Failed to parse structured output: {e}") + for kind, segment in think_parser.flush(): + if not segment and kind not in {"thought_start", "thought_end"}: + continue + if kind == "thought_start": + yield ThoughtStartChunkEvent( + selector=[node_id, "generation", "thought"], + chunk="", + is_final=False, + ) + elif kind == "thought": + reasoning_chunks.append(segment) + yield ThoughtChunkEvent( + selector=[node_id, "generation", "thought"], + chunk=segment, + is_final=False, + ) + elif kind == "thought_end": + yield ThoughtEndChunkEvent( + selector=[node_id, "generation", "thought"], + chunk="", + is_final=False, + ) + else: + yield StreamChunkEvent( + selector=[node_id, "generation", "content"], + chunk=segment, + is_final=False, + ) + # Extract reasoning 
content from tags in the main text full_text = full_text_buffer.getvalue() if reasoning_format == "tagged": # Keep tags in text for backward compatibility clean_text = full_text - reasoning_content = "" + reasoning_content = "".join(reasoning_chunks) else: # Extract clean text and reasoning from tags clean_text, reasoning_content = LLMNode._split_reasoning(full_text, reasoning_format) + if reasoning_chunks and not reasoning_content: + reasoning_content = "".join(reasoning_chunks) # Calculate streaming metrics end_time = time.perf_counter() @@ -525,7 +677,7 @@ class LLMNode(Node[LLMNodeData]): ) @staticmethod - def _image_file_to_markdown(file: "File", /): + def _image_file_to_markdown(file: File, /): text_chunk = f"![]({file.generate_url()})" return text_chunk @@ -774,7 +926,7 @@ class LLMNode(Node[LLMNodeData]): def fetch_prompt_messages( *, sys_query: str | None = None, - sys_files: Sequence["File"], + sys_files: Sequence[File], context: str | None = None, memory: TokenBufferMemory | None = None, model_config: ModelConfigWithCredentialsEntity, @@ -785,7 +937,7 @@ class LLMNode(Node[LLMNodeData]): variable_pool: VariablePool, jinja2_variables: Sequence[VariableSelector], tenant_id: str, - context_files: list["File"] | None = None, + context_files: list[File] | None = None, ) -> tuple[Sequence[PromptMessage], Sequence[str] | None]: prompt_messages: list[PromptMessage] = [] @@ -1137,7 +1289,7 @@ class LLMNode(Node[LLMNodeData]): *, invoke_result: LLMResult | LLMResultWithStructuredOutput, saver: LLMFileSaver, - file_outputs: list["File"], + file_outputs: list[File], reasoning_format: Literal["separated", "tagged"] = "tagged", request_latency: float | None = None, ) -> ModelInvokeCompletedEvent: @@ -1179,7 +1331,7 @@ class LLMNode(Node[LLMNodeData]): *, content: ImagePromptMessageContent, file_saver: LLMFileSaver, - ) -> "File": + ) -> File: """_save_multimodal_output saves multi-modal contents generated by LLM plugins. There are two kinds of multimodal outputs: @@ -1229,7 +1381,7 @@ class LLMNode(Node[LLMNodeData]): *, contents: str | list[PromptMessageContentUnionTypes] | None, file_saver: LLMFileSaver, - file_outputs: list["File"], + file_outputs: list[File], ) -> Generator[str, None, None]: """Convert intermediate prompt messages into strings and yield them to the caller. @@ -1266,6 +1418,732 @@ class LLMNode(Node[LLMNodeData]): def retry(self) -> bool: return self.node_data.retry_config.retry_enabled + @property + def tool_call_enabled(self) -> bool: + return ( + self.node_data.tools is not None + and len(self.node_data.tools) > 0 + and all(tool.enabled for tool in self.node_data.tools) + ) + + def _stream_llm_events( + self, + generator: Generator[NodeEventBase | LLMStructuredOutput, None, LLMGenerationData | None], + *, + model_instance: ModelInstance, + ) -> Generator[ + NodeEventBase, + None, + tuple[ + str, + str, + str, + str, + LLMUsage, + str | None, + LLMStructuredOutput | None, + LLMGenerationData | None, + ], + ]: + """ + Stream events and capture generator return value in one place. + Uses generator delegation so _run stays concise while still emitting events. 
+ """ + clean_text = "" + reasoning_content = "" + generation_reasoning_content = "" + generation_clean_content = "" + usage = LLMUsage.empty_usage() + finish_reason: str | None = None + structured_output: LLMStructuredOutput | None = None + generation_data: LLMGenerationData | None = None + completed = False + + while True: + try: + event = next(generator) + except StopIteration as exc: + if isinstance(exc.value, LLMGenerationData): + generation_data = exc.value + break + + if completed: + # After completion we still drain to reach StopIteration.value + continue + + match event: + case StreamChunkEvent() | ThoughtChunkEvent(): + yield event + + case ModelInvokeCompletedEvent( + text=text, + usage=usage_event, + finish_reason=finish_reason_event, + reasoning_content=reasoning_event, + structured_output=structured_raw, + ): + clean_text = text + usage = usage_event + finish_reason = finish_reason_event + reasoning_content = reasoning_event or "" + generation_reasoning_content = reasoning_content + generation_clean_content = clean_text + + if self.node_data.reasoning_format == "tagged": + # Keep tagged text for output; also extract reasoning for generation field + generation_clean_content, generation_reasoning_content = LLMNode._split_reasoning( + clean_text, reasoning_format="separated" + ) + else: + clean_text, generation_reasoning_content = LLMNode._split_reasoning( + clean_text, self.node_data.reasoning_format + ) + generation_clean_content = clean_text + + structured_output = ( + LLMStructuredOutput(structured_output=structured_raw) if structured_raw else None + ) + + llm_utils.deduct_llm_quota(tenant_id=self.tenant_id, model_instance=model_instance, usage=usage) + completed = True + + case LLMStructuredOutput(): + structured_output = event + + case _: + continue + + return ( + clean_text, + reasoning_content, + generation_reasoning_content, + generation_clean_content, + usage, + finish_reason, + structured_output, + generation_data, + ) + + def _invoke_llm_with_tools( + self, + model_instance: ModelInstance, + prompt_messages: Sequence[PromptMessage], + stop: Sequence[str] | None, + files: Sequence[File], + variable_pool: VariablePool, + node_inputs: dict[str, Any], + process_data: dict[str, Any], + ) -> Generator[NodeEventBase, None, LLMGenerationData]: + """Invoke LLM with tools support (from Agent V2). 
+ + Returns LLMGenerationData with text, reasoning_contents, tool_calls, usage, finish_reason, files + """ + # Get model features to determine strategy + model_features = self._get_model_features(model_instance) + + # Prepare tool instances + tool_instances = self._prepare_tool_instances(variable_pool) + + # Prepare prompt files (files that come from prompt variables, not vision files) + prompt_files = self._extract_prompt_files(variable_pool) + + # Use factory to create appropriate strategy + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=model_instance, + tools=tool_instances, + files=prompt_files, + max_iterations=self._node_data.max_iterations or 10, + context=ExecutionContext(user_id=self.user_id, app_id=self.app_id, tenant_id=self.tenant_id), + ) + + # Run strategy + outputs = strategy.run( + prompt_messages=list(prompt_messages), + model_parameters=self._node_data.model.completion_params, + stop=list(stop or []), + stream=True, + ) + + # Process outputs and return generation result + result = yield from self._process_tool_outputs(outputs) + return result + + def _get_model_features(self, model_instance: ModelInstance) -> list[ModelFeature]: + """Get model schema to determine features.""" + try: + model_type_instance = model_instance.model_type_instance + model_schema = model_type_instance.get_model_schema( + model_instance.model, + model_instance.credentials, + ) + return model_schema.features if model_schema and model_schema.features else [] + except Exception: + logger.warning("Failed to get model schema, assuming no special features") + return [] + + def _prepare_tool_instances(self, variable_pool: VariablePool) -> list[Tool]: + """Prepare tool instances from configuration.""" + tool_instances = [] + + if self._node_data.tools: + for tool in self._node_data.tools: + try: + # Process settings to extract the correct structure + processed_settings = {} + for key, value in tool.settings.items(): + if isinstance(value, dict) and "value" in value and isinstance(value["value"], dict): + # Extract the nested value if it has the ToolInput structure + if "type" in value["value"] and "value" in value["value"]: + processed_settings[key] = value["value"] + else: + processed_settings[key] = value + else: + processed_settings[key] = value + + # Merge parameters with processed settings (similar to Agent Node logic) + merged_parameters = {**tool.parameters, **processed_settings} + + # Create AgentToolEntity from ToolMetadata + agent_tool = AgentToolEntity( + provider_id=tool.provider_name, + provider_type=tool.type, + tool_name=tool.tool_name, + tool_parameters=merged_parameters, + plugin_unique_identifier=tool.plugin_unique_identifier, + credential_id=tool.credential_id, + ) + + # Get tool runtime from ToolManager + tool_runtime = ToolManager.get_agent_tool_runtime( + tenant_id=self.tenant_id, + app_id=self.app_id, + agent_tool=agent_tool, + invoke_from=self.invoke_from, + variable_pool=variable_pool, + ) + + # Apply custom description from extra field if available + if tool.extra.get("description") and tool_runtime.entity.description: + tool_runtime.entity.description.llm = ( + tool.extra.get("description") or tool_runtime.entity.description.llm + ) + + tool_instances.append(tool_runtime) + except Exception as e: + logger.warning("Failed to load tool %s: %s", tool, str(e)) + continue + + return tool_instances + + def _extract_prompt_files(self, variable_pool: VariablePool) -> list[File]: + """Extract files from prompt template variables.""" + from 
core.variables import ArrayFileVariable, FileVariable + + files: list[File] = [] + + # Extract variables from prompt template + if isinstance(self._node_data.prompt_template, list): + for message in self._node_data.prompt_template: + if message.text: + parser = VariableTemplateParser(message.text) + variable_selectors = parser.extract_variable_selectors() + + for variable_selector in variable_selectors: + variable = variable_pool.get(variable_selector.value_selector) + if isinstance(variable, FileVariable) and variable.value: + files.append(variable.value) + elif isinstance(variable, ArrayFileVariable) and variable.value: + files.extend(variable.value) + + return files + + @staticmethod + def _serialize_tool_call(tool_call: ToolCallResult) -> dict[str, Any]: + """Convert ToolCallResult into JSON-friendly dict.""" + + def _file_to_ref(file: File) -> str | None: + # Align with streamed tool result events which carry file IDs + return file.id or file.related_id + + files = [] + for file in tool_call.files or []: + ref = _file_to_ref(file) + if ref: + files.append(ref) + + return { + "id": tool_call.id, + "name": tool_call.name, + "arguments": tool_call.arguments, + "output": tool_call.output, + "files": files, + "status": tool_call.status.value if hasattr(tool_call.status, "value") else tool_call.status, + "elapsed_time": tool_call.elapsed_time, + } + + def _generate_model_provider_icon_url(self, provider: str, dark: bool = False) -> str | None: + """Generate icon URL for model provider.""" + from yarl import URL + + from configs import dify_config + + icon_type = "icon_small_dark" if dark else "icon_small" + try: + return str( + URL(dify_config.CONSOLE_API_URL or "/") + / "console" + / "api" + / "workspaces" + / "current" + / "model-providers" + / provider + / icon_type + / "en_US" + ) + except Exception: + return None + + def _flush_model_segment( + self, + buffers: StreamBuffers, + trace_state: TraceState, + error: str | None = None, + ) -> None: + """Flush pending thought/content buffers into a single model trace segment.""" + if not buffers.pending_thought and not buffers.pending_content and not buffers.pending_tool_calls: + return + + now = time.perf_counter() + duration = now - trace_state.model_segment_start_time if trace_state.model_segment_start_time else 0.0 + + # Use pending_usage from trace_state (captured from THOUGHT log) + usage = trace_state.pending_usage + + # Generate model provider icon URL + provider = self._node_data.model.provider + model_name = self._node_data.model.name + model_icon = self._generate_model_provider_icon_url(provider) + model_icon_dark = self._generate_model_provider_icon_url(provider, dark=True) + + trace_state.trace_segments.append( + LLMTraceSegment( + type="model", + duration=duration, + usage=usage, + output=ModelTraceSegment( + text="".join(buffers.pending_content) if buffers.pending_content else None, + reasoning="".join(buffers.pending_thought) if buffers.pending_thought else None, + tool_calls=list(buffers.pending_tool_calls), + ), + provider=provider, + name=model_name, + icon=model_icon, + icon_dark=model_icon_dark, + error=error, + status="error" if error else "success", + ) + ) + buffers.pending_thought.clear() + buffers.pending_content.clear() + buffers.pending_tool_calls.clear() + trace_state.model_segment_start_time = None + trace_state.pending_usage = None + + def _handle_agent_log_output( + self, output: AgentLog, buffers: StreamBuffers, trace_state: TraceState, agent_context: AgentContext + ) -> Generator[NodeEventBase, None, None]: + 
payload = ToolLogPayload.from_log(output) + agent_log_event = AgentLogEvent( + message_id=output.id, + label=output.label, + node_execution_id=self.id, + parent_id=output.parent_id, + error=output.error, + status=output.status.value, + data=output.data, + metadata={k.value: v for k, v in output.metadata.items()}, + node_id=self._node_id, + ) + for log in agent_context.agent_logs: + if log.message_id == agent_log_event.message_id: + log.data = agent_log_event.data + log.status = agent_log_event.status + log.error = agent_log_event.error + log.label = agent_log_event.label + log.metadata = agent_log_event.metadata + break + else: + agent_context.agent_logs.append(agent_log_event) + + # Handle THOUGHT log completion - capture usage for model segment + if output.log_type == AgentLog.LogType.THOUGHT and output.status == AgentLog.LogStatus.SUCCESS: + llm_usage = output.metadata.get(AgentLog.LogMetadata.LLM_USAGE) if output.metadata else None + if llm_usage: + trace_state.pending_usage = llm_usage + + if output.log_type == AgentLog.LogType.TOOL_CALL and output.status == AgentLog.LogStatus.START: + tool_name = payload.tool_name + tool_call_id = payload.tool_call_id + tool_arguments = json.dumps(payload.tool_args) if payload.tool_args else "" + + # Get icon from metadata (available at START) + tool_icon = output.metadata.get(AgentLog.LogMetadata.ICON) if output.metadata else None + tool_icon_dark = output.metadata.get(AgentLog.LogMetadata.ICON_DARK) if output.metadata else None + + if tool_call_id and tool_call_id not in trace_state.tool_call_index_map: + trace_state.tool_call_index_map[tool_call_id] = len(trace_state.tool_call_index_map) + + # Add tool call to pending list for model segment + buffers.pending_tool_calls.append(ToolCall(id=tool_call_id, name=tool_name, arguments=tool_arguments)) + + yield ToolCallChunkEvent( + selector=[self._node_id, "generation", "tool_calls"], + chunk=tool_arguments, + tool_call=ToolCall( + id=tool_call_id, + name=tool_name, + arguments=tool_arguments, + icon=tool_icon, + icon_dark=tool_icon_dark, + ), + is_final=False, + ) + + if output.log_type == AgentLog.LogType.TOOL_CALL and output.status != AgentLog.LogStatus.START: + tool_name = payload.tool_name + tool_output = payload.tool_output + tool_call_id = payload.tool_call_id + tool_files = payload.files if isinstance(payload.files, list) else [] + tool_error = payload.tool_error + tool_arguments = json.dumps(payload.tool_args) if payload.tool_args else "" + + if tool_call_id and tool_call_id not in trace_state.tool_call_index_map: + trace_state.tool_call_index_map[tool_call_id] = len(trace_state.tool_call_index_map) + + # Flush model segment before tool result processing + self._flush_model_segment(buffers, trace_state) + + if output.status == AgentLog.LogStatus.ERROR: + tool_error = output.error or payload.tool_error + if not tool_error and payload.meta: + tool_error = payload.meta.get("error") + else: + if payload.meta: + meta_error = payload.meta.get("error") + if meta_error: + tool_error = meta_error + + elapsed_time = output.metadata.get(AgentLog.LogMetadata.ELAPSED_TIME) if output.metadata else None + tool_provider = output.metadata.get(AgentLog.LogMetadata.PROVIDER) if output.metadata else None + tool_icon = output.metadata.get(AgentLog.LogMetadata.ICON) if output.metadata else None + tool_icon_dark = output.metadata.get(AgentLog.LogMetadata.ICON_DARK) if output.metadata else None + result_str = str(tool_output) if tool_output is not None else None + + tool_status: Literal["success", "error"] = "error" if 
tool_error else "success" + tool_call_segment = LLMTraceSegment( + type="tool", + duration=elapsed_time or 0.0, + usage=None, + output=ToolTraceSegment( + id=tool_call_id, + name=tool_name, + arguments=tool_arguments, + output=result_str, + ), + provider=tool_provider, + name=tool_name, + icon=tool_icon, + icon_dark=tool_icon_dark, + error=str(tool_error) if tool_error else None, + status=tool_status, + ) + trace_state.trace_segments.append(tool_call_segment) + if tool_call_id: + trace_state.tool_trace_map[tool_call_id] = tool_call_segment + + # Start new model segment tracking + trace_state.model_segment_start_time = time.perf_counter() + + yield ToolResultChunkEvent( + selector=[self._node_id, "generation", "tool_results"], + chunk=result_str or "", + tool_result=ToolResult( + id=tool_call_id, + name=tool_name, + output=result_str, + files=tool_files, + status=ToolResultStatus.ERROR if tool_error else ToolResultStatus.SUCCESS, + elapsed_time=elapsed_time, + icon=tool_icon, + icon_dark=tool_icon_dark, + ), + is_final=False, + ) + + if buffers.current_turn_reasoning: + buffers.reasoning_per_turn.append("".join(buffers.current_turn_reasoning)) + buffers.current_turn_reasoning.clear() + + def _handle_llm_chunk_output( + self, output: LLMResultChunk, buffers: StreamBuffers, trace_state: TraceState, aggregate: AggregatedResult + ) -> Generator[NodeEventBase, None, None]: + message = output.delta.message + + if message and message.content: + chunk_text = message.content + if isinstance(chunk_text, list): + chunk_text = "".join(getattr(content, "data", str(content)) for content in chunk_text) + else: + chunk_text = str(chunk_text) + + for kind, segment in buffers.think_parser.process(chunk_text): + if not segment and kind not in {"thought_start", "thought_end"}: + continue + + # Start tracking model segment time on first output + if trace_state.model_segment_start_time is None: + trace_state.model_segment_start_time = time.perf_counter() + + if kind == "thought_start": + yield ThoughtStartChunkEvent( + selector=[self._node_id, "generation", "thought"], + chunk="", + is_final=False, + ) + elif kind == "thought": + buffers.current_turn_reasoning.append(segment) + buffers.pending_thought.append(segment) + yield ThoughtChunkEvent( + selector=[self._node_id, "generation", "thought"], + chunk=segment, + is_final=False, + ) + elif kind == "thought_end": + yield ThoughtEndChunkEvent( + selector=[self._node_id, "generation", "thought"], + chunk="", + is_final=False, + ) + else: + aggregate.text += segment + buffers.pending_content.append(segment) + yield StreamChunkEvent( + selector=[self._node_id, "text"], + chunk=segment, + is_final=False, + ) + yield StreamChunkEvent( + selector=[self._node_id, "generation", "content"], + chunk=segment, + is_final=False, + ) + + if output.delta.usage: + self._accumulate_usage(aggregate.usage, output.delta.usage) + + if output.delta.finish_reason: + aggregate.finish_reason = output.delta.finish_reason + + def _flush_remaining_stream( + self, buffers: StreamBuffers, trace_state: TraceState, aggregate: AggregatedResult + ) -> Generator[NodeEventBase, None, None]: + for kind, segment in buffers.think_parser.flush(): + if not segment and kind not in {"thought_start", "thought_end"}: + continue + + # Start tracking model segment time on first output + if trace_state.model_segment_start_time is None: + trace_state.model_segment_start_time = time.perf_counter() + + if kind == "thought_start": + yield ThoughtStartChunkEvent( + selector=[self._node_id, "generation", "thought"], 
+ chunk="", + is_final=False, + ) + elif kind == "thought": + buffers.current_turn_reasoning.append(segment) + buffers.pending_thought.append(segment) + yield ThoughtChunkEvent( + selector=[self._node_id, "generation", "thought"], + chunk=segment, + is_final=False, + ) + elif kind == "thought_end": + yield ThoughtEndChunkEvent( + selector=[self._node_id, "generation", "thought"], + chunk="", + is_final=False, + ) + else: + aggregate.text += segment + buffers.pending_content.append(segment) + yield StreamChunkEvent( + selector=[self._node_id, "text"], + chunk=segment, + is_final=False, + ) + yield StreamChunkEvent( + selector=[self._node_id, "generation", "content"], + chunk=segment, + is_final=False, + ) + + if buffers.current_turn_reasoning: + buffers.reasoning_per_turn.append("".join(buffers.current_turn_reasoning)) + + # For final flush, use aggregate.usage if pending_usage is not set + # (e.g., for simple LLM calls without tool invocations) + if trace_state.pending_usage is None: + trace_state.pending_usage = aggregate.usage + + # Flush final model segment + self._flush_model_segment(buffers, trace_state) + + def _close_streams(self) -> Generator[NodeEventBase, None, None]: + yield StreamChunkEvent( + selector=[self._node_id, "text"], + chunk="", + is_final=True, + ) + yield StreamChunkEvent( + selector=[self._node_id, "generation", "content"], + chunk="", + is_final=True, + ) + yield ThoughtChunkEvent( + selector=[self._node_id, "generation", "thought"], + chunk="", + is_final=True, + ) + yield ToolCallChunkEvent( + selector=[self._node_id, "generation", "tool_calls"], + chunk="", + tool_call=ToolCall( + id="", + name="", + arguments="", + ), + is_final=True, + ) + yield ToolResultChunkEvent( + selector=[self._node_id, "generation", "tool_results"], + chunk="", + tool_result=ToolResult( + id="", + name="", + output="", + files=[], + status=ToolResultStatus.SUCCESS, + ), + is_final=True, + ) + + def _build_generation_data( + self, + trace_state: TraceState, + agent_context: AgentContext, + aggregate: AggregatedResult, + buffers: StreamBuffers, + ) -> LLMGenerationData: + sequence: list[dict[str, Any]] = [] + reasoning_index = 0 + content_position = 0 + tool_call_seen_index: dict[str, int] = {} + for trace_segment in trace_state.trace_segments: + if trace_segment.type == "thought": + sequence.append({"type": "reasoning", "index": reasoning_index}) + reasoning_index += 1 + elif trace_segment.type == "content": + segment_text = trace_segment.text or "" + start = content_position + end = start + len(segment_text) + sequence.append({"type": "content", "start": start, "end": end}) + content_position = end + elif trace_segment.type == "tool_call": + tool_id = trace_segment.tool_call.id if trace_segment.tool_call and trace_segment.tool_call.id else "" + if tool_id not in tool_call_seen_index: + tool_call_seen_index[tool_id] = len(tool_call_seen_index) + sequence.append({"type": "tool_call", "index": tool_call_seen_index[tool_id]}) + + tool_calls_for_generation: list[ToolCallResult] = [] + for log in agent_context.agent_logs: + payload = ToolLogPayload.from_mapping(log.data or {}) + tool_call_id = payload.tool_call_id + if not tool_call_id or log.status == AgentLog.LogStatus.START.value: + continue + + tool_args = payload.tool_args + log_error = payload.tool_error + log_output = payload.tool_output + result_text = log_output or log_error or "" + status = ToolResultStatus.ERROR if log_error else ToolResultStatus.SUCCESS + tool_calls_for_generation.append( + ToolCallResult( + id=tool_call_id, + 
name=payload.tool_name, + arguments=json.dumps(tool_args) if tool_args else "", + output=result_text, + status=status, + elapsed_time=log.metadata.get(AgentLog.LogMetadata.ELAPSED_TIME) if log.metadata else None, + ) + ) + + tool_calls_for_generation.sort( + key=lambda item: trace_state.tool_call_index_map.get(item.id or "", len(trace_state.tool_call_index_map)) + ) + + return LLMGenerationData( + text=aggregate.text, + reasoning_contents=buffers.reasoning_per_turn, + tool_calls=tool_calls_for_generation, + sequence=sequence, + usage=aggregate.usage, + finish_reason=aggregate.finish_reason, + files=aggregate.files, + trace=trace_state.trace_segments, + ) + + def _process_tool_outputs( + self, + outputs: Generator[LLMResultChunk | AgentLog, None, AgentResult], + ) -> Generator[NodeEventBase, None, LLMGenerationData]: + """Process strategy outputs and convert to node events.""" + state = ToolOutputState() + + try: + for output in outputs: + if isinstance(output, AgentLog): + yield from self._handle_agent_log_output(output, state.stream, state.trace, state.agent) + else: + yield from self._handle_llm_chunk_output(output, state.stream, state.trace, state.aggregate) + except StopIteration as exception: + if isinstance(getattr(exception, "value", None), AgentResult): + state.agent.agent_result = exception.value + + if state.agent.agent_result: + state.aggregate.text = state.agent.agent_result.text or state.aggregate.text + state.aggregate.files = state.agent.agent_result.files + if state.agent.agent_result.usage: + state.aggregate.usage = state.agent.agent_result.usage + if state.agent.agent_result.finish_reason: + state.aggregate.finish_reason = state.agent.agent_result.finish_reason + + yield from self._flush_remaining_stream(state.stream, state.trace, state.aggregate) + yield from self._close_streams() + + return self._build_generation_data(state.trace, state.agent, state.aggregate, state.stream) + + def _accumulate_usage(self, total_usage: LLMUsage, delta_usage: LLMUsage) -> None: + """Accumulate LLM usage statistics.""" + total_usage.prompt_tokens += delta_usage.prompt_tokens + total_usage.completion_tokens += delta_usage.completion_tokens + total_usage.total_tokens += delta_usage.total_tokens + total_usage.prompt_price += delta_usage.prompt_price + total_usage.completion_price += delta_usage.completion_price + total_usage.total_price += delta_usage.total_price + def _combine_message_content_with_role( *, contents: str | list[PromptMessageContentUnionTypes] | None = None, role: PromptMessageRole diff --git a/api/core/workflow/nodes/loop/entities.py b/api/core/workflow/nodes/loop/entities.py index 4fcad888e4..92a8702fc3 100644 --- a/api/core/workflow/nodes/loop/entities.py +++ b/api/core/workflow/nodes/loop/entities.py @@ -1,3 +1,4 @@ +from enum import StrEnum from typing import Annotated, Any, Literal from pydantic import AfterValidator, BaseModel, Field, field_validator @@ -96,3 +97,8 @@ class LoopState(BaseLoopState): Get current output. 
""" return self.current_output + + +class LoopCompletedReason(StrEnum): + LOOP_BREAK = "loop_break" + LOOP_COMPLETED = "loop_completed" diff --git a/api/core/workflow/nodes/loop/loop_node.py b/api/core/workflow/nodes/loop/loop_node.py index 1c26bbc2d0..1f9fc8a115 100644 --- a/api/core/workflow/nodes/loop/loop_node.py +++ b/api/core/workflow/nodes/loop/loop_node.py @@ -29,7 +29,7 @@ from core.workflow.node_events import ( ) from core.workflow.nodes.base import LLMUsageTrackingMixin from core.workflow.nodes.base.node import Node -from core.workflow.nodes.loop.entities import LoopNodeData, LoopVariableData +from core.workflow.nodes.loop.entities import LoopCompletedReason, LoopNodeData, LoopVariableData from core.workflow.utils.condition.processor import ConditionProcessor from factories.variable_factory import TypeMismatchError, build_segment_with_type, segment_to_variable from libs.datetime_utils import naive_utc_now @@ -96,6 +96,7 @@ class LoopNode(LLMUsageTrackingMixin, Node[LoopNodeData]): loop_duration_map: dict[str, float] = {} single_loop_variable_map: dict[str, dict[str, Any]] = {} # single loop variable output loop_usage = LLMUsage.empty_usage() + loop_node_ids = self._extract_loop_node_ids_from_config(self.graph_config, self._node_id) # Start Loop event yield LoopStartedEvent( @@ -118,6 +119,8 @@ class LoopNode(LLMUsageTrackingMixin, Node[LoopNodeData]): loop_count = 0 for i in range(loop_count): + # Clear stale variables from previous loop iterations to avoid streaming old values + self._clear_loop_subgraph_variables(loop_node_ids) graph_engine = self._create_graph_engine(start_at=start_at, root_node_id=root_node_id) loop_start_time = naive_utc_now() @@ -177,7 +180,11 @@ class LoopNode(LLMUsageTrackingMixin, Node[LoopNodeData]): WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: loop_usage.total_tokens, WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: loop_usage.total_price, WorkflowNodeExecutionMetadataKey.CURRENCY: loop_usage.currency, - "completed_reason": "loop_break" if reach_break_condition else "loop_completed", + WorkflowNodeExecutionMetadataKey.COMPLETED_REASON: ( + LoopCompletedReason.LOOP_BREAK + if reach_break_condition + else LoopCompletedReason.LOOP_COMPLETED.value + ), WorkflowNodeExecutionMetadataKey.LOOP_DURATION_MAP: loop_duration_map, WorkflowNodeExecutionMetadataKey.LOOP_VARIABLE_MAP: single_loop_variable_map, }, @@ -274,6 +281,17 @@ class LoopNode(LLMUsageTrackingMixin, Node[LoopNodeData]): if WorkflowNodeExecutionMetadataKey.LOOP_ID not in current_metadata: event.node_run_result.metadata = {**current_metadata, **loop_metadata} + def _clear_loop_subgraph_variables(self, loop_node_ids: set[str]) -> None: + """ + Remove variables produced by loop sub-graph nodes from previous iterations. + + Keeping stale variables causes a freshly created response coordinator in the + next iteration to fall back to outdated values when no stream chunks exist. 
+ """ + variable_pool = self.graph_runtime_state.variable_pool + for node_id in loop_node_ids: + variable_pool.remove([node_id]) + @classmethod def _extract_variable_selector_to_variable_mapping( cls, diff --git a/api/core/workflow/nodes/node_factory.py b/api/core/workflow/nodes/node_factory.py index c55ad346bf..557d3a330d 100644 --- a/api/core/workflow/nodes/node_factory.py +++ b/api/core/workflow/nodes/node_factory.py @@ -1,10 +1,21 @@ +from collections.abc import Sequence from typing import TYPE_CHECKING, final from typing_extensions import override +from configs import dify_config +from core.helper.code_executor.code_executor import CodeExecutor +from core.helper.code_executor.code_node_provider import CodeNodeProvider from core.workflow.enums import NodeType from core.workflow.graph import NodeFactory from core.workflow.nodes.base.node import Node +from core.workflow.nodes.code.code_node import CodeNode +from core.workflow.nodes.code.limits import CodeNodeLimits +from core.workflow.nodes.template_transform.template_renderer import ( + CodeExecutorJinja2TemplateRenderer, + Jinja2TemplateRenderer, +) +from core.workflow.nodes.template_transform.template_transform_node import TemplateTransformNode from libs.typing import is_str, is_str_dict from .node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING @@ -27,9 +38,29 @@ class DifyNodeFactory(NodeFactory): self, graph_init_params: "GraphInitParams", graph_runtime_state: "GraphRuntimeState", + *, + code_executor: type[CodeExecutor] | None = None, + code_providers: Sequence[type[CodeNodeProvider]] | None = None, + code_limits: CodeNodeLimits | None = None, + template_renderer: Jinja2TemplateRenderer | None = None, ) -> None: self.graph_init_params = graph_init_params self.graph_runtime_state = graph_runtime_state + self._code_executor: type[CodeExecutor] = code_executor or CodeExecutor + self._code_providers: tuple[type[CodeNodeProvider], ...] 
= ( + tuple(code_providers) if code_providers else CodeNode.default_code_providers() + ) + self._code_limits = code_limits or CodeNodeLimits( + max_string_length=dify_config.CODE_MAX_STRING_LENGTH, + max_number=dify_config.CODE_MAX_NUMBER, + min_number=dify_config.CODE_MIN_NUMBER, + max_precision=dify_config.CODE_MAX_PRECISION, + max_depth=dify_config.CODE_MAX_DEPTH, + max_number_array_length=dify_config.CODE_MAX_NUMBER_ARRAY_LENGTH, + max_string_array_length=dify_config.CODE_MAX_STRING_ARRAY_LENGTH, + max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH, + ) + self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer() @override def create_node(self, node_config: dict[str, object]) -> Node: @@ -72,6 +103,25 @@ class DifyNodeFactory(NodeFactory): raise ValueError(f"No latest version class found for node type: {node_type}") # Create node instance + if node_type == NodeType.CODE: + return CodeNode( + id=node_id, + config=node_config, + graph_init_params=self.graph_init_params, + graph_runtime_state=self.graph_runtime_state, + code_executor=self._code_executor, + code_providers=self._code_providers, + code_limits=self._code_limits, + ) + if node_type == NodeType.TEMPLATE_TRANSFORM: + return TemplateTransformNode( + id=node_id, + config=node_config, + graph_init_params=self.graph_init_params, + graph_runtime_state=self.graph_runtime_state, + template_renderer=self._template_renderer, + ) + return node_class( id=node_id, config=node_config, diff --git a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py index 93db417b15..08e0542d61 100644 --- a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py +++ b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py @@ -281,7 +281,7 @@ class ParameterExtractorNode(Node[ParameterExtractorNodeData]): # handle invoke result - text = invoke_result.message.content or "" + text = invoke_result.message.get_text_content() if not isinstance(text, str): raise InvalidTextContentTypeError(f"Invalid text content type: {type(text)}. 
Expected str.") diff --git a/api/core/workflow/nodes/start/start_node.py b/api/core/workflow/nodes/start/start_node.py index 38effa79f7..53c1b4ee6b 100644 --- a/api/core/workflow/nodes/start/start_node.py +++ b/api/core/workflow/nodes/start/start_node.py @@ -42,9 +42,17 @@ class StartNode(Node[StartNodeData]): if value is None and variable.required: raise ValueError(f"{key} is required in input form") - if not isinstance(value, dict): - raise ValueError(f"{key} must be a JSON object") + # If no value provided, skip further processing for this key + if not value: + continue + if not isinstance(value, dict): + raise ValueError(f"JSON object for '{key}' must be an object") + + # Overwrite with normalized dict to ensure downstream consistency + node_inputs[key] = value + + # If schema exists, then validate against it schema = variable.json_schema if not schema: continue @@ -53,4 +61,3 @@ class StartNode(Node[StartNodeData]): Draft7Validator(schema).validate(value) except ValidationError as e: raise ValueError(f"JSON object for '{key}' does not match schema: {e.message}") - node_inputs[key] = value diff --git a/api/core/workflow/nodes/template_transform/template_renderer.py b/api/core/workflow/nodes/template_transform/template_renderer.py new file mode 100644 index 0000000000..a5f06bf2bb --- /dev/null +++ b/api/core/workflow/nodes/template_transform/template_renderer.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, Protocol + +from core.helper.code_executor.code_executor import CodeExecutionError, CodeExecutor, CodeLanguage + + +class TemplateRenderError(ValueError): + """Raised when rendering a Jinja2 template fails.""" + + +class Jinja2TemplateRenderer(Protocol): + """Render Jinja2 templates for template transform nodes.""" + + def render_template(self, template: str, variables: Mapping[str, Any]) -> str: + """Render a Jinja2 template with provided variables.""" + raise NotImplementedError + + +class CodeExecutorJinja2TemplateRenderer(Jinja2TemplateRenderer): + """Adapter that renders Jinja2 templates via CodeExecutor.""" + + _code_executor: type[CodeExecutor] + + def __init__(self, code_executor: type[CodeExecutor] | None = None) -> None: + self._code_executor = code_executor or CodeExecutor + + def render_template(self, template: str, variables: Mapping[str, Any]) -> str: + try: + result = self._code_executor.execute_workflow_code_template( + language=CodeLanguage.JINJA2, code=template, inputs=variables + ) + except CodeExecutionError as exc: + raise TemplateRenderError(str(exc)) from exc + + rendered = result.get("result") + if not isinstance(rendered, str): + raise TemplateRenderError("Template render result must be a string.") + return rendered diff --git a/api/core/workflow/nodes/template_transform/template_transform_node.py b/api/core/workflow/nodes/template_transform/template_transform_node.py index 2274323960..f7e0bccccf 100644 --- a/api/core/workflow/nodes/template_transform/template_transform_node.py +++ b/api/core/workflow/nodes/template_transform/template_transform_node.py @@ -1,18 +1,44 @@ from collections.abc import Mapping, Sequence -from typing import Any +from typing import TYPE_CHECKING, Any from configs import dify_config -from core.helper.code_executor.code_executor import CodeExecutionError, CodeExecutor, CodeLanguage from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus from core.workflow.node_events import NodeRunResult from core.workflow.nodes.base.node import Node from 
core.workflow.nodes.template_transform.entities import TemplateTransformNodeData +from core.workflow.nodes.template_transform.template_renderer import ( + CodeExecutorJinja2TemplateRenderer, + Jinja2TemplateRenderer, + TemplateRenderError, +) + +if TYPE_CHECKING: + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH class TemplateTransformNode(Node[TemplateTransformNodeData]): node_type = NodeType.TEMPLATE_TRANSFORM + _template_renderer: Jinja2TemplateRenderer + + def __init__( + self, + id: str, + config: Mapping[str, Any], + graph_init_params: "GraphInitParams", + graph_runtime_state: "GraphRuntimeState", + *, + template_renderer: Jinja2TemplateRenderer | None = None, + ) -> None: + super().__init__( + id=id, + config=config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer() @classmethod def get_default_config(cls, filters: Mapping[str, object] | None = None) -> Mapping[str, object]: @@ -39,13 +65,11 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]): variables[variable_name] = value.to_object() if value else None # Run code try: - result = CodeExecutor.execute_workflow_code_template( - language=CodeLanguage.JINJA2, code=self.node_data.template, inputs=variables - ) - except CodeExecutionError as e: + rendered = self._template_renderer.render_template(self.node_data.template, variables) + except TemplateRenderError as e: return NodeRunResult(inputs=variables, status=WorkflowNodeExecutionStatus.FAILED, error=str(e)) - if len(result["result"]) > MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH: + if len(rendered) > MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH: return NodeRunResult( inputs=variables, status=WorkflowNodeExecutionStatus.FAILED, @@ -53,7 +77,7 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]): ) return NodeRunResult( - status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=variables, outputs={"output": result["result"]} + status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=variables, outputs={"output": rendered} ) @classmethod diff --git a/api/core/workflow/nodes/variable_assigner/common/impl.py b/api/core/workflow/nodes/variable_assigner/common/impl.py deleted file mode 100644 index 050e213535..0000000000 --- a/api/core/workflow/nodes/variable_assigner/common/impl.py +++ /dev/null @@ -1,28 +0,0 @@ -from sqlalchemy import select -from sqlalchemy.orm import Session - -from core.variables.variables import Variable -from extensions.ext_database import db -from models import ConversationVariable - -from .exc import VariableOperatorNodeError - - -class ConversationVariableUpdaterImpl: - def update(self, conversation_id: str, variable: Variable): - stmt = select(ConversationVariable).where( - ConversationVariable.id == variable.id, ConversationVariable.conversation_id == conversation_id - ) - with Session(db.engine) as session: - row = session.scalar(stmt) - if not row: - raise VariableOperatorNodeError("conversation variable not found in the database") - row.data = variable.model_dump_json() - session.commit() - - def flush(self): - pass - - -def conversation_variable_updater_factory() -> ConversationVariableUpdaterImpl: - return ConversationVariableUpdaterImpl() diff --git a/api/core/workflow/nodes/variable_assigner/v1/node.py b/api/core/workflow/nodes/variable_assigner/v1/node.py index da23207b62..d2ea7d94ea 100644 --- 
a/api/core/workflow/nodes/variable_assigner/v1/node.py +++ b/api/core/workflow/nodes/variable_assigner/v1/node.py @@ -1,9 +1,8 @@ -from collections.abc import Callable, Mapping, Sequence -from typing import TYPE_CHECKING, Any, TypeAlias +from collections.abc import Mapping, Sequence +from typing import TYPE_CHECKING, Any from core.variables import SegmentType, Variable from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID -from core.workflow.conversation_variable_updater import ConversationVariableUpdater from core.workflow.entities import GraphInitParams from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus from core.workflow.node_events import NodeRunResult @@ -11,19 +10,14 @@ from core.workflow.nodes.base.node import Node from core.workflow.nodes.variable_assigner.common import helpers as common_helpers from core.workflow.nodes.variable_assigner.common.exc import VariableOperatorNodeError -from ..common.impl import conversation_variable_updater_factory from .node_data import VariableAssignerData, WriteMode if TYPE_CHECKING: from core.workflow.runtime import GraphRuntimeState -_CONV_VAR_UPDATER_FACTORY: TypeAlias = Callable[[], ConversationVariableUpdater] - - class VariableAssignerNode(Node[VariableAssignerData]): node_type = NodeType.VARIABLE_ASSIGNER - _conv_var_updater_factory: _CONV_VAR_UPDATER_FACTORY def __init__( self, @@ -31,7 +25,6 @@ class VariableAssignerNode(Node[VariableAssignerData]): config: Mapping[str, Any], graph_init_params: "GraphInitParams", graph_runtime_state: "GraphRuntimeState", - conv_var_updater_factory: _CONV_VAR_UPDATER_FACTORY = conversation_variable_updater_factory, ): super().__init__( id=id, @@ -39,7 +32,6 @@ class VariableAssignerNode(Node[VariableAssignerData]): graph_init_params=graph_init_params, graph_runtime_state=graph_runtime_state, ) - self._conv_var_updater_factory = conv_var_updater_factory @classmethod def version(cls) -> str: @@ -96,16 +88,7 @@ class VariableAssignerNode(Node[VariableAssignerData]): # Over write the variable. self.graph_runtime_state.variable_pool.add(assigned_variable_selector, updated_variable) - # TODO: Move database operation to the pipeline. - # Update conversation variable. 
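# Note: this removal (mirrored in v2 below) takes the synchronous conversation-variable
# DB write out of node execution, consistent with the removed TODO ("Move database
# operation to the pipeline"); ConversationVariableUpdaterImpl and its factory are
# deleted above, so persistence now happens outside the node.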
- conversation_id = self.graph_runtime_state.variable_pool.get(["sys", "conversation_id"]) - if not conversation_id: - raise VariableOperatorNodeError("conversation_id not found") - conv_var_updater = self._conv_var_updater_factory() - conv_var_updater.update(conversation_id=conversation_id.text, variable=updated_variable) - conv_var_updater.flush() updated_variables = [common_helpers.variable_to_processed_data(assigned_variable_selector, updated_variable)] - return NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs={ diff --git a/api/core/workflow/nodes/variable_assigner/v2/node.py b/api/core/workflow/nodes/variable_assigner/v2/node.py index 389fb54d35..486e6bb6a7 100644 --- a/api/core/workflow/nodes/variable_assigner/v2/node.py +++ b/api/core/workflow/nodes/variable_assigner/v2/node.py @@ -1,24 +1,20 @@ import json from collections.abc import Mapping, MutableMapping, Sequence -from typing import Any, cast +from typing import TYPE_CHECKING, Any -from core.app.entities.app_invoke_entities import InvokeFrom from core.variables import SegmentType, Variable from core.variables.consts import SELECTORS_LENGTH from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID -from core.workflow.conversation_variable_updater import ConversationVariableUpdater from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus from core.workflow.node_events import NodeRunResult from core.workflow.nodes.base.node import Node from core.workflow.nodes.variable_assigner.common import helpers as common_helpers from core.workflow.nodes.variable_assigner.common.exc import VariableOperatorNodeError -from core.workflow.nodes.variable_assigner.common.impl import conversation_variable_updater_factory from . import helpers from .entities import VariableAssignerNodeData, VariableOperationItem from .enums import InputType, Operation from .exc import ( - ConversationIDNotFoundError, InputTypeNotSupportedError, InvalidDataError, InvalidInputValueError, @@ -26,6 +22,10 @@ from .exc import ( VariableNotFoundError, ) +if TYPE_CHECKING: + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState + def _target_mapping_from_item(mapping: MutableMapping[str, Sequence[str]], node_id: str, item: VariableOperationItem): selector_node_id = item.variable_selector[0] @@ -53,6 +53,20 @@ def _source_mapping_from_item(mapping: MutableMapping[str, Sequence[str]], node_ class VariableAssignerNode(Node[VariableAssignerNodeData]): node_type = NodeType.VARIABLE_ASSIGNER + def __init__( + self, + id: str, + config: Mapping[str, Any], + graph_init_params: "GraphInitParams", + graph_runtime_state: "GraphRuntimeState", + ): + super().__init__( + id=id, + config=config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + def blocks_variable_output(self, variable_selectors: set[tuple[str, ...]]) -> bool: """ Check if this Variable Assigner node blocks the output of specific variables. @@ -70,9 +84,6 @@ class VariableAssignerNode(Node[VariableAssignerNodeData]): return False - def _conv_var_updater_factory(self) -> ConversationVariableUpdater: - return conversation_variable_updater_factory() - @classmethod def version(cls) -> str: return "2" @@ -179,26 +190,12 @@ class VariableAssignerNode(Node[VariableAssignerNodeData]): # remove the duplicated items first. 
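# Note: selectors are lists and therefore unhashable, so they are mapped to tuples
# before set-based deduplication on the next line; the resulting order is arbitrary.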
updated_variable_selectors = list(set(map(tuple, updated_variable_selectors))) - conv_var_updater = self._conv_var_updater_factory() - # Update variables for selector in updated_variable_selectors: variable = self.graph_runtime_state.variable_pool.get(selector) if not isinstance(variable, Variable): raise VariableNotFoundError(variable_selector=selector) process_data[variable.name] = variable.value - if variable.selector[0] == CONVERSATION_VARIABLE_NODE_ID: - conversation_id = self.graph_runtime_state.variable_pool.get(["sys", "conversation_id"]) - if not conversation_id: - if self.invoke_from != InvokeFrom.DEBUGGER: - raise ConversationIDNotFoundError - else: - conversation_id = conversation_id.value - conv_var_updater.update( - conversation_id=cast(str, conversation_id), - variable=variable, - ) - conv_var_updater.flush() updated_variables = [ common_helpers.variable_to_processed_data(selector, seg) for selector in updated_variable_selectors diff --git a/api/core/workflow/repositories/draft_variable_repository.py b/api/core/workflow/repositories/draft_variable_repository.py index 97bfcd5666..66ef714c16 100644 --- a/api/core/workflow/repositories/draft_variable_repository.py +++ b/api/core/workflow/repositories/draft_variable_repository.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import abc from collections.abc import Mapping from typing import Any, Protocol @@ -23,7 +25,7 @@ class DraftVariableSaverFactory(Protocol): node_type: NodeType, node_execution_id: str, enclosing_node_id: str | None = None, - ) -> "DraftVariableSaver": + ) -> DraftVariableSaver: pass diff --git a/api/core/workflow/runtime/graph_runtime_state.py b/api/core/workflow/runtime/graph_runtime_state.py index 1561b789df..401cecc162 100644 --- a/api/core/workflow/runtime/graph_runtime_state.py +++ b/api/core/workflow/runtime/graph_runtime_state.py @@ -2,6 +2,7 @@ from __future__ import annotations import importlib import json +import threading from collections.abc import Mapping, Sequence from copy import deepcopy from dataclasses import dataclass @@ -168,6 +169,7 @@ class GraphRuntimeState: self._pending_response_coordinator_dump: str | None = None self._pending_graph_execution_workflow_id: str | None = None self._paused_nodes: set[str] = set() + self.stop_event: threading.Event = threading.Event() if graph is not None: self.attach_graph(graph) diff --git a/api/core/workflow/runtime/graph_runtime_state_protocol.py b/api/core/workflow/runtime/graph_runtime_state_protocol.py index 5e0878e873..bfbb5ba704 100644 --- a/api/core/workflow/runtime/graph_runtime_state_protocol.py +++ b/api/core/workflow/runtime/graph_runtime_state_protocol.py @@ -1,4 +1,4 @@ -from collections.abc import Mapping +from collections.abc import Mapping, Sequence from typing import Any, Protocol from core.model_runtime.entities.llm_entities import LLMUsage @@ -9,7 +9,7 @@ from core.workflow.system_variable import SystemVariableReadOnlyView class ReadOnlyVariablePool(Protocol): """Read-only interface for VariablePool.""" - def get(self, node_id: str, variable_key: str) -> Segment | None: + def get(self, selector: Sequence[str], /) -> Segment | None: """Get a variable value (read-only).""" ... 
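The `ReadOnlyVariablePool.get` protocol method now takes a single positional selector sequence instead of `(node_id, variable_key)`, matching `VariablePool.get`. A minimal call-site sketch (the `pool` binding and node/key names are illustrative, not from this diff):

```python
# Before: node id and variable key were separate positional arguments
# value = pool.get("node_123", "output")

# After: pass one selector sequence, the same shape VariablePool.get uses
value = pool.get(["node_123", "output"])
conversation_id = pool.get(["sys", "conversation_id"])
```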
diff --git a/api/core/workflow/runtime/read_only_wrappers.py b/api/core/workflow/runtime/read_only_wrappers.py index 8539727fd6..d3e4c60d9b 100644 --- a/api/core/workflow/runtime/read_only_wrappers.py +++ b/api/core/workflow/runtime/read_only_wrappers.py @@ -1,6 +1,6 @@ from __future__ import annotations -from collections.abc import Mapping +from collections.abc import Mapping, Sequence from copy import deepcopy from typing import Any @@ -18,9 +18,9 @@ class ReadOnlyVariablePoolWrapper: def __init__(self, variable_pool: VariablePool) -> None: self._variable_pool = variable_pool - def get(self, node_id: str, variable_key: str) -> Segment | None: + def get(self, selector: Sequence[str], /) -> Segment | None: """Return a copy of a variable value if present.""" - value = self._variable_pool.get([node_id, variable_key]) + value = self._variable_pool.get(selector) return deepcopy(value) if value is not None else None def get_all_by_node(self, node_id: str) -> Mapping[str, object]: diff --git a/api/core/workflow/runtime/variable_pool.py b/api/core/workflow/runtime/variable_pool.py index 7fbaec9e70..85ceb9d59e 100644 --- a/api/core/workflow/runtime/variable_pool.py +++ b/api/core/workflow/runtime/variable_pool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import re from collections import defaultdict from collections.abc import Mapping, Sequence @@ -267,6 +269,6 @@ class VariablePool(BaseModel): self.add(selector, value) @classmethod - def empty(cls) -> "VariablePool": + def empty(cls) -> VariablePool: """Create an empty variable pool.""" return cls(system_variables=SystemVariable.empty()) diff --git a/api/core/workflow/system_variable.py b/api/core/workflow/system_variable.py index ad925912a4..cda8091771 100644 --- a/api/core/workflow/system_variable.py +++ b/api/core/workflow/system_variable.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping, Sequence from types import MappingProxyType from typing import Any @@ -70,7 +72,7 @@ class SystemVariable(BaseModel): return data @classmethod - def empty(cls) -> "SystemVariable": + def empty(cls) -> SystemVariable: return cls() def to_dict(self) -> dict[SystemVariableKey, Any]: @@ -114,7 +116,7 @@ class SystemVariable(BaseModel): d[SystemVariableKey.TIMESTAMP] = self.timestamp return d - def as_view(self) -> "SystemVariableReadOnlyView": + def as_view(self) -> SystemVariableReadOnlyView: return SystemVariableReadOnlyView(self) diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py index d4ec29518a..ddf545bb34 100644 --- a/api/core/workflow/workflow_entry.py +++ b/api/core/workflow/workflow_entry.py @@ -14,7 +14,7 @@ from core.workflow.errors import WorkflowNodeRunFailedError from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel -from core.workflow.graph_engine.layers import DebugLoggingLayer, ExecutionLimitsLayer +from core.workflow.graph_engine.layers import DebugLoggingLayer, ExecutionLimitsLayer, ObservabilityLayer from core.workflow.graph_engine.protocols.command_channel import CommandChannel from core.workflow.graph_events import GraphEngineEvent, GraphNodeEventBase, GraphRunFailedEvent from core.workflow.nodes import NodeType @@ -23,6 +23,7 @@ from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from 
core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, load_into_variable_pool +from extensions.otel.runtime import is_instrument_flag_enabled from factories import file_factory from models.enums import UserFrom from models.workflow import Workflow @@ -98,6 +99,10 @@ class WorkflowEntry: ) self.graph_engine.layer(limits_layer) + # Add observability layer when OTel is enabled + if dify_config.ENABLE_OTEL or is_instrument_flag_enabled(): + self.graph_engine.layer(ObservabilityLayer()) + def run(self) -> Generator[GraphEngineEvent, None, None]: graph_engine = self.graph_engine diff --git a/api/docker/entrypoint.sh b/api/docker/entrypoint.sh index 6313085e64..c0279f893b 100755 --- a/api/docker/entrypoint.sh +++ b/api/docker/entrypoint.sh @@ -3,8 +3,9 @@ set -e # Set UTF-8 encoding to address potential encoding issues in containerized environments -export LANG=${LANG:-en_US.UTF-8} -export LC_ALL=${LC_ALL:-en_US.UTF-8} +# Use C.UTF-8 which is universally available in all containers +export LANG=${LANG:-C.UTF-8} +export LC_ALL=${LC_ALL:-C.UTF-8} export PYTHONIOENCODING=${PYTHONIOENCODING:-utf-8} if [[ "${MIGRATION_ENABLED}" == "true" ]]; then @@ -34,10 +35,10 @@ if [[ "${MODE}" == "worker" ]]; then if [[ -z "${CELERY_QUEUES}" ]]; then if [[ "${EDITION}" == "CLOUD" ]]; then # Cloud edition: separate queues for dataset and trigger tasks - DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor" + DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention" else # Community edition (SELF_HOSTED): dataset, pipeline and workflow have separate queues - DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor" + DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention" fi else DEFAULT_QUEUES="${CELERY_QUEUES}" @@ -69,6 +70,53 @@ if [[ "${MODE}" == "worker" ]]; then elif [[ "${MODE}" == "beat" ]]; then exec celery -A app.celery beat --loglevel ${LOG_LEVEL:-INFO} + +elif [[ "${MODE}" == "job" ]]; then + # Job mode: Run a one-time Flask command and exit + # Pass Flask command and arguments via container args + # Example K8s usage: + # args: + # - create-tenant + # - --email + # - admin@example.com + # + # Example Docker usage: + # docker run -e MODE=job dify-api:latest create-tenant --email admin@example.com + + if [[ $# -eq 0 ]]; then + echo "Error: No command specified for job mode." + echo "" + echo "Usage examples:" + echo " Kubernetes:" + echo " args: [create-tenant, --email, admin@example.com]" + echo "" + echo " Docker:" + echo " docker run -e MODE=job dify-api create-tenant --email admin@example.com" + echo "" + echo "Available commands:" + echo " create-tenant, reset-password, reset-email, upgrade-db," + echo " vdb-migrate, install-plugins, and more..." 
+ echo "" + echo "Run 'flask --help' to see all available commands." + exit 1 + fi + + echo "Running Flask job command: flask $*" + + # Temporarily disable exit on error to capture exit code + set +e + flask "$@" + JOB_EXIT_CODE=$? + set -e + + if [[ ${JOB_EXIT_CODE} -eq 0 ]]; then + echo "Job completed successfully." + else + echo "Job failed with exit code ${JOB_EXIT_CODE}." + fi + + exit ${JOB_EXIT_CODE} + else if [[ "${DEBUG}" == "true" ]]; then exec flask run --host=${DIFY_BIND_ADDRESS:-0.0.0.0} --port=${DIFY_PORT:-5001} --debug diff --git a/api/events/event_handlers/update_provider_when_message_created.py b/api/events/event_handlers/update_provider_when_message_created.py index 84266ab0fa..1ddcc8f792 100644 --- a/api/events/event_handlers/update_provider_when_message_created.py +++ b/api/events/event_handlers/update_provider_when_message_created.py @@ -10,7 +10,7 @@ from sqlalchemy.orm import Session from configs import dify_config from core.app.entities.app_invoke_entities import AgentChatAppGenerateEntity, ChatAppGenerateEntity -from core.entities.provider_entities import QuotaUnit, SystemConfiguration +from core.entities.provider_entities import ProviderQuotaType, QuotaUnit, SystemConfiguration from events.message_event import message_was_created from extensions.ext_database import db from extensions.ext_redis import redis_client, redis_fallback @@ -134,22 +134,38 @@ def handle(sender: Message, **kwargs): system_configuration=system_configuration, model_name=model_config.model, ) - if used_quota is not None: - quota_update = _ProviderUpdateOperation( - filters=_ProviderUpdateFilters( + if provider_configuration.system_configuration.current_quota_type == ProviderQuotaType.TRIAL: + from services.credit_pool_service import CreditPoolService + + CreditPoolService.check_and_deduct_credits( tenant_id=tenant_id, - provider_name=ModelProviderID(model_config.provider).provider_name, - provider_type=ProviderType.SYSTEM, - quota_type=provider_configuration.system_configuration.current_quota_type.value, - ), - values=_ProviderUpdateValues(quota_used=Provider.quota_used + used_quota, last_used=current_time), - additional_filters=_ProviderUpdateAdditionalFilters( - quota_limit_check=True # Provider.quota_limit > Provider.quota_used - ), - description="quota_deduction_update", - ) - updates_to_perform.append(quota_update) + credits_required=used_quota, + pool_type="trial", + ) + elif provider_configuration.system_configuration.current_quota_type == ProviderQuotaType.PAID: + from services.credit_pool_service import CreditPoolService + + CreditPoolService.check_and_deduct_credits( + tenant_id=tenant_id, + credits_required=used_quota, + pool_type="paid", + ) + else: + quota_update = _ProviderUpdateOperation( + filters=_ProviderUpdateFilters( + tenant_id=tenant_id, + provider_name=ModelProviderID(model_config.provider).provider_name, + provider_type=ProviderType.SYSTEM.value, + quota_type=provider_configuration.system_configuration.current_quota_type.value, + ), + values=_ProviderUpdateValues(quota_used=Provider.quota_used + used_quota, last_used=current_time), + additional_filters=_ProviderUpdateAdditionalFilters( + quota_limit_check=True # Provider.quota_limit > Provider.quota_used + ), + description="quota_deduction_update", + ) + updates_to_perform.append(quota_update) # Execute all updates start_time = time_module.perf_counter() diff --git a/api/extensions/ext_blueprints.py b/api/extensions/ext_blueprints.py index cf994c11df..7d13f0c061 100644 --- a/api/extensions/ext_blueprints.py +++ 
b/api/extensions/ext_blueprints.py @@ -6,6 +6,7 @@ BASE_CORS_HEADERS: tuple[str, ...] = ("Content-Type", HEADER_NAME_APP_CODE, HEAD SERVICE_API_HEADERS: tuple[str, ...] = (*BASE_CORS_HEADERS, "Authorization") AUTHENTICATED_HEADERS: tuple[str, ...] = (*SERVICE_API_HEADERS, HEADER_NAME_CSRF_TOKEN) FILES_HEADERS: tuple[str, ...] = (*BASE_CORS_HEADERS, HEADER_NAME_CSRF_TOKEN) +EMBED_HEADERS: tuple[str, ...] = ("Content-Type", HEADER_NAME_APP_CODE) EXPOSED_HEADERS: tuple[str, ...] = ("X-Version", "X-Env", "X-Trace-Id") @@ -42,10 +43,28 @@ def init_app(app: DifyApp): _apply_cors_once( web_bp, - resources={r"/*": {"origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS}}, - supports_credentials=True, - allow_headers=list(AUTHENTICATED_HEADERS), - methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], + resources={ + # Embedded bot endpoints (unauthenticated, cross-origin safe) + r"^/chat-messages$": { + "origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS, + "supports_credentials": False, + "allow_headers": list(EMBED_HEADERS), + "methods": ["GET", "POST", "OPTIONS"], + }, + r"^/chat-messages/.*": { + "origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS, + "supports_credentials": False, + "allow_headers": list(EMBED_HEADERS), + "methods": ["GET", "POST", "OPTIONS"], + }, + # Default web application endpoints (authenticated) + r"/*": { + "origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS, + "supports_credentials": True, + "allow_headers": list(AUTHENTICATED_HEADERS), + "methods": ["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], + }, + }, expose_headers=list(EXPOSED_HEADERS), ) app.register_blueprint(web_bp) diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index ffa0e3d7a0..af983f6d87 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -12,9 +12,8 @@ from dify_app import DifyApp def _get_celery_ssl_options() -> dict[str, Any] | None: """Get SSL configuration for Celery broker/backend connections.""" - # Use REDIS_USE_SSL for consistency with the main Redis client # Only apply SSL if we're using Redis as broker/backend - if not dify_config.REDIS_USE_SSL: + if not dify_config.BROKER_USE_SSL: return None # Check if Celery is actually using Redis @@ -47,7 +46,11 @@ def _get_celery_ssl_options() -> dict[str, Any] | None: def init_app(app: DifyApp) -> Celery: class FlaskTask(Task): def __call__(self, *args: object, **kwargs: object) -> object: + from core.logging.context import init_request_context + with app.app_context(): + # Initialize logging context for this task (similar to before_request in Flask) + init_request_context() return self.run(*args, **kwargs) broker_transport_options = {} @@ -162,6 +165,13 @@ def init_app(app: DifyApp) -> Celery: "task": "schedule.clean_workflow_runlogs_precise.clean_workflow_runlogs_precise", "schedule": crontab(minute="0", hour="2"), } + if dify_config.ENABLE_WORKFLOW_RUN_CLEANUP_TASK: + # for saas only + imports.append("schedule.clean_workflow_runs_task") + beat_schedule["clean_workflow_runs_task"] = { + "task": "schedule.clean_workflow_runs_task.clean_workflow_runs_task", + "schedule": crontab(minute="0", hour="0"), + } if dify_config.ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK: imports.append("schedule.workflow_schedule_task") beat_schedule["workflow_schedule_task"] = { diff --git a/api/extensions/ext_commands.py b/api/extensions/ext_commands.py index 71a63168a5..3baefd2c89 100644 --- a/api/extensions/ext_commands.py +++ b/api/extensions/ext_commands.py @@ -4,6 +4,7 @@ from dify_app import DifyApp def init_app(app: 
DifyApp): from commands import ( add_qdrant_index, + clean_workflow_runs, cleanup_orphaned_draft_variables, clear_free_plan_tenant_expired_logs, clear_orphaned_file_records, @@ -11,6 +12,7 @@ def init_app(app: DifyApp): create_tenant, extract_plugins, extract_unique_plugins, + file_usage, fix_app_site_missing, install_plugins, install_rag_pipeline_plugins, @@ -21,6 +23,7 @@ def init_app(app: DifyApp): reset_email, reset_encrypt_key_pair, reset_password, + seed_expired_workflow_runs, setup_datasource_oauth_client, setup_system_tool_oauth_client, setup_system_trigger_oauth_client, @@ -47,6 +50,7 @@ def init_app(app: DifyApp): clear_free_plan_tenant_expired_logs, clear_orphaned_file_records, remove_orphaned_files_on_storage, + file_usage, setup_system_tool_oauth_client, setup_system_trigger_oauth_client, cleanup_orphaned_draft_variables, @@ -54,6 +58,8 @@ def init_app(app: DifyApp): setup_datasource_oauth_client, transform_datasource_credentials, install_rag_pipeline_plugins, + clean_workflow_runs, + seed_expired_workflow_runs, ] for cmd in cmds_to_register: app.cli.add_command(cmd) diff --git a/api/extensions/ext_database.py b/api/extensions/ext_database.py index c90b1d0a9f..2e0d4c889a 100644 --- a/api/extensions/ext_database.py +++ b/api/extensions/ext_database.py @@ -53,3 +53,10 @@ def _setup_gevent_compatibility(): def init_app(app: DifyApp): db.init_app(app) _setup_gevent_compatibility() + + # Eagerly build the engine so pool_size/max_overflow/etc. come from config + try: + with app.app_context(): + _ = db.engine # triggers engine creation with the configured options + except Exception: + logger.exception("Failed to initialize SQLAlchemy engine during app startup") diff --git a/api/extensions/ext_logging.py b/api/extensions/ext_logging.py index 000d03ac41..978a40c503 100644 --- a/api/extensions/ext_logging.py +++ b/api/extensions/ext_logging.py @@ -1,18 +1,19 @@ +"""Logging extension for Dify Flask application.""" + import logging import os import sys -import uuid from logging.handlers import RotatingFileHandler -import flask - from configs import dify_config -from core.helper.trace_id_helper import get_trace_id_from_otel_context from dify_app import DifyApp def init_app(app: DifyApp): + """Initialize logging with support for text or JSON format.""" log_handlers: list[logging.Handler] = [] + + # File handler log_file = dify_config.LOG_FILE if log_file: log_dir = os.path.dirname(log_file) @@ -25,27 +26,53 @@ def init_app(app: DifyApp): ) ) - # Always add StreamHandler to log to console + # Console handler sh = logging.StreamHandler(sys.stdout) log_handlers.append(sh) - # Apply RequestIdFilter to all handlers - for handler in log_handlers: - handler.addFilter(RequestIdFilter()) + # Apply filters to all handlers + from core.logging.filters import IdentityContextFilter, TraceContextFilter + for handler in log_handlers: + handler.addFilter(TraceContextFilter()) + handler.addFilter(IdentityContextFilter()) + + # Configure formatter based on format type + formatter = _create_formatter() + for handler in log_handlers: + handler.setFormatter(formatter) + + # Configure root logger logging.basicConfig( level=dify_config.LOG_LEVEL, - format=dify_config.LOG_FORMAT, - datefmt=dify_config.LOG_DATEFORMAT, handlers=log_handlers, force=True, ) - # Apply RequestIdFormatter to all handlers - apply_request_id_formatter() - # Disable propagation for noisy loggers to avoid duplicate logs logging.getLogger("sqlalchemy.engine").propagate = False + + # Apply timezone if specified (only for text format) + if 
dify_config.LOG_OUTPUT_FORMAT == "text": + _apply_timezone(log_handlers) + + +def _create_formatter() -> logging.Formatter: + """Create appropriate formatter based on configuration.""" + if dify_config.LOG_OUTPUT_FORMAT == "json": + from core.logging.structured_formatter import StructuredJSONFormatter + + return StructuredJSONFormatter() + else: + # Text format - use existing pattern with backward compatible formatter + return _TextFormatter( + fmt=dify_config.LOG_FORMAT, + datefmt=dify_config.LOG_DATEFORMAT, + ) + + +def _apply_timezone(handlers: list[logging.Handler]): + """Apply timezone conversion to text formatters.""" log_tz = dify_config.LOG_TZ if log_tz: from datetime import datetime @@ -57,34 +84,51 @@ def init_app(app: DifyApp): def time_converter(seconds): return datetime.fromtimestamp(seconds, tz=timezone).timetuple() - for handler in logging.root.handlers: + for handler in handlers: if handler.formatter: - handler.formatter.converter = time_converter + handler.formatter.converter = time_converter # type: ignore[attr-defined] -def get_request_id(): - if getattr(flask.g, "request_id", None): - return flask.g.request_id +class _TextFormatter(logging.Formatter): + """Text formatter that ensures trace_id and req_id are always present.""" - new_uuid = uuid.uuid4().hex[:10] - flask.g.request_id = new_uuid - - return new_uuid + def format(self, record: logging.LogRecord) -> str: + if not hasattr(record, "req_id"): + record.req_id = "" + if not hasattr(record, "trace_id"): + record.trace_id = "" + if not hasattr(record, "span_id"): + record.span_id = "" + return super().format(record) +def get_request_id() -> str: + """Get request ID for current request context. + + Deprecated: Use core.logging.context.get_request_id() directly. + """ + from core.logging.context import get_request_id as _get_request_id + + return _get_request_id() + + +# Backward compatibility aliases class RequestIdFilter(logging.Filter): - # This is a logging filter that makes the request ID available for use in - # the logging format. Note that we're checking if we're in a request - # context, as we may want to log things before Flask is fully loaded. 
- def filter(self, record): - trace_id = get_trace_id_from_otel_context() or "" - record.req_id = get_request_id() if flask.has_request_context() else "" - record.trace_id = trace_id + """Deprecated: Use TraceContextFilter from core.logging.filters instead.""" + + def filter(self, record: logging.LogRecord) -> bool: + from core.logging.context import get_request_id as _get_request_id + from core.logging.context import get_trace_id as _get_trace_id + + record.req_id = _get_request_id() + record.trace_id = _get_trace_id() return True class RequestIdFormatter(logging.Formatter): - def format(self, record): + """Deprecated: Use _TextFormatter instead.""" + + def format(self, record: logging.LogRecord) -> str: if not hasattr(record, "req_id"): record.req_id = "" if not hasattr(record, "trace_id"): @@ -93,6 +137,7 @@ class RequestIdFormatter(logging.Formatter): def apply_request_id_formatter(): + """Deprecated: Formatter is now applied in init_app.""" for handler in logging.root.handlers: if handler.formatter: handler.formatter = RequestIdFormatter(dify_config.LOG_FORMAT, dify_config.LOG_DATEFORMAT) diff --git a/api/extensions/ext_login.py b/api/extensions/ext_login.py index 5cbdd4db12..74299956c0 100644 --- a/api/extensions/ext_login.py +++ b/api/extensions/ext_login.py @@ -22,8 +22,8 @@ login_manager = flask_login.LoginManager() @login_manager.request_loader def load_user_from_request(request_from_flask_login): """Load user based on the request.""" - # Skip authentication for documentation endpoints (only when Swagger is enabled) - if dify_config.swagger_ui_enabled and request.path.endswith((dify_config.SWAGGER_UI_PATH, "/swagger.json")): + # Skip authentication for documentation endpoints + if dify_config.SWAGGER_UI_ENABLED and request.path.endswith((dify_config.SWAGGER_UI_PATH, "/swagger.json")): return None auth_token = extract_access_token(request) diff --git a/api/extensions/ext_logstore.py b/api/extensions/ext_logstore.py new file mode 100644 index 0000000000..502f0bb46b --- /dev/null +++ b/api/extensions/ext_logstore.py @@ -0,0 +1,74 @@ +""" +Logstore extension for Dify application. + +This extension initializes the logstore (Aliyun SLS) on application startup, +creating necessary projects, logstores, and indexes if they don't exist. +""" + +import logging +import os + +from dotenv import load_dotenv + +from dify_app import DifyApp + +logger = logging.getLogger(__name__) + + +def is_enabled() -> bool: + """ + Check if logstore extension is enabled. + + Returns: + True if all required Aliyun SLS environment variables are set, False otherwise + """ + # Load environment variables from .env file + load_dotenv() + + required_vars = [ + "ALIYUN_SLS_ACCESS_KEY_ID", + "ALIYUN_SLS_ACCESS_KEY_SECRET", + "ALIYUN_SLS_ENDPOINT", + "ALIYUN_SLS_REGION", + "ALIYUN_SLS_PROJECT_NAME", + ] + + all_set = all(os.environ.get(var) for var in required_vars) + + if not all_set: + logger.info("Logstore extension disabled: required Aliyun SLS environment variables not set") + + return all_set + + +def init_app(app: DifyApp): + """ + Initialize logstore on application startup. + + This function: + 1. Creates Aliyun SLS project if it doesn't exist + 2. Creates logstores (workflow_execution, workflow_node_execution) if they don't exist + 3. Creates indexes with field configurations based on PostgreSQL table structures + + This operation is idempotent and only executes once during application startup. 
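+
+    Note: if any step fails, the exception handler below logs the error and lets the
+    application continue without logstore support instead of blocking startup.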
+ + Args: + app: The Dify application instance + """ + try: + from extensions.logstore.aliyun_logstore import AliyunLogStore + + logger.info("Initializing logstore...") + + # Create logstore client and initialize project/logstores/indexes + logstore_client = AliyunLogStore() + logstore_client.init_project_logstore() + + # Attach to app for potential later use + app.extensions["logstore"] = logstore_client + + logger.info("Logstore initialized successfully") + except Exception: + logger.exception("Failed to initialize logstore") + # Don't raise - allow application to continue even if logstore init fails + # This ensures that the application can still run if logstore is misconfigured diff --git a/web/__mocks__/mime.js b/api/extensions/logstore/__init__.py similarity index 100% rename from web/__mocks__/mime.js rename to api/extensions/logstore/__init__.py diff --git a/api/extensions/logstore/aliyun_logstore.py b/api/extensions/logstore/aliyun_logstore.py new file mode 100644 index 0000000000..8c64a25be4 --- /dev/null +++ b/api/extensions/logstore/aliyun_logstore.py @@ -0,0 +1,892 @@ +from __future__ import annotations + +import logging +import os +import threading +import time +from collections.abc import Sequence +from typing import Any + +import sqlalchemy as sa +from aliyun.log import ( # type: ignore[import-untyped] + GetLogsRequest, + IndexConfig, + IndexKeyConfig, + IndexLineConfig, + LogClient, + LogItem, + PutLogsRequest, +) +from aliyun.log.auth import AUTH_VERSION_4 # type: ignore[import-untyped] +from aliyun.log.logexception import LogException # type: ignore[import-untyped] +from dotenv import load_dotenv +from sqlalchemy.orm import DeclarativeBase + +from configs import dify_config +from extensions.logstore.aliyun_logstore_pg import AliyunLogStorePG + +logger = logging.getLogger(__name__) + + +class AliyunLogStore: + """ + Singleton class for Aliyun SLS LogStore operations. + + Ensures only one instance exists to prevent multiple PG connection pools. + """ + + _instance: AliyunLogStore | None = None + _initialized: bool = False + + # Track delayed PG connection for newly created projects + _pg_connection_timer: threading.Timer | None = None + _pg_connection_delay: int = 90 # delay seconds + + # Default tokenizer for text/json fields and full-text index + # Common delimiters: comma, space, quotes, punctuation, operators, brackets, special chars + DEFAULT_TOKEN_LIST = [ + ",", + " ", + '"', + '"', + ";", + "=", + "(", + ")", + "[", + "]", + "{", + "}", + "?", + "@", + "&", + "<", + ">", + "/", + ":", + "\n", + "\t", + ] + + def __new__(cls) -> AliyunLogStore: + """Implement singleton pattern.""" + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + project_des = "dify" + + workflow_execution_logstore = "workflow_execution" + + workflow_node_execution_logstore = "workflow_node_execution" + + @staticmethod + def _sqlalchemy_type_to_logstore_type(column: Any) -> str: + """ + Map SQLAlchemy column type to Aliyun LogStore index type. 
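+
+        For example: sa.String / sa.Text -> "text", sa.Integer / sa.BigInteger -> "long",
+        sa.Float / sa.Numeric -> "double", sa.Boolean -> "long" (stored as 0/1),
+        sa.DateTime -> "text" (ISO string), sa.JSON -> "json".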
+ + Args: + column: SQLAlchemy column object + + Returns: + LogStore index type: 'text', 'long', 'double', or 'json' + """ + column_type = column.type + + # Integer types -> long + if isinstance(column_type, (sa.Integer, sa.BigInteger, sa.SmallInteger)): + return "long" + + # Float types -> double + if isinstance(column_type, (sa.Float, sa.Numeric)): + return "double" + + # String and Text types -> text + if isinstance(column_type, (sa.String, sa.Text)): + return "text" + + # DateTime -> text (stored as ISO format string in logstore) + if isinstance(column_type, sa.DateTime): + return "text" + + # Boolean -> long (stored as 0/1) + if isinstance(column_type, sa.Boolean): + return "long" + + # JSON -> json + if isinstance(column_type, sa.JSON): + return "json" + + # Default to text for unknown types + return "text" + + @staticmethod + def _generate_index_keys_from_model(model_class: type[DeclarativeBase]) -> dict[str, IndexKeyConfig]: + """ + Automatically generate LogStore field index configuration from SQLAlchemy model. + + This method introspects the SQLAlchemy model's column definitions and creates + corresponding LogStore index configurations. When the PG schema is updated via + Flask-Migrate, this method will automatically pick up the new fields on next startup. + + Args: + model_class: SQLAlchemy model class (e.g., WorkflowRun, WorkflowNodeExecutionModel) + + Returns: + Dictionary mapping field names to IndexKeyConfig objects + """ + index_keys = {} + + # Iterate over all mapped columns in the model + if hasattr(model_class, "__mapper__"): + for column_name, column_property in model_class.__mapper__.columns.items(): + # Skip relationship properties and other non-column attributes + if not hasattr(column_property, "type"): + continue + + # Map SQLAlchemy type to LogStore type + logstore_type = AliyunLogStore._sqlalchemy_type_to_logstore_type(column_property) + + # Create index configuration + # - text fields: case_insensitive for better search, with tokenizer and Chinese support + # - all fields: doc_value=True for analytics + if logstore_type == "text": + index_keys[column_name] = IndexKeyConfig( + index_type="text", + case_sensitive=False, + doc_value=True, + token_list=AliyunLogStore.DEFAULT_TOKEN_LIST, + chinese=True, + ) + else: + index_keys[column_name] = IndexKeyConfig(index_type=logstore_type, doc_value=True) + + # Add log_version field (not in PG model, but used in logstore for versioning) + index_keys["log_version"] = IndexKeyConfig(index_type="long", doc_value=True) + + return index_keys + + def __init__(self) -> None: + # Skip initialization if already initialized (singleton pattern) + if self.__class__._initialized: + return + + load_dotenv() + + self.access_key_id: str = os.environ.get("ALIYUN_SLS_ACCESS_KEY_ID", "") + self.access_key_secret: str = os.environ.get("ALIYUN_SLS_ACCESS_KEY_SECRET", "") + self.endpoint: str = os.environ.get("ALIYUN_SLS_ENDPOINT", "") + self.region: str = os.environ.get("ALIYUN_SLS_REGION", "") + self.project_name: str = os.environ.get("ALIYUN_SLS_PROJECT_NAME", "") + self.logstore_ttl: int = int(os.environ.get("ALIYUN_SLS_LOGSTORE_TTL", 365)) + self.log_enabled: bool = os.environ.get("SQLALCHEMY_ECHO", "false").lower() == "true" + self.pg_mode_enabled: bool = os.environ.get("LOGSTORE_PG_MODE_ENABLED", "true").lower() == "true" + + # Initialize SDK client + self.client = LogClient( + self.endpoint, self.access_key_id, self.access_key_secret, auth_version=AUTH_VERSION_4, region=self.region + ) + + # Append Dify identification to the existing 
user agent
+        original_user_agent = self.client._user_agent  # pyright: ignore[reportPrivateUsage]
+        dify_version = dify_config.project.version
+        enhanced_user_agent = f"Dify,Dify-{dify_version},{original_user_agent}"
+        self.client.set_user_agent(enhanced_user_agent)
+
+        # PG client will be initialized in init_project_logstore
+        self._pg_client: AliyunLogStorePG | None = None
+        self._use_pg_protocol: bool = False
+
+        self.__class__._initialized = True
+
+    @property
+    def supports_pg_protocol(self) -> bool:
+        """Check if PG protocol is supported and enabled."""
+        return self._use_pg_protocol
+
+    def _attempt_pg_connection_init(self) -> bool:
+        """
+        Attempt to initialize the PG connection.
+
+        This method tries to establish a PG connection and performs the necessary checks.
+        It is used both for immediate connection (existing projects) and delayed connection (new projects).
+
+        Returns:
+            True if PG connection was successfully established, False otherwise.
+        """
+        if not self.pg_mode_enabled or not self._pg_client:
+            return False
+
+        try:
+            self._use_pg_protocol = self._pg_client.init_connection()
+            if self._use_pg_protocol:
+                logger.info("Successfully connected to project %s using PG protocol", self.project_name)
+                # Check if scan_index is enabled for all logstores
+                self._check_and_disable_pg_if_scan_index_disabled()
+                return True
+            else:
+                logger.info("PG connection failed for project %s. Will use SDK mode.", self.project_name)
+                return False
+        except Exception as e:
+            logger.warning(
+                "Failed to establish PG connection for project %s: %s. Will use SDK mode.",
+                self.project_name,
+                str(e),
+            )
+            self._use_pg_protocol = False
+            return False
+
+    def _delayed_pg_connection_init(self) -> None:
+        """
+        Delayed initialization of PG connection for newly created projects.
+
+        This method is called by a background timer _pg_connection_delay seconds
+        (90 by default) after project creation.
+        """
+        # Double-check conditions in case state changed
+        if self._use_pg_protocol:
+            return
+
+        logger.info(
+            "Attempting delayed PG connection for newly created project %s ...",
+            self.project_name,
+        )
+        self._attempt_pg_connection_init()
+        self.__class__._pg_connection_timer = None
+
+    def init_project_logstore(self):
+        """
+        Initialize project, logstore, index, and PG connection.
+
+        This method should be called once during application startup to ensure
+        all required resources exist and connections are established.
+        """
+        # Step 1: Ensure project and logstore exist
+        project_is_new = False
+        if not self.is_project_exist():
+            self.create_project()
+            project_is_new = True
+
+        self.create_logstore_if_not_exist()
+
+        # Step 2: Initialize PG client and connection (if enabled)
+        if not self.pg_mode_enabled:
+            logger.info("PG mode is disabled. Will use SDK mode.")
+            return
+
+        # Create PG client if not already created
+        if self._pg_client is None:
+            logger.info("Initializing PG client for project %s...", self.project_name)
+            self._pg_client = AliyunLogStorePG(
+                self.access_key_id, self.access_key_secret, self.endpoint, self.project_name
+            )
+
+        # Step 3: Establish PG connection based on project status
+        if project_is_new:
+            # For newly created projects, schedule delayed PG connection
+            self._use_pg_protocol = False
+            logger.info(
+                "Project %s is newly created. 
Will use SDK mode and schedule PG connection attempt in %d seconds.", + self.project_name, + self.__class__._pg_connection_delay, + ) + if self.__class__._pg_connection_timer is not None: + self.__class__._pg_connection_timer.cancel() + self.__class__._pg_connection_timer = threading.Timer( + self.__class__._pg_connection_delay, + self._delayed_pg_connection_init, + ) + self.__class__._pg_connection_timer.daemon = True # Don't block app shutdown + self.__class__._pg_connection_timer.start() + else: + # For existing projects, attempt PG connection immediately + logger.info("Project %s already exists. Attempting PG connection...", self.project_name) + self._attempt_pg_connection_init() + + def _check_and_disable_pg_if_scan_index_disabled(self) -> None: + """ + Check if scan_index is enabled for all logstores. + If any logstore has scan_index=false, disable PG protocol. + + This is necessary because PG protocol requires scan_index to be enabled. + """ + logstore_name_list = [ + AliyunLogStore.workflow_execution_logstore, + AliyunLogStore.workflow_node_execution_logstore, + ] + + for logstore_name in logstore_name_list: + existing_config = self.get_existing_index_config(logstore_name) + if existing_config and not existing_config.scan_index: + logger.info( + "Logstore %s has scan_index=false, USE SDK mode for read/write operations. " + "PG protocol requires scan_index to be enabled.", + logstore_name, + ) + self._use_pg_protocol = False + # Close PG connection if it was initialized + if self._pg_client: + self._pg_client.close() + self._pg_client = None + return + + def is_project_exist(self) -> bool: + try: + self.client.get_project(self.project_name) + return True + except Exception as e: + if e.args[0] == "ProjectNotExist": + return False + else: + raise e + + def create_project(self): + try: + self.client.create_project(self.project_name, AliyunLogStore.project_des) + logger.info("Project %s created successfully", self.project_name) + except LogException as e: + logger.exception( + "Failed to create project %s: errorCode=%s, errorMessage=%s, requestId=%s", + self.project_name, + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + raise + + def is_logstore_exist(self, logstore_name: str) -> bool: + try: + _ = self.client.get_logstore(self.project_name, logstore_name) + return True + except Exception as e: + if e.args[0] == "LogStoreNotExist": + return False + else: + raise e + + def create_logstore_if_not_exist(self) -> None: + logstore_name_list = [ + AliyunLogStore.workflow_execution_logstore, + AliyunLogStore.workflow_node_execution_logstore, + ] + + for logstore_name in logstore_name_list: + if not self.is_logstore_exist(logstore_name): + try: + self.client.create_logstore( + project_name=self.project_name, logstore_name=logstore_name, ttl=self.logstore_ttl + ) + logger.info("logstore %s created successfully", logstore_name) + except LogException as e: + logger.exception( + "Failed to create logstore %s: errorCode=%s, errorMessage=%s, requestId=%s", + logstore_name, + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + raise + + # Ensure index contains all Dify-required fields + # This intelligently merges with existing config, preserving custom indexes + self.ensure_index_config(logstore_name) + + def is_index_exist(self, logstore_name: str) -> bool: + try: + _ = self.client.get_index_config(self.project_name, logstore_name) + return True + except Exception as e: + if e.args[0] == "IndexConfigNotExist": + return False + else: + raise e + + def 
get_existing_index_config(self, logstore_name: str) -> IndexConfig | None: + """ + Get existing index configuration from logstore. + + Args: + logstore_name: Name of the logstore + + Returns: + IndexConfig object if index exists, None otherwise + """ + try: + response = self.client.get_index_config(self.project_name, logstore_name) + return response.get_index_config() + except Exception as e: + if e.args[0] == "IndexConfigNotExist": + return None + else: + logger.exception("Failed to get index config for logstore %s", logstore_name) + raise e + + def _get_workflow_execution_index_keys(self) -> dict[str, IndexKeyConfig]: + """ + Get field index configuration for workflow_execution logstore. + + This method automatically generates index configuration from the WorkflowRun SQLAlchemy model. + When the PG schema is updated via Flask-Migrate, the index configuration will be automatically + updated on next application startup. + """ + from models.workflow import WorkflowRun + + index_keys = self._generate_index_keys_from_model(WorkflowRun) + + # Add custom fields that are in logstore but not in PG model + # These fields are added by the repository layer + index_keys["error_message"] = IndexKeyConfig( + index_type="text", + case_sensitive=False, + doc_value=True, + token_list=self.DEFAULT_TOKEN_LIST, + chinese=True, + ) # Maps to 'error' in PG + index_keys["started_at"] = IndexKeyConfig( + index_type="text", + case_sensitive=False, + doc_value=True, + token_list=self.DEFAULT_TOKEN_LIST, + chinese=True, + ) # Maps to 'created_at' in PG + + logger.info("Generated %d index keys for workflow_execution from WorkflowRun model", len(index_keys)) + return index_keys + + def _get_workflow_node_execution_index_keys(self) -> dict[str, IndexKeyConfig]: + """ + Get field index configuration for workflow_node_execution logstore. + + This method automatically generates index configuration from the WorkflowNodeExecutionModel. + When the PG schema is updated via Flask-Migrate, the index configuration will be automatically + updated on next application startup. + """ + from models.workflow import WorkflowNodeExecutionModel + + index_keys = self._generate_index_keys_from_model(WorkflowNodeExecutionModel) + + logger.debug( + "Generated %d index keys for workflow_node_execution from WorkflowNodeExecutionModel", len(index_keys) + ) + return index_keys + + def _get_index_config(self, logstore_name: str) -> IndexConfig: + """ + Get index configuration for the specified logstore. + + Args: + logstore_name: Name of the logstore + + Returns: + IndexConfig object with line and field indexes + """ + # Create full-text index (line config) with tokenizer + line_config = IndexLineConfig(token_list=self.DEFAULT_TOKEN_LIST, case_sensitive=False, chinese=True) + + # Get field index configuration based on logstore name + field_keys = {} + if logstore_name == AliyunLogStore.workflow_execution_logstore: + field_keys = self._get_workflow_execution_index_keys() + elif logstore_name == AliyunLogStore.workflow_node_execution_logstore: + field_keys = self._get_workflow_node_execution_index_keys() + + # key_config_list should be a dict, not a list + # Create index config with both line and field indexes + return IndexConfig(line_config=line_config, key_config_list=field_keys, scan_index=True) + + def create_index(self, logstore_name: str) -> None: + """ + Create index for the specified logstore with both full-text and field indexes. + Field indexes are automatically generated from the corresponding SQLAlchemy model. 
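+        Note: this assumes no index exists yet; ensure_index_config is the idempotent
+        entry point that either creates the index or merges required fields into an
+        existing one.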
+ """ + index_config = self._get_index_config(logstore_name) + + try: + self.client.create_index(self.project_name, logstore_name, index_config) + logger.info( + "index for %s created successfully with %d field indexes", + logstore_name, + len(index_config.key_config_list or {}), + ) + except LogException as e: + logger.exception( + "Failed to create index for logstore %s: errorCode=%s, errorMessage=%s, requestId=%s", + logstore_name, + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + raise + + def _merge_index_configs( + self, existing_config: IndexConfig, required_keys: dict[str, IndexKeyConfig], logstore_name: str + ) -> tuple[IndexConfig, bool]: + """ + Intelligently merge existing index config with Dify's required field indexes. + + This method: + 1. Preserves all existing field indexes in logstore (including custom fields) + 2. Adds missing Dify-required fields + 3. Updates fields where type doesn't match (with json/text compatibility) + 4. Corrects case mismatches (e.g., if Dify needs 'status' but logstore has 'Status') + + Type compatibility rules: + - json and text types are considered compatible (users can manually choose either) + - All other type mismatches will be corrected to match Dify requirements + + Note: Logstore is case-sensitive and doesn't allow duplicate fields with different cases. + Case mismatch means: existing field name differs from required name only in case. + + Args: + existing_config: Current index configuration from logstore + required_keys: Dify's required field index configurations + logstore_name: Name of the logstore (for logging) + + Returns: + Tuple of (merged_config, needs_update) + """ + # key_config_list is already a dict in the SDK + # Make a copy to avoid modifying the original + existing_keys = dict(existing_config.key_config_list) if existing_config.key_config_list else {} + + # Track changes + needs_update = False + case_corrections = [] # Fields that need case correction (e.g., 'Status' -> 'status') + missing_fields = [] + type_mismatches = [] + + # First pass: Check for and resolve case mismatches with required fields + # Note: Logstore itself doesn't allow duplicate fields with different cases, + # so we only need to check if the existing case matches the required case + for required_name in required_keys: + lower_name = required_name.lower() + # Find key that matches case-insensitively but not exactly + wrong_case_key = None + for existing_key in existing_keys: + if existing_key.lower() == lower_name and existing_key != required_name: + wrong_case_key = existing_key + break + + if wrong_case_key: + # Field exists but with wrong case (e.g., 'Status' when we need 'status') + # Remove the wrong-case key, will be added back with correct case later + case_corrections.append((wrong_case_key, required_name)) + del existing_keys[wrong_case_key] + needs_update = True + + # Second pass: Check each required field + for required_name, required_config in required_keys.items(): + # Check for exact match (case-sensitive) + if required_name in existing_keys: + existing_type = existing_keys[required_name].index_type + required_type = required_config.index_type + + # Check if type matches + # Special case: json and text are interchangeable for JSON content fields + # Allow users to manually configure text instead of json (or vice versa) without forcing updates + is_compatible = existing_type == required_type or ({existing_type, required_type} == {"json", "text"}) + + if not is_compatible: + type_mismatches.append((required_name, 
existing_type, required_type)) + # Update with correct type + existing_keys[required_name] = required_config + needs_update = True + # else: field exists with compatible type, no action needed + else: + # Field doesn't exist (may have been removed in first pass due to case conflict) + missing_fields.append(required_name) + existing_keys[required_name] = required_config + needs_update = True + + # Log changes + if missing_fields: + logger.info( + "Logstore %s: Adding %d missing Dify-required fields: %s", + logstore_name, + len(missing_fields), + ", ".join(missing_fields[:10]) + ("..." if len(missing_fields) > 10 else ""), + ) + + if type_mismatches: + logger.info( + "Logstore %s: Fixing %d type mismatches: %s", + logstore_name, + len(type_mismatches), + ", ".join([f"{name}({old}->{new})" for name, old, new in type_mismatches[:5]]) + + ("..." if len(type_mismatches) > 5 else ""), + ) + + if case_corrections: + logger.info( + "Logstore %s: Correcting %d field name cases: %s", + logstore_name, + len(case_corrections), + ", ".join([f"'{old}' -> '{new}'" for old, new in case_corrections[:5]]) + + ("..." if len(case_corrections) > 5 else ""), + ) + + # Create merged config + # key_config_list should be a dict, not a list + # Preserve the original scan_index value - don't force it to True + merged_config = IndexConfig( + line_config=existing_config.line_config + or IndexLineConfig(token_list=self.DEFAULT_TOKEN_LIST, case_sensitive=False, chinese=True), + key_config_list=existing_keys, + scan_index=existing_config.scan_index, + ) + + return merged_config, needs_update + + def ensure_index_config(self, logstore_name: str) -> None: + """ + Ensure index configuration includes all Dify-required fields. + + This method intelligently manages index configuration: + 1. If index doesn't exist, create it with Dify's required fields + 2. If index exists: + - Check if all Dify-required fields are present + - Check if field types match requirements + - Only update if fields are missing or types are incorrect + - Preserve any additional custom index configurations + + This approach allows users to add their own custom indexes without being overwritten. 
+ """ + # Get Dify's required field indexes + required_keys = {} + if logstore_name == AliyunLogStore.workflow_execution_logstore: + required_keys = self._get_workflow_execution_index_keys() + elif logstore_name == AliyunLogStore.workflow_node_execution_logstore: + required_keys = self._get_workflow_node_execution_index_keys() + + # Check if index exists + existing_config = self.get_existing_index_config(logstore_name) + + if existing_config is None: + # Index doesn't exist, create it + logger.info( + "Logstore %s: Index doesn't exist, creating with %d required fields", + logstore_name, + len(required_keys), + ) + self.create_index(logstore_name) + else: + merged_config, needs_update = self._merge_index_configs(existing_config, required_keys, logstore_name) + + if needs_update: + logger.info("Logstore %s: Updating index to include Dify-required fields", logstore_name) + try: + self.client.update_index(self.project_name, logstore_name, merged_config) + logger.info( + "Logstore %s: Index updated successfully, now has %d total field indexes", + logstore_name, + len(merged_config.key_config_list or {}), + ) + except LogException as e: + logger.exception( + "Failed to update index for logstore %s: errorCode=%s, errorMessage=%s, requestId=%s", + logstore_name, + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + raise + else: + logger.info( + "Logstore %s: Index already contains all %d Dify-required fields with correct types, " + "no update needed", + logstore_name, + len(required_keys), + ) + + def put_log(self, logstore: str, contents: Sequence[tuple[str, str]]) -> None: + # Route to PG or SDK based on protocol availability + if self._use_pg_protocol and self._pg_client: + self._pg_client.put_log(logstore, contents, self.log_enabled) + else: + log_item = LogItem(contents=contents) + request = PutLogsRequest(project=self.project_name, logstore=logstore, logitems=[log_item]) + + if self.log_enabled: + logger.info( + "[LogStore-SDK] PUT_LOG | logstore=%s | project=%s | items_count=%d", + logstore, + self.project_name, + len(contents), + ) + + try: + self.client.put_logs(request) + except LogException as e: + logger.exception( + "Failed to put logs to logstore %s: errorCode=%s, errorMessage=%s, requestId=%s", + logstore, + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + raise + + def get_logs( + self, + logstore: str, + from_time: int, + to_time: int, + topic: str = "", + query: str = "", + line: int = 100, + offset: int = 0, + reverse: bool = True, + ) -> list[dict]: + request = GetLogsRequest( + project=self.project_name, + logstore=logstore, + fromTime=from_time, + toTime=to_time, + topic=topic, + query=query, + line=line, + offset=offset, + reverse=reverse, + ) + + # Log query info if SQLALCHEMY_ECHO is enabled + if self.log_enabled: + logger.info( + "[LogStore] GET_LOGS | logstore=%s | project=%s | query=%s | " + "from_time=%d | to_time=%d | line=%d | offset=%d | reverse=%s", + logstore, + self.project_name, + query, + from_time, + to_time, + line, + offset, + reverse, + ) + + try: + response = self.client.get_logs(request) + result = [] + logs = response.get_logs() if response else [] + for log in logs: + result.append(log.get_contents()) + + # Log result count if SQLALCHEMY_ECHO is enabled + if self.log_enabled: + logger.info( + "[LogStore] GET_LOGS RESULT | logstore=%s | returned_count=%d", + logstore, + len(result), + ) + + return result + except LogException as e: + logger.exception( + "Failed to get logs from logstore %s with query '%s': 
errorCode=%s, errorMessage=%s, requestId=%s",
+                logstore,
+                query,
+                e.get_error_code(),
+                e.get_error_message(),
+                e.get_request_id(),
+            )
+            raise
+
+    def execute_sql(
+        self,
+        sql: str,
+        logstore: str | None = None,
+        query: str = "*",
+        from_time: int | None = None,
+        to_time: int | None = None,
+        power_sql: bool = False,
+    ) -> list[dict]:
+        """
+        Execute SQL query for aggregation and analysis.
+
+        Args:
+            sql: SQL query string (SELECT statement)
+            logstore: Name of the logstore (required)
+            query: Search/filter query for SDK mode (default: "*" for all logs).
+                Only used in SDK mode. PG mode ignores this parameter.
+            from_time: Start time (Unix timestamp) - only used in SDK mode
+            to_time: End time (Unix timestamp) - only used in SDK mode
+            power_sql: Whether to use enhanced SQL mode (default: False)
+
+        Returns:
+            List of result rows as dictionaries
+
+        Note:
+            - PG mode: Only executes the SQL directly
+            - SDK mode: Combines query and sql as "query | sql"
+        """
+        # Logstore is required
+        if not logstore:
+            raise ValueError("logstore parameter is required for execute_sql")
+
+        # Route to PG or SDK based on protocol availability
+        if self._use_pg_protocol and self._pg_client:
+            # PG mode: execute SQL directly (ignore query parameter)
+            return self._pg_client.execute_sql(sql, logstore, self.log_enabled)
+        else:
+            # SDK mode: combine query and sql as "query | sql"
+            full_query = f"{query} | {sql}"
+
+            # Provide default time range if not specified
+            if from_time is None:
+                from_time = 0
+
+            if to_time is None:
+                to_time = int(time.time())  # now
+
+            request = GetLogsRequest(
+                project=self.project_name,
+                logstore=logstore,
+                fromTime=from_time,
+                toTime=to_time,
+                query=full_query,
+            )
+
+            # Log query info if SQLALCHEMY_ECHO is enabled
+            if self.log_enabled:
+                logger.info(
+                    "[LogStore-SDK] EXECUTE_SQL | logstore=%s | project=%s | from_time=%d | to_time=%d | full_query=%s",
+                    logstore,
+                    self.project_name,
+                    from_time,
+                    to_time,
+                    full_query,
+                )
+
+            try:
+                response = self.client.get_logs(request)
+
+                result = []
+                logs = response.get_logs() if response else []
+                for log in logs:
+                    result.append(log.get_contents())
+
+                # Log result count if SQLALCHEMY_ECHO is enabled
+                if self.log_enabled:
+                    logger.info(
+                        "[LogStore-SDK] EXECUTE_SQL RESULT | logstore=%s | returned_count=%d",
+                        logstore,
+                        len(result),
+                    )
+
+                return result
+            except LogException as e:
+                logger.exception(
+                    "Failed to execute SQL on logstore %s: errorCode=%s, errorMessage=%s, requestId=%s, full_query=%s",
+                    logstore,
+                    e.get_error_code(),
+                    e.get_error_message(),
+                    e.get_request_id(),
+                    full_query,
+                )
+                raise
+
+
+if __name__ == "__main__":
+    aliyun_logstore = AliyunLogStore()
+    # aliyun_logstore.init_project_logstore()
+    aliyun_logstore.put_log(AliyunLogStore.workflow_execution_logstore, [("key1", "value1")])
diff --git a/api/extensions/logstore/aliyun_logstore_pg.py b/api/extensions/logstore/aliyun_logstore_pg.py
new file mode 100644
index 0000000000..35aa51ce53
--- /dev/null
+++ b/api/extensions/logstore/aliyun_logstore_pg.py
@@ -0,0 +1,407 @@
+import logging
+import os
+import socket
+import time
+from collections.abc import Sequence
+from contextlib import contextmanager
+from typing import Any
+
+import psycopg2
+import psycopg2.pool
+from psycopg2 import InterfaceError, OperationalError
+
+from configs import dify_config
+
+logger = logging.getLogger(__name__)
+
+
+class AliyunLogStorePG:
+    """
+    PostgreSQL protocol support for Aliyun SLS LogStore. 
+ + Handles PG connection pooling and operations for regions that support PG protocol. + """ + + def __init__(self, access_key_id: str, access_key_secret: str, endpoint: str, project_name: str): + """ + Initialize PG connection for SLS. + + Args: + access_key_id: Aliyun access key ID + access_key_secret: Aliyun access key secret + endpoint: SLS endpoint + project_name: SLS project name + """ + self._access_key_id = access_key_id + self._access_key_secret = access_key_secret + self._endpoint = endpoint + self.project_name = project_name + self._pg_pool: psycopg2.pool.SimpleConnectionPool | None = None + self._use_pg_protocol = False + + def _check_port_connectivity(self, host: str, port: int, timeout: float = 2.0) -> bool: + """ + Check if a TCP port is reachable using socket connection. + + This provides a fast check before attempting full database connection, + preventing long waits when connecting to unsupported regions. + + Args: + host: Hostname or IP address + port: Port number + timeout: Connection timeout in seconds (default: 2.0) + + Returns: + True if port is reachable, False otherwise + """ + try: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.settimeout(timeout) + result = sock.connect_ex((host, port)) + sock.close() + return result == 0 + except Exception as e: + logger.debug("Port connectivity check failed for %s:%d: %s", host, port, str(e)) + return False + + def init_connection(self) -> bool: + """ + Initialize PostgreSQL connection pool for SLS PG protocol support. + + Attempts to connect to SLS using PostgreSQL protocol. If successful, sets + _use_pg_protocol to True and creates a connection pool. If connection fails + (region doesn't support PG protocol or other errors), returns False. + + Returns: + True if PG protocol is supported and initialized, False otherwise + """ + try: + # Extract hostname from endpoint (remove protocol if present) + pg_host = self._endpoint.replace("http://", "").replace("https://", "") + + # Get pool configuration + pg_max_connections = int(os.environ.get("ALIYUN_SLS_PG_MAX_CONNECTIONS", 10)) + + logger.debug( + "Check PG protocol connection to SLS: host=%s, project=%s", + pg_host, + self.project_name, + ) + + # Fast port connectivity check before attempting full connection + # This prevents long waits when connecting to unsupported regions + if not self._check_port_connectivity(pg_host, 5432, timeout=1.0): + logger.info( + "USE SDK mode for read/write operations, host=%s", + pg_host, + ) + return False + + # Create connection pool + self._pg_pool = psycopg2.pool.SimpleConnectionPool( + minconn=1, + maxconn=pg_max_connections, + host=pg_host, + port=5432, + database=self.project_name, + user=self._access_key_id, + password=self._access_key_secret, + sslmode="require", + connect_timeout=5, + application_name=f"Dify-{dify_config.project.version}", + ) + + # Note: Skip test query because SLS PG protocol only supports SELECT/INSERT on actual tables + # Connection pool creation success already indicates connectivity + + self._use_pg_protocol = True + logger.info( + "PG protocol initialized successfully for SLS project=%s. 
Will use PG for read/write operations.",
+                self.project_name,
+            )
+            return True
+
+        except Exception as e:
+            # PG connection failed - fallback to SDK mode
+            self._use_pg_protocol = False
+            if self._pg_pool:
+                try:
+                    self._pg_pool.closeall()
+                except Exception:
+                    logger.debug("Failed to close PG connection pool during cleanup, ignoring")
+                self._pg_pool = None
+
+            logger.info(
+                "PG protocol connection failed (region may not support PG protocol): %s. "
+                "Falling back to SDK mode for read/write operations.",
+                str(e),
+            )
+            return False
+
+    def _is_connection_valid(self, conn: Any) -> bool:
+        """
+        Check if a connection is still valid.
+
+        Args:
+            conn: psycopg2 connection object
+
+        Returns:
+            True if connection is valid, False otherwise
+        """
+        try:
+            # Check if connection is closed
+            if conn.closed:
+                return False
+
+            # SLS PG protocol only supports SELECT/INSERT on actual tables,
+            # so a lightweight "SELECT 1" ping is not available here. Rely on
+            # the closed flag; connections broken mid-query are still handled
+            # by the retry logic in put_log/execute_sql.
+            return True
+        except Exception:
+            return False
+
+    @contextmanager
+    def _get_connection(self):
+        """
+        Context manager to get a PostgreSQL connection from the pool.
+
+        Automatically validates and refreshes stale connections.
+
+        Note: Aliyun SLS PG protocol does not support transactions, so we always
+        use autocommit mode.
+
+        Yields:
+            psycopg2 connection object
+
+        Raises:
+            RuntimeError: If PG pool is not initialized
+        """
+        if not self._pg_pool:
+            raise RuntimeError("PG connection pool is not initialized")
+
+        conn = self._pg_pool.getconn()
+        try:
+            # Validate connection and get a fresh one if needed
+            if not self._is_connection_valid(conn):
+                logger.debug("Connection is stale, marking as bad and getting a new one")
+                # Mark connection as bad and get a new one
+                self._pg_pool.putconn(conn, close=True)
+                conn = self._pg_pool.getconn()
+
+            # Aliyun SLS PG protocol does not support transactions, always use autocommit
+            conn.autocommit = True
+            yield conn
+        finally:
+            # Return connection to pool (or close if it's bad)
+            if self._is_connection_valid(conn):
+                self._pg_pool.putconn(conn)
+            else:
+                self._pg_pool.putconn(conn, close=True)
+
+    def close(self) -> None:
+        """Close the PostgreSQL connection pool."""
+        if self._pg_pool:
+            try:
+                self._pg_pool.closeall()
+                logger.info("PG connection pool closed")
+            except Exception:
+                logger.exception("Failed to close PG connection pool")
+
+    def _is_retriable_error(self, error: Exception) -> bool:
+        """
+        Check if an error is retriable (connection-related issues).
+
+        Args:
+            error: Exception to check
+
+        Returns:
+            True if the error is retriable, False otherwise
+        """
+        # Retry on connection-related errors
+        if isinstance(error, (OperationalError, InterfaceError)):
+            return True
+
+        # Check error message for specific connection issues
+        error_msg = str(error).lower()
+        retriable_patterns = [
+            "connection",
+            "timeout",
+            "closed",
+            "broken pipe",
+            "reset by peer",
+            "no route to host",
+            "network",
+        ]
+        return any(pattern in error_msg for pattern in retriable_patterns)
+
+    def put_log(self, logstore: str, contents: Sequence[tuple[str, str]], log_enabled: bool = False) -> None:
+        """
+        Write log to SLS using PostgreSQL protocol with automatic retry.
+
+        Note: SLS PG protocol only supports INSERT (not UPDATE). This uses append-only
+        writes with log_version field for versioning, same as SDK implementation.
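+
+        A minimal usage sketch (illustrative only; the logstore name and field
+        values are hypothetical, but the (name, value) tuple shape matches what
+        the repositories in this change produce):
+
+            pg.put_log(
+                "workflow_execution",  # hypothetical logstore name
+                [
+                    ("id", "run-123"),
+                    ("log_version", str(time.time_ns())),  # append-only versioning
+                    ("status", "succeeded"),
+                ],
+            )
+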
+ + Args: + logstore: Name of the logstore table + contents: List of (field_name, value) tuples + log_enabled: Whether to enable logging + + Raises: + psycopg2.Error: If database operation fails after all retries + """ + if not contents: + return + + # Extract field names and values from contents + fields = [field_name for field_name, _ in contents] + values = [value for _, value in contents] + + # Build INSERT statement with literal values + # Note: Aliyun SLS PG protocol doesn't support parameterized queries, + # so we need to use mogrify to safely create literal values + field_list = ", ".join([f'"{field}"' for field in fields]) + + if log_enabled: + logger.info( + "[LogStore-PG] PUT_LOG | logstore=%s | project=%s | items_count=%d", + logstore, + self.project_name, + len(contents), + ) + + # Retry configuration + max_retries = 3 + retry_delay = 0.1 # Start with 100ms + + for attempt in range(max_retries): + try: + with self._get_connection() as conn: + with conn.cursor() as cursor: + # Use mogrify to safely convert values to SQL literals + placeholders = ", ".join(["%s"] * len(fields)) + values_literal = cursor.mogrify(f"({placeholders})", values).decode("utf-8") + insert_sql = f'INSERT INTO "{logstore}" ({field_list}) VALUES {values_literal}' + cursor.execute(insert_sql) + # Success - exit retry loop + return + + except psycopg2.Error as e: + # Check if error is retriable + if not self._is_retriable_error(e): + # Not a retriable error (e.g., data validation error), fail immediately + logger.exception( + "Failed to put logs to logstore %s via PG protocol (non-retriable error)", + logstore, + ) + raise + + # Retriable error - log and retry if we have attempts left + if attempt < max_retries - 1: + logger.warning( + "Failed to put logs to logstore %s via PG protocol (attempt %d/%d): %s. Retrying...", + logstore, + attempt + 1, + max_retries, + str(e), + ) + time.sleep(retry_delay) + retry_delay *= 2 # Exponential backoff + else: + # Last attempt failed + logger.exception( + "Failed to put logs to logstore %s via PG protocol after %d attempts", + logstore, + max_retries, + ) + raise + + def execute_sql(self, sql: str, logstore: str, log_enabled: bool = False) -> list[dict[str, Any]]: + """ + Execute SQL query using PostgreSQL protocol with automatic retry. 
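+
+        Usage sketch (illustrative; the table name is hypothetical). Note that
+        every column value is returned as a string, so callers must cast:
+
+            rows = pg.execute_sql(
+                'SELECT COUNT(DISTINCT id) AS count FROM "workflow_execution"',
+                logstore="workflow_execution",
+            )
+            total = int(rows[0]["count"]) if rows else 0  # values arrive as strings
+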
+ + Args: + sql: SQL query string + logstore: Name of the logstore (for logging purposes) + log_enabled: Whether to enable logging + + Returns: + List of result rows as dictionaries + + Raises: + psycopg2.Error: If database operation fails after all retries + """ + if log_enabled: + logger.info( + "[LogStore-PG] EXECUTE_SQL | logstore=%s | project=%s | sql=%s", + logstore, + self.project_name, + sql, + ) + + # Retry configuration + max_retries = 3 + retry_delay = 0.1 # Start with 100ms + + for attempt in range(max_retries): + try: + with self._get_connection() as conn: + with conn.cursor() as cursor: + cursor.execute(sql) + + # Get column names from cursor description + columns = [desc[0] for desc in cursor.description] + + # Fetch all results and convert to list of dicts + result = [] + for row in cursor.fetchall(): + row_dict = {} + for col, val in zip(columns, row): + row_dict[col] = "" if val is None else str(val) + result.append(row_dict) + + if log_enabled: + logger.info( + "[LogStore-PG] EXECUTE_SQL RESULT | logstore=%s | returned_count=%d", + logstore, + len(result), + ) + + return result + + except psycopg2.Error as e: + # Check if error is retriable + if not self._is_retriable_error(e): + # Not a retriable error (e.g., SQL syntax error), fail immediately + logger.exception( + "Failed to execute SQL query on logstore %s via PG protocol (non-retriable error): sql=%s", + logstore, + sql, + ) + raise + + # Retriable error - log and retry if we have attempts left + if attempt < max_retries - 1: + logger.warning( + "Failed to execute SQL query on logstore %s via PG protocol (attempt %d/%d): %s. Retrying...", + logstore, + attempt + 1, + max_retries, + str(e), + ) + time.sleep(retry_delay) + retry_delay *= 2 # Exponential backoff + else: + # Last attempt failed + logger.exception( + "Failed to execute SQL query on logstore %s via PG protocol after %d attempts: sql=%s", + logstore, + max_retries, + sql, + ) + raise + + # This line should never be reached due to raise above, but makes type checker happy + return [] diff --git a/api/extensions/logstore/repositories/__init__.py b/api/extensions/logstore/repositories/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/extensions/logstore/repositories/logstore_api_workflow_node_execution_repository.py b/api/extensions/logstore/repositories/logstore_api_workflow_node_execution_repository.py new file mode 100644 index 0000000000..8c804d6bb5 --- /dev/null +++ b/api/extensions/logstore/repositories/logstore_api_workflow_node_execution_repository.py @@ -0,0 +1,365 @@ +""" +LogStore implementation of DifyAPIWorkflowNodeExecutionRepository. + +This module provides the LogStore-based implementation for service-layer +WorkflowNodeExecutionModel operations using Aliyun SLS LogStore. +""" + +import logging +import time +from collections.abc import Sequence +from datetime import datetime +from typing import Any + +from sqlalchemy.orm import sessionmaker + +from extensions.logstore.aliyun_logstore import AliyunLogStore +from models.workflow import WorkflowNodeExecutionModel +from repositories.api_workflow_node_execution_repository import DifyAPIWorkflowNodeExecutionRepository + +logger = logging.getLogger(__name__) + + +def _dict_to_workflow_node_execution_model(data: dict[str, Any]) -> WorkflowNodeExecutionModel: + """ + Convert LogStore result dictionary to WorkflowNodeExecutionModel instance. 
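+
+    Illustrative sketch (hypothetical values; LogStore returns every field as
+    a string, and numeric/datetime fields are coerced below):
+
+        model = _dict_to_workflow_node_execution_model({
+            "id": "ne-1",
+            "status": "succeeded",
+            "index": "3",                          # coerced to int
+            "created_at": "2024-01-01T00:00:00",   # parsed via fromisoformat
+        })
+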
+ + Args: + data: Dictionary from LogStore query result + + Returns: + WorkflowNodeExecutionModel instance (detached from session) + + Note: + The returned model is not attached to any SQLAlchemy session. + Relationship fields (like offload_data) are not loaded from LogStore. + """ + logger.debug("_dict_to_workflow_node_execution_model: data keys=%s", list(data.keys())[:5]) + # Create model instance without session + model = WorkflowNodeExecutionModel() + + # Map all required fields with validation + # Critical fields - must not be None + model.id = data.get("id") or "" + model.tenant_id = data.get("tenant_id") or "" + model.app_id = data.get("app_id") or "" + model.workflow_id = data.get("workflow_id") or "" + model.triggered_from = data.get("triggered_from") or "" + model.node_id = data.get("node_id") or "" + model.node_type = data.get("node_type") or "" + model.status = data.get("status") or "running" # Default status if missing + model.title = data.get("title") or "" + model.created_by_role = data.get("created_by_role") or "" + model.created_by = data.get("created_by") or "" + + # Numeric fields with defaults + model.index = int(data.get("index", 0)) + model.elapsed_time = float(data.get("elapsed_time", 0)) + + # Optional fields + model.workflow_run_id = data.get("workflow_run_id") + model.predecessor_node_id = data.get("predecessor_node_id") + model.node_execution_id = data.get("node_execution_id") + model.inputs = data.get("inputs") + model.process_data = data.get("process_data") + model.outputs = data.get("outputs") + model.error = data.get("error") + model.execution_metadata = data.get("execution_metadata") + + # Handle datetime fields + created_at = data.get("created_at") + if created_at: + if isinstance(created_at, str): + model.created_at = datetime.fromisoformat(created_at) + elif isinstance(created_at, (int, float)): + model.created_at = datetime.fromtimestamp(created_at) + else: + model.created_at = created_at + else: + # Provide default created_at if missing + model.created_at = datetime.now() + + finished_at = data.get("finished_at") + if finished_at: + if isinstance(finished_at, str): + model.finished_at = datetime.fromisoformat(finished_at) + elif isinstance(finished_at, (int, float)): + model.finished_at = datetime.fromtimestamp(finished_at) + else: + model.finished_at = finished_at + + return model + + +class LogstoreAPIWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecutionRepository): + """ + LogStore implementation of DifyAPIWorkflowNodeExecutionRepository. + + Provides service-layer database operations for WorkflowNodeExecutionModel + using LogStore SQL queries with optimized deduplication strategies. + """ + + def __init__(self, session_maker: sessionmaker | None = None): + """ + Initialize the repository with LogStore client. + + Args: + session_maker: SQLAlchemy sessionmaker (unused, for compatibility with factory pattern) + """ + logger.debug("LogstoreAPIWorkflowNodeExecutionRepository.__init__: initializing") + self.logstore_client = AliyunLogStore() + + def get_node_last_execution( + self, + tenant_id: str, + app_id: str, + workflow_id: str, + node_id: str, + ) -> WorkflowNodeExecutionModel | None: + """ + Get the most recent execution for a specific node. + + Uses query syntax to get raw logs and selects the one with max log_version. + Returns the most recent execution ordered by created_at. 
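+
+        Usage sketch (hypothetical identifiers):
+
+            last = repo.get_node_last_execution(
+                tenant_id="t-1", app_id="a-1", workflow_id="w-1", node_id="llm"
+            )
+            status = last.status if last else None
+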
+ """ + logger.debug( + "get_node_last_execution: tenant_id=%s, app_id=%s, workflow_id=%s, node_id=%s", + tenant_id, + app_id, + workflow_id, + node_id, + ) + try: + # Check if PG protocol is supported + if self.logstore_client.supports_pg_protocol: + # Use PG protocol with SQL query (get latest version of each record) + sql_query = f""" + SELECT * FROM ( + SELECT *, + ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) as rn + FROM "{AliyunLogStore.workflow_node_execution_logstore}" + WHERE tenant_id = '{tenant_id}' + AND app_id = '{app_id}' + AND workflow_id = '{workflow_id}' + AND node_id = '{node_id}' + AND __time__ > 0 + ) AS subquery WHERE rn = 1 + LIMIT 100 + """ + results = self.logstore_client.execute_sql( + sql=sql_query, + logstore=AliyunLogStore.workflow_node_execution_logstore, + ) + else: + # Use SDK with LogStore query syntax + query = ( + f"tenant_id: {tenant_id} and app_id: {app_id} and workflow_id: {workflow_id} and node_id: {node_id}" + ) + from_time = 0 + to_time = int(time.time()) # now + + results = self.logstore_client.get_logs( + logstore=AliyunLogStore.workflow_node_execution_logstore, + from_time=from_time, + to_time=to_time, + query=query, + line=100, + reverse=False, + ) + + if not results: + return None + + # For SDK mode, group by id and select the one with max log_version for each group + # For PG mode, this is already done by the SQL query + if not self.logstore_client.supports_pg_protocol: + id_to_results: dict[str, list[dict[str, Any]]] = {} + for row in results: + row_id = row.get("id") + if row_id: + if row_id not in id_to_results: + id_to_results[row_id] = [] + id_to_results[row_id].append(row) + + # For each id, select the row with max log_version + deduplicated_results = [] + for rows in id_to_results.values(): + if len(rows) > 1: + max_row = max(rows, key=lambda x: int(x.get("log_version", 0))) + else: + max_row = rows[0] + deduplicated_results.append(max_row) + else: + # For PG mode, results are already deduplicated by the SQL query + deduplicated_results = results + + # Sort by created_at DESC and return the most recent one + deduplicated_results.sort( + key=lambda x: x.get("created_at", 0) if isinstance(x.get("created_at"), (int, float)) else 0, + reverse=True, + ) + + if deduplicated_results: + return _dict_to_workflow_node_execution_model(deduplicated_results[0]) + + return None + + except Exception: + logger.exception("Failed to get node last execution from LogStore") + raise + + def get_executions_by_workflow_run( + self, + tenant_id: str, + app_id: str, + workflow_run_id: str, + ) -> Sequence[WorkflowNodeExecutionModel]: + """ + Get all node executions for a specific workflow run. + + Uses query syntax to get raw logs and selects the one with max log_version for each node execution. + Ordered by index DESC for trace visualization. 
+ """ + logger.debug( + "[LogStore] get_executions_by_workflow_run: tenant_id=%s, app_id=%s, workflow_run_id=%s", + tenant_id, + app_id, + workflow_run_id, + ) + try: + # Check if PG protocol is supported + if self.logstore_client.supports_pg_protocol: + # Use PG protocol with SQL query (get latest version of each record) + sql_query = f""" + SELECT * FROM ( + SELECT *, + ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) as rn + FROM "{AliyunLogStore.workflow_node_execution_logstore}" + WHERE tenant_id = '{tenant_id}' + AND app_id = '{app_id}' + AND workflow_run_id = '{workflow_run_id}' + AND __time__ > 0 + ) AS subquery WHERE rn = 1 + LIMIT 1000 + """ + results = self.logstore_client.execute_sql( + sql=sql_query, + logstore=AliyunLogStore.workflow_node_execution_logstore, + ) + else: + # Use SDK with LogStore query syntax + query = f"tenant_id: {tenant_id} and app_id: {app_id} and workflow_run_id: {workflow_run_id}" + from_time = 0 + to_time = int(time.time()) # now + + results = self.logstore_client.get_logs( + logstore=AliyunLogStore.workflow_node_execution_logstore, + from_time=from_time, + to_time=to_time, + query=query, + line=1000, # Get more results for node executions + reverse=False, + ) + + if not results: + return [] + + # For SDK mode, group by id and select the one with max log_version for each group + # For PG mode, this is already done by the SQL query + models = [] + if not self.logstore_client.supports_pg_protocol: + id_to_results: dict[str, list[dict[str, Any]]] = {} + for row in results: + row_id = row.get("id") + if row_id: + if row_id not in id_to_results: + id_to_results[row_id] = [] + id_to_results[row_id].append(row) + + # For each id, select the row with max log_version + for rows in id_to_results.values(): + if len(rows) > 1: + max_row = max(rows, key=lambda x: int(x.get("log_version", 0))) + else: + max_row = rows[0] + + model = _dict_to_workflow_node_execution_model(max_row) + if model and model.id: # Ensure model is valid + models.append(model) + else: + # For PG mode, results are already deduplicated by the SQL query + for row in results: + model = _dict_to_workflow_node_execution_model(row) + if model and model.id: # Ensure model is valid + models.append(model) + + # Sort by index DESC for trace visualization + models.sort(key=lambda x: x.index, reverse=True) + + return models + + except Exception: + logger.exception("Failed to get executions by workflow run from LogStore") + raise + + def get_execution_by_id( + self, + execution_id: str, + tenant_id: str | None = None, + ) -> WorkflowNodeExecutionModel | None: + """ + Get a workflow node execution by its ID. + Uses query syntax to get raw logs and selects the one with max log_version. 
+ """ + logger.debug("get_execution_by_id: execution_id=%s, tenant_id=%s", execution_id, tenant_id) + try: + # Check if PG protocol is supported + if self.logstore_client.supports_pg_protocol: + # Use PG protocol with SQL query (get latest version of record) + tenant_filter = f"AND tenant_id = '{tenant_id}'" if tenant_id else "" + sql_query = f""" + SELECT * FROM ( + SELECT *, + ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) as rn + FROM "{AliyunLogStore.workflow_node_execution_logstore}" + WHERE id = '{execution_id}' {tenant_filter} AND __time__ > 0 + ) AS subquery WHERE rn = 1 + LIMIT 1 + """ + results = self.logstore_client.execute_sql( + sql=sql_query, + logstore=AliyunLogStore.workflow_node_execution_logstore, + ) + else: + # Use SDK with LogStore query syntax + if tenant_id: + query = f"id: {execution_id} and tenant_id: {tenant_id}" + else: + query = f"id: {execution_id}" + + from_time = 0 + to_time = int(time.time()) # now + + results = self.logstore_client.get_logs( + logstore=AliyunLogStore.workflow_node_execution_logstore, + from_time=from_time, + to_time=to_time, + query=query, + line=100, + reverse=False, + ) + + if not results: + return None + + # For PG mode, result is already the latest version + # For SDK mode, if multiple results, select the one with max log_version + if self.logstore_client.supports_pg_protocol or len(results) == 1: + return _dict_to_workflow_node_execution_model(results[0]) + else: + max_result = max(results, key=lambda x: int(x.get("log_version", 0))) + return _dict_to_workflow_node_execution_model(max_result) + + except Exception: + logger.exception("Failed to get execution by ID from LogStore: execution_id=%s", execution_id) + raise diff --git a/api/extensions/logstore/repositories/logstore_api_workflow_run_repository.py b/api/extensions/logstore/repositories/logstore_api_workflow_run_repository.py new file mode 100644 index 0000000000..252cdcc4df --- /dev/null +++ b/api/extensions/logstore/repositories/logstore_api_workflow_run_repository.py @@ -0,0 +1,757 @@ +""" +LogStore API WorkflowRun Repository Implementation + +This module provides the LogStore-based implementation of the APIWorkflowRunRepository +protocol. It handles service-layer WorkflowRun database operations using Aliyun SLS LogStore +with optimized queries for statistics and pagination. + +Key Features: +- LogStore SQL queries for aggregation and statistics +- Optimized deduplication using finished_at IS NOT NULL filter +- Window functions only when necessary (running status queries) +- Multi-tenant data isolation and security +""" + +import logging +import os +import time +from collections.abc import Sequence +from datetime import datetime +from typing import Any, cast + +from sqlalchemy.orm import sessionmaker + +from extensions.logstore.aliyun_logstore import AliyunLogStore +from libs.infinite_scroll_pagination import InfiniteScrollPagination +from models.enums import WorkflowRunTriggeredFrom +from models.workflow import WorkflowRun +from repositories.api_workflow_run_repository import APIWorkflowRunRepository +from repositories.types import ( + AverageInteractionStats, + DailyRunsStats, + DailyTerminalsStats, + DailyTokenCostStats, +) + +logger = logging.getLogger(__name__) + + +def _dict_to_workflow_run(data: dict[str, Any]) -> WorkflowRun: + """ + Convert LogStore result dictionary to WorkflowRun instance. 
+ + Args: + data: Dictionary from LogStore query result + + Returns: + WorkflowRun instance + """ + logger.debug("_dict_to_workflow_run: data keys=%s", list(data.keys())[:5]) + # Create model instance without session + model = WorkflowRun() + + # Map all required fields with validation + # Critical fields - must not be None + model.id = data.get("id") or "" + model.tenant_id = data.get("tenant_id") or "" + model.app_id = data.get("app_id") or "" + model.workflow_id = data.get("workflow_id") or "" + model.type = data.get("type") or "" + model.triggered_from = data.get("triggered_from") or "" + model.version = data.get("version") or "" + model.status = data.get("status") or "running" # Default status if missing + model.created_by_role = data.get("created_by_role") or "" + model.created_by = data.get("created_by") or "" + + # Numeric fields with defaults + model.total_tokens = int(data.get("total_tokens", 0)) + model.total_steps = int(data.get("total_steps", 0)) + model.exceptions_count = int(data.get("exceptions_count", 0)) + + # Optional fields + model.graph = data.get("graph") + model.inputs = data.get("inputs") + model.outputs = data.get("outputs") + model.error = data.get("error_message") or data.get("error") + + # Handle datetime fields + started_at = data.get("started_at") or data.get("created_at") + if started_at: + if isinstance(started_at, str): + model.created_at = datetime.fromisoformat(started_at) + elif isinstance(started_at, (int, float)): + model.created_at = datetime.fromtimestamp(started_at) + else: + model.created_at = started_at + else: + # Provide default created_at if missing + model.created_at = datetime.now() + + finished_at = data.get("finished_at") + if finished_at: + if isinstance(finished_at, str): + model.finished_at = datetime.fromisoformat(finished_at) + elif isinstance(finished_at, (int, float)): + model.finished_at = datetime.fromtimestamp(finished_at) + else: + model.finished_at = finished_at + + # Compute elapsed_time from started_at and finished_at + # LogStore doesn't store elapsed_time, it's computed in WorkflowExecution domain entity + if model.finished_at and model.created_at: + model.elapsed_time = (model.finished_at - model.created_at).total_seconds() + else: + model.elapsed_time = float(data.get("elapsed_time", 0)) + + return model + + +class LogstoreAPIWorkflowRunRepository(APIWorkflowRunRepository): + """ + LogStore implementation of APIWorkflowRunRepository. + + Provides service-layer WorkflowRun database operations using LogStore SQL + with optimized query strategies: + - Use finished_at IS NOT NULL for deduplication (10-100x faster) + - Use window functions only when running status is required + - Proper time range filtering for LogStore queries + """ + + def __init__(self, session_maker: sessionmaker | None = None): + """ + Initialize the repository with LogStore client. 
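+
+        Deployment sketch: the PostgreSQL fallback is controlled by an
+        environment variable rather than a constructor argument, e.g.:
+
+            os.environ["LOGSTORE_DUAL_READ_ENABLED"] = "false"  # fresh installs, no legacy rows
+            repo = LogstoreAPIWorkflowRunRepository()
+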
+
+        Args:
+            session_maker: SQLAlchemy sessionmaker (unused, for compatibility with factory pattern)
+        """
+        logger.debug("LogstoreAPIWorkflowRunRepository.__init__: initializing")
+        self.logstore_client = AliyunLogStore()
+
+        # Control flag for dual-read (fallback to PostgreSQL when LogStore returns no results)
+        # Set to True to enable fallback for safe migration from PostgreSQL to LogStore
+        # Set to False for new deployments without legacy data in PostgreSQL
+        self._enable_dual_read = os.environ.get("LOGSTORE_DUAL_READ_ENABLED", "true").lower() == "true"
+
+    def get_paginated_workflow_runs(
+        self,
+        tenant_id: str,
+        app_id: str,
+        triggered_from: WorkflowRunTriggeredFrom | Sequence[WorkflowRunTriggeredFrom],
+        limit: int = 20,
+        last_id: str | None = None,
+        status: str | None = None,
+    ) -> InfiniteScrollPagination:
+        """
+        Get paginated workflow runs with filtering.
+
+        Uses window function for deduplication to support both running and finished states.
+
+        Args:
+            tenant_id: Tenant identifier for multi-tenant isolation
+            app_id: Application identifier
+            triggered_from: Filter by trigger source(s)
+            limit: Maximum number of records to return (default: 20)
+            last_id: Cursor for pagination - ID of the last record from previous page
+            status: Optional filter by status
+
+        Returns:
+            InfiniteScrollPagination object
+        """
+        logger.debug(
+            "get_paginated_workflow_runs: tenant_id=%s, app_id=%s, limit=%d, status=%s",
+            tenant_id,
+            app_id,
+            limit,
+            status,
+        )
+        # Convert triggered_from to list if needed
+        if isinstance(triggered_from, WorkflowRunTriggeredFrom):
+            triggered_from_list = [triggered_from]
+        else:
+            triggered_from_list = list(triggered_from)
+
+        # Build triggered_from filter
+        triggered_from_filter = " OR ".join([f"triggered_from='{tf.value}'" for tf in triggered_from_list])
+
+        # Build status filter
+        status_filter = f"AND status='{status}'" if status else ""
+
+        # Build last_id filter for pagination
+        # Note: This is simplified. In production, you'd need to track the sort key from the last record
+        last_id_filter = ""
+        if last_id:
+            # TODO: Implement proper cursor-based pagination
+            logger.warning("last_id pagination not fully implemented for LogStore")
+
+        # Use window function to get latest log_version of each workflow run.
+        # Order by started_at, the timestamp this repository actually writes
+        # (workflow execution records have no created_at column).
+        sql = f"""
+            SELECT * FROM (
+                SELECT *, ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) AS rn
+                FROM {AliyunLogStore.workflow_execution_logstore}
+                WHERE tenant_id='{tenant_id}'
+                AND app_id='{app_id}'
+                AND ({triggered_from_filter})
+                {status_filter}
+                {last_id_filter}
+            ) t
+            WHERE rn = 1
+            ORDER BY started_at DESC
+            LIMIT {limit + 1}
+        """
+
+        try:
+            results = self.logstore_client.execute_sql(
+                sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore, from_time=None, to_time=None
+            )
+
+            # Check if there are more records
+            has_more = len(results) > limit
+            if has_more:
+                results = results[:limit]
+
+            # Convert results to WorkflowRun models
+            workflow_runs = [_dict_to_workflow_run(row) for row in results]
+            return InfiniteScrollPagination(data=workflow_runs, limit=limit, has_more=has_more)
+
+        except Exception:
+            logger.exception("Failed to get paginated workflow runs from LogStore")
+            raise
+
+    def get_workflow_run_by_id(
+        self,
+        tenant_id: str,
+        app_id: str,
+        run_id: str,
+    ) -> WorkflowRun | None:
+        """
+        Get a specific workflow run by ID with tenant and app isolation.
+
+        Uses query syntax to get raw logs and selects the one with max log_version in code.
+ Falls back to PostgreSQL if not found in LogStore (for data consistency during migration). + """ + logger.debug("get_workflow_run_by_id: tenant_id=%s, app_id=%s, run_id=%s", tenant_id, app_id, run_id) + + try: + # Check if PG protocol is supported + if self.logstore_client.supports_pg_protocol: + # Use PG protocol with SQL query (get latest version of record) + sql_query = f""" + SELECT * FROM ( + SELECT *, + ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) as rn + FROM "{AliyunLogStore.workflow_execution_logstore}" + WHERE id = '{run_id}' AND tenant_id = '{tenant_id}' AND app_id = '{app_id}' AND __time__ > 0 + ) AS subquery WHERE rn = 1 + LIMIT 100 + """ + results = self.logstore_client.execute_sql( + sql=sql_query, + logstore=AliyunLogStore.workflow_execution_logstore, + ) + else: + # Use SDK with LogStore query syntax + query = f"id: {run_id} and tenant_id: {tenant_id} and app_id: {app_id}" + from_time = 0 + to_time = int(time.time()) # now + + results = self.logstore_client.get_logs( + logstore=AliyunLogStore.workflow_execution_logstore, + from_time=from_time, + to_time=to_time, + query=query, + line=100, + reverse=False, + ) + + if not results: + # Fallback to PostgreSQL for records created before LogStore migration + if self._enable_dual_read: + logger.debug( + "WorkflowRun not found in LogStore, falling back to PostgreSQL: " + "run_id=%s, tenant_id=%s, app_id=%s", + run_id, + tenant_id, + app_id, + ) + return self._fallback_get_workflow_run_by_id_with_tenant(run_id, tenant_id, app_id) + return None + + # For PG mode, results are already deduplicated by the SQL query + # For SDK mode, if multiple results, select the one with max log_version + if self.logstore_client.supports_pg_protocol or len(results) == 1: + return _dict_to_workflow_run(results[0]) + else: + max_result = max(results, key=lambda x: int(x.get("log_version", 0))) + return _dict_to_workflow_run(max_result) + + except Exception: + logger.exception("Failed to get workflow run by ID from LogStore: run_id=%s", run_id) + # Try PostgreSQL fallback on any error (only if dual-read is enabled) + if self._enable_dual_read: + try: + return self._fallback_get_workflow_run_by_id_with_tenant(run_id, tenant_id, app_id) + except Exception: + logger.exception( + "PostgreSQL fallback also failed: run_id=%s, tenant_id=%s, app_id=%s", run_id, tenant_id, app_id + ) + raise + + def _fallback_get_workflow_run_by_id_with_tenant( + self, run_id: str, tenant_id: str, app_id: str + ) -> WorkflowRun | None: + """Fallback to PostgreSQL query for records not in LogStore (with tenant isolation).""" + from sqlalchemy import select + from sqlalchemy.orm import Session + + from extensions.ext_database import db + + with Session(db.engine) as session: + stmt = select(WorkflowRun).where( + WorkflowRun.id == run_id, WorkflowRun.tenant_id == tenant_id, WorkflowRun.app_id == app_id + ) + return session.scalar(stmt) + + def get_workflow_run_by_id_without_tenant( + self, + run_id: str, + ) -> WorkflowRun | None: + """ + Get a specific workflow run by ID without tenant/app context. + Uses query syntax to get raw logs and selects the one with max log_version. + Falls back to PostgreSQL if not found in LogStore (controlled by LOGSTORE_DUAL_READ_ENABLED). 
+ """ + logger.debug("get_workflow_run_by_id_without_tenant: run_id=%s", run_id) + + try: + # Check if PG protocol is supported + if self.logstore_client.supports_pg_protocol: + # Use PG protocol with SQL query (get latest version of record) + sql_query = f""" + SELECT * FROM ( + SELECT *, + ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) as rn + FROM "{AliyunLogStore.workflow_execution_logstore}" + WHERE id = '{run_id}' AND __time__ > 0 + ) AS subquery WHERE rn = 1 + LIMIT 100 + """ + results = self.logstore_client.execute_sql( + sql=sql_query, + logstore=AliyunLogStore.workflow_execution_logstore, + ) + else: + # Use SDK with LogStore query syntax + query = f"id: {run_id}" + from_time = 0 + to_time = int(time.time()) # now + + results = self.logstore_client.get_logs( + logstore=AliyunLogStore.workflow_execution_logstore, + from_time=from_time, + to_time=to_time, + query=query, + line=100, + reverse=False, + ) + + if not results: + # Fallback to PostgreSQL for records created before LogStore migration + if self._enable_dual_read: + logger.debug("WorkflowRun not found in LogStore, falling back to PostgreSQL: run_id=%s", run_id) + return self._fallback_get_workflow_run_by_id(run_id) + return None + + # For PG mode, results are already deduplicated by the SQL query + # For SDK mode, if multiple results, select the one with max log_version + if self.logstore_client.supports_pg_protocol or len(results) == 1: + return _dict_to_workflow_run(results[0]) + else: + max_result = max(results, key=lambda x: int(x.get("log_version", 0))) + return _dict_to_workflow_run(max_result) + + except Exception: + logger.exception("Failed to get workflow run without tenant: run_id=%s", run_id) + # Try PostgreSQL fallback on any error (only if dual-read is enabled) + if self._enable_dual_read: + try: + return self._fallback_get_workflow_run_by_id(run_id) + except Exception: + logger.exception("PostgreSQL fallback also failed: run_id=%s", run_id) + raise + + def _fallback_get_workflow_run_by_id(self, run_id: str) -> WorkflowRun | None: + """Fallback to PostgreSQL query for records not in LogStore.""" + from sqlalchemy import select + from sqlalchemy.orm import Session + + from extensions.ext_database import db + + with Session(db.engine) as session: + stmt = select(WorkflowRun).where(WorkflowRun.id == run_id) + return session.scalar(stmt) + + def get_workflow_runs_count( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + status: str | None = None, + time_range: str | None = None, + ) -> dict[str, int]: + """ + Get workflow runs count statistics grouped by status. 
+
+        Optimization: Use finished_at IS NOT NULL for completed runs (10-50x faster)
+        """
+        logger.debug(
+            "get_workflow_runs_count: tenant_id=%s, app_id=%s, triggered_from=%s, status=%s",
+            tenant_id,
+            app_id,
+            triggered_from,
+            status,
+        )
+        # Build time range filter
+        time_filter = ""
+        if time_range:
+            # TODO: Parse time_range and convert to from_time/to_time
+            logger.warning("time_range filter not implemented")
+
+        # If status is provided, simple count
+        if status:
+            if status == "running":
+                # Running status requires window function
+                sql = f"""
+                    SELECT COUNT(*) as count
+                    FROM (
+                        SELECT *, ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) AS rn
+                        FROM {AliyunLogStore.workflow_execution_logstore}
+                        WHERE tenant_id='{tenant_id}'
+                        AND app_id='{app_id}'
+                        AND triggered_from='{triggered_from}'
+                        AND status='running'
+                        {time_filter}
+                    ) t
+                    WHERE rn = 1
+                """
+            else:
+                # Finished status uses optimized filter
+                sql = f"""
+                    SELECT COUNT(DISTINCT id) as count
+                    FROM {AliyunLogStore.workflow_execution_logstore}
+                    WHERE tenant_id='{tenant_id}'
+                    AND app_id='{app_id}'
+                    AND triggered_from='{triggered_from}'
+                    AND status='{status}'
+                    AND finished_at IS NOT NULL
+                    {time_filter}
+                """
+
+            try:
+                results = self.logstore_client.execute_sql(
+                    sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
+                )
+                # LogStore returns all values as strings, so cast the count explicitly
+                count = int(results[0]["count"]) if results and len(results) > 0 else 0
+
+                return {
+                    "total": count,
+                    "running": count if status == "running" else 0,
+                    "succeeded": count if status == "succeeded" else 0,
+                    "failed": count if status == "failed" else 0,
+                    "stopped": count if status == "stopped" else 0,
+                    "partial-succeeded": count if status == "partial-succeeded" else 0,
+                }
+            except Exception:
+                logger.exception("Failed to get workflow runs count")
+                raise
+
+        # No status filter - get counts grouped by status
+        # Use optimized query for finished runs, separate query for running
+        try:
+            # Count finished runs grouped by status
+            finished_sql = f"""
+                SELECT status, COUNT(DISTINCT id) as count
+                FROM {AliyunLogStore.workflow_execution_logstore}
+                WHERE tenant_id='{tenant_id}'
+                AND app_id='{app_id}'
+                AND triggered_from='{triggered_from}'
+                AND finished_at IS NOT NULL
+                {time_filter}
+                GROUP BY status
+            """
+
+            # Count running runs
+            running_sql = f"""
+                SELECT COUNT(*) as count
+                FROM (
+                    SELECT *, ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) AS rn
+                    FROM {AliyunLogStore.workflow_execution_logstore}
+                    WHERE tenant_id='{tenant_id}'
+                    AND app_id='{app_id}'
+                    AND triggered_from='{triggered_from}'
+                    AND status='running'
+                    {time_filter}
+                ) t
+                WHERE rn = 1
+            """
+
+            finished_results = self.logstore_client.execute_sql(
+                sql=finished_sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
+            )
+            running_results = self.logstore_client.execute_sql(
+                sql=running_sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
+            )
+
+            # Build response
+            status_counts = {
+                "running": 0,
+                "succeeded": 0,
+                "failed": 0,
+                "stopped": 0,
+                "partial-succeeded": 0,
+            }
+
+            total = 0
+            for result in finished_results:
+                status_val = result.get("status")
+                # Cast to int: LogStore returns counts as strings
+                count = int(result.get("count", 0))
+                if status_val in status_counts:
+                    status_counts[status_val] = count
+                    total += count
+
+            # Add running count (also cast from string)
+            running_count = int(running_results[0]["count"]) if running_results and len(running_results) > 0 else 0
+            status_counts["running"] = running_count
+            total += running_count
+
+            return {"total": total} | status_counts
+
+        except Exception:
+            logger.exception("Failed to get workflow runs count")
+            raise
+
+    def get_daily_runs_statistics(
+        self,
+        tenant_id: str,
+        app_id: str,
+        triggered_from: str,
+        start_date: datetime | None = None,
+        end_date: datetime | None = None,
+        timezone: str = "UTC",
+    ) -> list[DailyRunsStats]:
+        """
+        Get daily runs statistics using optimized query.
+
+        Optimization: Use finished_at IS NOT NULL + COUNT(DISTINCT id) (20-100x faster)
+        """
+        logger.debug(
+            "get_daily_runs_statistics: tenant_id=%s, app_id=%s, triggered_from=%s", tenant_id, app_id, triggered_from
+        )
+        # Build time range filter
+        time_filter = ""
+        if start_date:
+            time_filter += f" AND __time__ >= to_unixtime(from_iso8601_timestamp('{start_date.isoformat()}'))"
+        if end_date:
+            time_filter += f" AND __time__ < to_unixtime(from_iso8601_timestamp('{end_date.isoformat()}'))"
+
+        # Optimized query: Use finished_at filter to avoid window function
+        sql = f"""
+            SELECT DATE(from_unixtime(__time__)) as date, COUNT(DISTINCT id) as runs
+            FROM {AliyunLogStore.workflow_execution_logstore}
+            WHERE tenant_id='{tenant_id}'
+            AND app_id='{app_id}'
+            AND triggered_from='{triggered_from}'
+            AND finished_at IS NOT NULL
+            {time_filter}
+            GROUP BY date
+            ORDER BY date
+        """
+
+        try:
+            results = self.logstore_client.execute_sql(
+                sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
+            )
+
+            response_data = []
+            for row in results:
+                # Cast counts to int: LogStore returns all values as strings
+                response_data.append({"date": str(row.get("date", "")), "runs": int(row.get("runs", 0))})
+
+            return cast(list[DailyRunsStats], response_data)
+
+        except Exception:
+            logger.exception("Failed to get daily runs statistics")
+            raise
+
+    def get_daily_terminals_statistics(
+        self,
+        tenant_id: str,
+        app_id: str,
+        triggered_from: str,
+        start_date: datetime | None = None,
+        end_date: datetime | None = None,
+        timezone: str = "UTC",
+    ) -> list[DailyTerminalsStats]:
+        """
+        Get daily terminals statistics using optimized query.
+
+        Optimization: Use finished_at IS NOT NULL + COUNT(DISTINCT created_by) (20-100x faster)
+        """
+        logger.debug(
+            "get_daily_terminals_statistics: tenant_id=%s, app_id=%s, triggered_from=%s",
+            tenant_id,
+            app_id,
+            triggered_from,
+        )
+        # Build time range filter
+        time_filter = ""
+        if start_date:
+            time_filter += f" AND __time__ >= to_unixtime(from_iso8601_timestamp('{start_date.isoformat()}'))"
+        if end_date:
+            time_filter += f" AND __time__ < to_unixtime(from_iso8601_timestamp('{end_date.isoformat()}'))"
+
+        sql = f"""
+            SELECT DATE(from_unixtime(__time__)) as date, COUNT(DISTINCT created_by) as terminal_count
+            FROM {AliyunLogStore.workflow_execution_logstore}
+            WHERE tenant_id='{tenant_id}'
+            AND app_id='{app_id}'
+            AND triggered_from='{triggered_from}'
+            AND finished_at IS NOT NULL
+            {time_filter}
+            GROUP BY date
+            ORDER BY date
+        """
+
+        try:
+            results = self.logstore_client.execute_sql(
+                sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
+            )
+
+            response_data = []
+            for row in results:
+                # Cast counts to int: LogStore returns all values as strings
+                response_data.append(
+                    {"date": str(row.get("date", "")), "terminal_count": int(row.get("terminal_count", 0))}
+                )
+
+            return cast(list[DailyTerminalsStats], response_data)
+
+        except Exception:
+            logger.exception("Failed to get daily terminals statistics")
+            raise
+
+    def get_daily_token_cost_statistics(
+        self,
+        tenant_id: str,
+        app_id: str,
+        triggered_from: str,
+        start_date: datetime | None = None,
+        end_date: datetime | None = None,
+        timezone: str = "UTC",
+    ) -> list[DailyTokenCostStats]:
+        """
+        Get daily token cost statistics using optimized query.
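+
+        Illustrative result shape (hypothetical dates, counts, and trigger value):
+
+            repo.get_daily_token_cost_statistics(
+                tenant_id="t-1", app_id="a-1", triggered_from="app-run"
+            )
+            # -> [{"date": "2024-01-01", "token_count": 1234}, ...]
+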
+ + Optimization: Use finished_at IS NOT NULL + SUM(total_tokens) (20-100x faster) + """ + logger.debug( + "get_daily_token_cost_statistics: tenant_id=%s, app_id=%s, triggered_from=%s", + tenant_id, + app_id, + triggered_from, + ) + # Build time range filter + time_filter = "" + if start_date: + time_filter += f" AND __time__ >= to_unixtime(from_iso8601_timestamp('{start_date.isoformat()}'))" + if end_date: + time_filter += f" AND __time__ < to_unixtime(from_iso8601_timestamp('{end_date.isoformat()}'))" + + sql = f""" + SELECT DATE(from_unixtime(__time__)) as date, SUM(total_tokens) as token_count + FROM {AliyunLogStore.workflow_execution_logstore} + WHERE tenant_id='{tenant_id}' + AND app_id='{app_id}' + AND triggered_from='{triggered_from}' + AND finished_at IS NOT NULL + {time_filter} + GROUP BY date + ORDER BY date + """ + + try: + results = self.logstore_client.execute_sql( + sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore + ) + + response_data = [] + for row in results: + response_data.append({"date": str(row.get("date", "")), "token_count": row.get("token_count", 0)}) + + return cast(list[DailyTokenCostStats], response_data) + + except Exception: + logger.exception("Failed to get daily token cost statistics") + raise + + def get_average_app_interaction_statistics( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + timezone: str = "UTC", + ) -> list[AverageInteractionStats]: + """ + Get average app interaction statistics using optimized query. + + Optimization: Use finished_at IS NOT NULL + AVG (20-100x faster) + """ + logger.debug( + "get_average_app_interaction_statistics: tenant_id=%s, app_id=%s, triggered_from=%s", + tenant_id, + app_id, + triggered_from, + ) + # Build time range filter + time_filter = "" + if start_date: + time_filter += f" AND __time__ >= to_unixtime(from_iso8601_timestamp('{start_date.isoformat()}'))" + if end_date: + time_filter += f" AND __time__ < to_unixtime(from_iso8601_timestamp('{end_date.isoformat()}'))" + + sql = f""" + SELECT + AVG(sub.interactions) AS interactions, + sub.date + FROM ( + SELECT + DATE(from_unixtime(__time__)) AS date, + created_by, + COUNT(DISTINCT id) AS interactions + FROM {AliyunLogStore.workflow_execution_logstore} + WHERE tenant_id='{tenant_id}' + AND app_id='{app_id}' + AND triggered_from='{triggered_from}' + AND finished_at IS NOT NULL + {time_filter} + GROUP BY date, created_by + ) sub + GROUP BY sub.date + """ + + try: + results = self.logstore_client.execute_sql( + sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore + ) + + response_data = [] + for row in results: + response_data.append( + { + "date": str(row.get("date", "")), + "interactions": float(row.get("interactions", 0)), + } + ) + + return cast(list[AverageInteractionStats], response_data) + + except Exception: + logger.exception("Failed to get average app interaction statistics") + raise diff --git a/api/extensions/logstore/repositories/logstore_workflow_execution_repository.py b/api/extensions/logstore/repositories/logstore_workflow_execution_repository.py new file mode 100644 index 0000000000..1119534d52 --- /dev/null +++ b/api/extensions/logstore/repositories/logstore_workflow_execution_repository.py @@ -0,0 +1,196 @@ +import json +import logging +import os +import time +from typing import Union + +from sqlalchemy.engine import Engine +from sqlalchemy.orm import sessionmaker + +from 
core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository +from core.workflow.entities import WorkflowExecution +from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository +from extensions.logstore.aliyun_logstore import AliyunLogStore +from libs.helper import extract_tenant_id +from models import ( + Account, + CreatorUserRole, + EndUser, +) +from models.enums import WorkflowRunTriggeredFrom + +logger = logging.getLogger(__name__) + + +def to_serializable(obj): + """ + Convert non-JSON-serializable objects into JSON-compatible formats. + + - Uses `to_dict()` if it's a callable method. + - Falls back to string representation. + """ + if hasattr(obj, "to_dict") and callable(obj.to_dict): + return obj.to_dict() + return str(obj) + + +class LogstoreWorkflowExecutionRepository(WorkflowExecutionRepository): + def __init__( + self, + session_factory: sessionmaker | Engine, + user: Union[Account, EndUser], + app_id: str | None, + triggered_from: WorkflowRunTriggeredFrom | None, + ): + """ + Initialize the repository with a SQLAlchemy sessionmaker or engine and context information. + + Args: + session_factory: SQLAlchemy sessionmaker or engine for creating sessions + user: Account or EndUser object containing tenant_id, user ID, and role information + app_id: App ID for filtering by application (can be None) + triggered_from: Source of the execution trigger (DEBUGGING or APP_RUN) + """ + logger.debug( + "LogstoreWorkflowExecutionRepository.__init__: app_id=%s, triggered_from=%s", app_id, triggered_from + ) + # Initialize LogStore client + # Note: Project/logstore/index initialization is done at app startup via ext_logstore + self.logstore_client = AliyunLogStore() + + # Extract tenant_id from user + tenant_id = extract_tenant_id(user) + if not tenant_id: + raise ValueError("User must have a tenant_id or current_tenant_id") + self._tenant_id = tenant_id + + # Store app context + self._app_id = app_id + + # Extract user context + self._triggered_from = triggered_from + self._creator_user_id = user.id + + # Determine user role based on user type + self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER + + # Initialize SQL repository for dual-write support + self.sql_repository = SQLAlchemyWorkflowExecutionRepository(session_factory, user, app_id, triggered_from) + + # Control flag for dual-write (write to both LogStore and SQL database) + # Set to True to enable dual-write for safe migration, False to use LogStore only + self._enable_dual_write = os.environ.get("LOGSTORE_DUAL_WRITE_ENABLED", "true").lower() == "true" + + # Control flag for whether to write the `graph` field to LogStore. + # If LOGSTORE_ENABLE_PUT_GRAPH_FIELD is "true", write the full `graph` field; + # otherwise write an empty {} instead. Defaults to writing the `graph` field. + self._enable_put_graph_field = os.environ.get("LOGSTORE_ENABLE_PUT_GRAPH_FIELD", "true").lower() == "true" + + def _to_logstore_model(self, domain_model: WorkflowExecution) -> list[tuple[str, str]]: + """ + Convert a domain model to a logstore model (List[Tuple[str, str]]). 
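+
+        Illustrative output shape (hypothetical values; every field is
+        serialized to a string, JSON fields via json.dumps):
+
+            [("id", "run-1"),
+             ("log_version", "1700000000000000000"),
+             ("status", "succeeded"),
+             ("total_tokens", "100"),
+             ...]
+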
+ + Args: + domain_model: The domain model to convert + + Returns: + The logstore model as a list of key-value tuples + """ + logger.debug( + "_to_logstore_model: id=%s, workflow_id=%s, status=%s", + domain_model.id_, + domain_model.workflow_id, + domain_model.status.value, + ) + # Use values from constructor if provided + if not self._triggered_from: + raise ValueError("triggered_from is required in repository constructor") + if not self._creator_user_id: + raise ValueError("created_by is required in repository constructor") + if not self._creator_user_role: + raise ValueError("created_by_role is required in repository constructor") + + # Generate log_version as nanosecond timestamp for record versioning + log_version = str(time.time_ns()) + + logstore_model = [ + ("id", domain_model.id_), + ("log_version", log_version), # Add log_version field for append-only writes + ("tenant_id", self._tenant_id), + ("app_id", self._app_id or ""), + ("workflow_id", domain_model.workflow_id), + ( + "triggered_from", + self._triggered_from.value if hasattr(self._triggered_from, "value") else str(self._triggered_from), + ), + ("type", domain_model.workflow_type.value), + ("version", domain_model.workflow_version), + ( + "graph", + json.dumps(domain_model.graph, ensure_ascii=False, default=to_serializable) + if domain_model.graph and self._enable_put_graph_field + else "{}", + ), + ( + "inputs", + json.dumps(domain_model.inputs, ensure_ascii=False, default=to_serializable) + if domain_model.inputs + else "{}", + ), + ( + "outputs", + json.dumps(domain_model.outputs, ensure_ascii=False, default=to_serializable) + if domain_model.outputs + else "{}", + ), + ("status", domain_model.status.value), + ("error_message", domain_model.error_message or ""), + ("total_tokens", str(domain_model.total_tokens)), + ("total_steps", str(domain_model.total_steps)), + ("exceptions_count", str(domain_model.exceptions_count)), + ( + "created_by_role", + self._creator_user_role.value + if hasattr(self._creator_user_role, "value") + else str(self._creator_user_role), + ), + ("created_by", self._creator_user_id), + ("started_at", domain_model.started_at.isoformat() if domain_model.started_at else ""), + ("finished_at", domain_model.finished_at.isoformat() if domain_model.finished_at else ""), + ] + + return logstore_model + + def save(self, execution: WorkflowExecution) -> None: + """ + Save or update a WorkflowExecution domain entity to the logstore. + + This method serves as a domain-to-logstore adapter that: + 1. Converts the domain entity to its logstore representation + 2. Persists the logstore model using Aliyun SLS + 3. Maintains proper multi-tenancy by including tenant context during conversion + 4. 
Optionally writes to SQL database for dual-write support (controlled by LOGSTORE_DUAL_WRITE_ENABLED) + + Args: + execution: The WorkflowExecution domain entity to persist + """ + logger.debug( + "save: id=%s, workflow_id=%s, status=%s", execution.id_, execution.workflow_id, execution.status.value + ) + try: + logstore_model = self._to_logstore_model(execution) + self.logstore_client.put_log(AliyunLogStore.workflow_execution_logstore, logstore_model) + + logger.debug("Saved workflow execution to logstore: id=%s", execution.id_) + except Exception: + logger.exception("Failed to save workflow execution to logstore: id=%s", execution.id_) + raise + + # Dual-write to SQL database if enabled (for safe migration) + if self._enable_dual_write: + try: + self.sql_repository.save(execution) + logger.debug("Dual-write: saved workflow execution to SQL database: id=%s", execution.id_) + except Exception: + logger.exception("Failed to dual-write workflow execution to SQL database: id=%s", execution.id_) + # Don't raise - LogStore write succeeded, SQL is just a backup diff --git a/api/extensions/logstore/repositories/logstore_workflow_node_execution_repository.py b/api/extensions/logstore/repositories/logstore_workflow_node_execution_repository.py new file mode 100644 index 0000000000..400a089516 --- /dev/null +++ b/api/extensions/logstore/repositories/logstore_workflow_node_execution_repository.py @@ -0,0 +1,366 @@ +""" +LogStore implementation of the WorkflowNodeExecutionRepository. + +This module provides a LogStore-based repository for WorkflowNodeExecution entities, +using Aliyun SLS LogStore with append-only writes and version control. +""" + +import json +import logging +import os +import time +from collections.abc import Sequence +from datetime import datetime +from typing import Any, Union + +from sqlalchemy.engine import Engine +from sqlalchemy.orm import sessionmaker + +from core.model_runtime.utils.encoders import jsonable_encoder +from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository +from core.workflow.entities import WorkflowNodeExecution +from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus +from core.workflow.enums import NodeType +from core.workflow.repositories.workflow_node_execution_repository import OrderConfig, WorkflowNodeExecutionRepository +from core.workflow.workflow_type_encoder import WorkflowRuntimeTypeConverter +from extensions.logstore.aliyun_logstore import AliyunLogStore +from libs.helper import extract_tenant_id +from models import ( + Account, + CreatorUserRole, + EndUser, + WorkflowNodeExecutionTriggeredFrom, +) + +logger = logging.getLogger(__name__) + + +def _dict_to_workflow_node_execution(data: dict[str, Any]) -> WorkflowNodeExecution: + """ + Convert LogStore result dictionary to WorkflowNodeExecution domain model. 
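+
+    Illustrative sketch (hypothetical values; JSON columns are stored as
+    strings and parsed back into dicts here):
+
+        execution = _dict_to_workflow_node_execution({
+            "id": "ne-1",
+            "node_id": "llm",
+            "node_type": "llm",                    # mapped to NodeType
+            "status": "succeeded",                 # mapped to the status enum
+            "inputs": '{"query": "hi"}',
+            "created_at": "2024-01-01T00:00:00",
+        })
+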
+ + Args: + data: Dictionary from LogStore query result + + Returns: + WorkflowNodeExecution domain model instance + """ + logger.debug("_dict_to_workflow_node_execution: data keys=%s", list(data.keys())[:5]) + # Parse JSON fields + inputs = json.loads(data.get("inputs", "{}")) + process_data = json.loads(data.get("process_data", "{}")) + outputs = json.loads(data.get("outputs", "{}")) + metadata = json.loads(data.get("execution_metadata", "{}")) + + # Convert metadata to domain enum keys + domain_metadata = {} + for k, v in metadata.items(): + try: + domain_metadata[WorkflowNodeExecutionMetadataKey(k)] = v + except ValueError: + # Skip invalid metadata keys + continue + + # Convert status to domain enum + status = WorkflowNodeExecutionStatus(data.get("status", "running")) + + # Parse datetime fields + created_at = datetime.fromisoformat(data.get("created_at", "")) if data.get("created_at") else datetime.now() + finished_at = datetime.fromisoformat(data.get("finished_at", "")) if data.get("finished_at") else None + + return WorkflowNodeExecution( + id=data.get("id", ""), + node_execution_id=data.get("node_execution_id"), + workflow_id=data.get("workflow_id", ""), + workflow_execution_id=data.get("workflow_run_id"), + index=int(data.get("index", 0)), + predecessor_node_id=data.get("predecessor_node_id"), + node_id=data.get("node_id", ""), + node_type=NodeType(data.get("node_type", "start")), + title=data.get("title", ""), + inputs=inputs, + process_data=process_data, + outputs=outputs, + status=status, + error=data.get("error"), + elapsed_time=float(data.get("elapsed_time", 0.0)), + metadata=domain_metadata, + created_at=created_at, + finished_at=finished_at, + ) + + +class LogstoreWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository): + """ + LogStore implementation of the WorkflowNodeExecutionRepository interface. + + This implementation uses Aliyun SLS LogStore with an append-only write strategy: + - Each save() operation appends a new record with a version timestamp + - Updates are simulated by writing new records with higher version numbers + - Queries retrieve the latest version using finished_at IS NOT NULL filter + - Multi-tenancy is maintained through tenant_id filtering + + Version Strategy: + version = time.time_ns() # Nanosecond timestamp for unique ordering + """ + + def __init__( + self, + session_factory: sessionmaker | Engine, + user: Union[Account, EndUser], + app_id: str | None, + triggered_from: WorkflowNodeExecutionTriggeredFrom | None, + ): + """ + Initialize the repository with a SQLAlchemy sessionmaker or engine and context information. 
+ + Args: + session_factory: SQLAlchemy sessionmaker or engine for creating sessions + user: Account or EndUser object containing tenant_id, user ID, and role information + app_id: App ID for filtering by application (can be None) + triggered_from: Source of the execution trigger (SINGLE_STEP or WORKFLOW_RUN) + """ + logger.debug( + "LogstoreWorkflowNodeExecutionRepository.__init__: app_id=%s, triggered_from=%s", app_id, triggered_from + ) + # Initialize LogStore client + self.logstore_client = AliyunLogStore() + + # Extract tenant_id from user + tenant_id = extract_tenant_id(user) + if not tenant_id: + raise ValueError("User must have a tenant_id or current_tenant_id") + self._tenant_id = tenant_id + + # Store app context + self._app_id = app_id + + # Extract user context + self._triggered_from = triggered_from + self._creator_user_id = user.id + + # Determine user role based on user type + self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER + + # Initialize SQL repository for dual-write support + self.sql_repository = SQLAlchemyWorkflowNodeExecutionRepository(session_factory, user, app_id, triggered_from) + + # Control flag for dual-write (write to both LogStore and SQL database) + # Set to True to enable dual-write for safe migration, False to use LogStore only + self._enable_dual_write = os.environ.get("LOGSTORE_DUAL_WRITE_ENABLED", "true").lower() == "true" + + def _to_logstore_model(self, domain_model: WorkflowNodeExecution) -> Sequence[tuple[str, str]]: + logger.debug( + "_to_logstore_model: id=%s, node_id=%s, status=%s", + domain_model.id, + domain_model.node_id, + domain_model.status.value, + ) + if not self._triggered_from: + raise ValueError("triggered_from is required in repository constructor") + if not self._creator_user_id: + raise ValueError("created_by is required in repository constructor") + if not self._creator_user_role: + raise ValueError("created_by_role is required in repository constructor") + + # Generate log_version as nanosecond timestamp for record versioning + log_version = str(time.time_ns()) + + json_converter = WorkflowRuntimeTypeConverter() + + logstore_model = [ + ("id", domain_model.id), + ("log_version", log_version), # Add log_version field for append-only writes + ("tenant_id", self._tenant_id), + ("app_id", self._app_id or ""), + ("workflow_id", domain_model.workflow_id), + ( + "triggered_from", + self._triggered_from.value if hasattr(self._triggered_from, "value") else str(self._triggered_from), + ), + ("workflow_run_id", domain_model.workflow_execution_id or ""), + ("index", str(domain_model.index)), + ("predecessor_node_id", domain_model.predecessor_node_id or ""), + ("node_execution_id", domain_model.node_execution_id or ""), + ("node_id", domain_model.node_id), + ("node_type", domain_model.node_type.value), + ("title", domain_model.title), + ( + "inputs", + json.dumps(json_converter.to_json_encodable(domain_model.inputs), ensure_ascii=False) + if domain_model.inputs + else "{}", + ), + ( + "process_data", + json.dumps(json_converter.to_json_encodable(domain_model.process_data), ensure_ascii=False) + if domain_model.process_data + else "{}", + ), + ( + "outputs", + json.dumps(json_converter.to_json_encodable(domain_model.outputs), ensure_ascii=False) + if domain_model.outputs + else "{}", + ), + ("status", domain_model.status.value), + ("error", domain_model.error or ""), + ("elapsed_time", str(domain_model.elapsed_time)), + ( + "execution_metadata", + 
json.dumps(jsonable_encoder(domain_model.metadata), ensure_ascii=False) + if domain_model.metadata + else "{}", + ), + ("created_at", domain_model.created_at.isoformat() if domain_model.created_at else ""), + ("created_by_role", self._creator_user_role.value), + ("created_by", self._creator_user_id), + ("finished_at", domain_model.finished_at.isoformat() if domain_model.finished_at else ""), + ] + + return logstore_model + + def save(self, execution: WorkflowNodeExecution) -> None: + """ + Save or update a NodeExecution domain entity to LogStore. + + This method serves as a domain-to-logstore adapter that: + 1. Converts the domain entity to its logstore representation + 2. Appends a new record with a log_version timestamp + 3. Maintains proper multi-tenancy by including tenant context during conversion + 4. Optionally writes to SQL database for dual-write support (controlled by LOGSTORE_DUAL_WRITE_ENABLED) + + Each save operation creates a new record. Updates are simulated by writing + new records with higher log_version numbers. + + Args: + execution: The NodeExecution domain entity to persist + """ + logger.debug( + "save: id=%s, node_execution_id=%s, status=%s", + execution.id, + execution.node_execution_id, + execution.status.value, + ) + try: + logstore_model = self._to_logstore_model(execution) + self.logstore_client.put_log(AliyunLogStore.workflow_node_execution_logstore, logstore_model) + + logger.debug( + "Saved node execution to LogStore: id=%s, node_execution_id=%s, status=%s", + execution.id, + execution.node_execution_id, + execution.status.value, + ) + except Exception: + logger.exception( + "Failed to save node execution to LogStore: id=%s, node_execution_id=%s", + execution.id, + execution.node_execution_id, + ) + raise + + # Dual-write to SQL database if enabled (for safe migration) + if self._enable_dual_write: + try: + self.sql_repository.save(execution) + logger.debug("Dual-write: saved node execution to SQL database: id=%s", execution.id) + except Exception: + logger.exception("Failed to dual-write node execution to SQL database: id=%s", execution.id) + # Don't raise - LogStore write succeeded, SQL is just a backup + + def save_execution_data(self, execution: WorkflowNodeExecution) -> None: + """ + Save or update the inputs, process_data, or outputs associated with a specific + node_execution record. + + For LogStore implementation, this is similar to save() since we always write + complete records. We append a new record with updated data fields. + + Args: + execution: The NodeExecution instance with data to save + """ + logger.debug("save_execution_data: id=%s, node_execution_id=%s", execution.id, execution.node_execution_id) + # In LogStore, we simply write a new complete record with the data + # The log_version timestamp will ensure this is treated as the latest version + self.save(execution) + + def get_by_workflow_run( + self, + workflow_run_id: str, + order_config: OrderConfig | None = None, + ) -> Sequence[WorkflowNodeExecution]: + """ + Retrieve all NodeExecution instances for a specific workflow run. + Uses LogStore SQL query with finished_at IS NOT NULL filter for deduplication. + This ensures we only get the final version of each node execution. 
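+
+        The generated statement has roughly this shape (illustrative placeholders):
+            SELECT *
+            FROM <workflow_node_execution_logstore>
+            WHERE workflow_run_id='<run_id>' AND tenant_id='<tenant_id>'
+            AND finished_at IS NOT NULL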
+
+        Args:
+            workflow_run_id: The workflow run ID
+            order_config: Optional configuration for ordering results
+                order_config.order_by: List of fields to order by (e.g., ["index", "created_at"])
+                order_config.order_direction: Direction to order ("asc" or "desc")
+
+        Returns:
+            A list of NodeExecution instances
+
+        Note:
+            This method filters by finished_at IS NOT NULL to avoid duplicates from
+            version updates. For complete history including intermediate states,
+            a different query strategy would be needed.
+        """
+        logger.debug("get_by_workflow_run: workflow_run_id=%s, order_config=%s", workflow_run_id, order_config)
+        # Build SQL query with deduplication using finished_at IS NOT NULL
+        # This optimization avoids window functions for the common case where we only
+        # want the final state of each node execution
+
+        # Build ORDER BY clause
+        order_clause = ""
+        if order_config and order_config.order_by:
+            order_fields = []
+            for field in order_config.order_by:
+                # Domain field names currently map 1:1 to LogStore field names
+                field_name = field
+                if order_config.order_direction == "desc":
+                    order_fields.append(f"{field_name} DESC")
+                else:
+                    order_fields.append(f"{field_name} ASC")
+            if order_fields:
+                order_clause = "ORDER BY " + ", ".join(order_fields)
+
+        sql = f"""
+            SELECT *
+            FROM {AliyunLogStore.workflow_node_execution_logstore}
+            WHERE workflow_run_id='{workflow_run_id}'
+            AND tenant_id='{self._tenant_id}'
+            AND finished_at IS NOT NULL
+        """
+
+        if self._app_id:
+            sql += f" AND app_id='{self._app_id}'"
+
+        if order_clause:
+            sql += f" {order_clause}"
+
+        try:
+            # Execute SQL query
+            results = self.logstore_client.execute_sql(
+                sql=sql,
+                query="*",
+                logstore=AliyunLogStore.workflow_node_execution_logstore,
+            )
+
+            # Convert LogStore results to WorkflowNodeExecution domain models
+            executions = []
+            for row in results:
+                try:
+                    execution = _dict_to_workflow_node_execution(row)
+                    executions.append(execution)
+                except Exception as e:
+                    logger.warning("Failed to convert row to WorkflowNodeExecution: %s, row=%s", e, row)
+                    continue
+
+            return executions
+
+        except Exception:
+            logger.exception("Failed to retrieve node executions from LogStore: workflow_run_id=%s", workflow_run_id)
+            raise
diff --git a/api/extensions/otel/decorators/base.py b/api/extensions/otel/decorators/base.py
index 9604a3b6d5..14221d24dd 100644
--- a/api/extensions/otel/decorators/base.py
+++ b/api/extensions/otel/decorators/base.py
@@ -1,5 +1,4 @@
 import functools
-import os
 from collections.abc import Callable
 from typing import Any, TypeVar, cast
@@ -7,22 +6,13 @@ from opentelemetry.trace import get_tracer
 from configs import dify_config
 from extensions.otel.decorators.handler import SpanHandler
+from extensions.otel.runtime import is_instrument_flag_enabled
 T = TypeVar("T", bound=Callable[..., Any])
 _HANDLER_INSTANCES: dict[type[SpanHandler], SpanHandler] = {SpanHandler: SpanHandler()}
-def _is_instrument_flag_enabled() -> bool:
-    """
-    Check if external instrumentation is enabled via environment variable.
-
-    Third-party non-invasive instrumentation agents set this flag to coordinate
-    with Dify's manual OpenTelemetry instrumentation.
- """ - return os.getenv("ENABLE_OTEL_FOR_INSTRUMENT", "").strip().lower() == "true" - - def _get_handler_instance(handler_class: type[SpanHandler]) -> SpanHandler: """Get or create a singleton instance of the handler class.""" if handler_class not in _HANDLER_INSTANCES: @@ -43,7 +33,7 @@ def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T], def decorator(func: T) -> T: @functools.wraps(func) def wrapper(*args: Any, **kwargs: Any) -> Any: - if not (dify_config.ENABLE_OTEL or _is_instrument_flag_enabled()): + if not (dify_config.ENABLE_OTEL or is_instrument_flag_enabled()): return func(*args, **kwargs) handler = _get_handler_instance(handler_class or SpanHandler) diff --git a/api/extensions/otel/instrumentation.py b/api/extensions/otel/instrumentation.py index 3597110cba..6617f69513 100644 --- a/api/extensions/otel/instrumentation.py +++ b/api/extensions/otel/instrumentation.py @@ -19,26 +19,43 @@ logger = logging.getLogger(__name__) class ExceptionLoggingHandler(logging.Handler): + """ + Handler that records exceptions to the current OpenTelemetry span. + + Unlike creating a new span, this records exceptions on the existing span + to maintain trace context consistency throughout the request lifecycle. + """ + def emit(self, record: logging.LogRecord): with contextlib.suppress(Exception): - if record.exc_info: - tracer = get_tracer_provider().get_tracer("dify.exception.logging") - with tracer.start_as_current_span( - "log.exception", - attributes={ - "log.level": record.levelname, - "log.message": record.getMessage(), - "log.logger": record.name, - "log.file.path": record.pathname, - "log.file.line": record.lineno, - }, - ) as span: - span.set_status(StatusCode.ERROR) - if record.exc_info[1]: - span.record_exception(record.exc_info[1]) - span.set_attribute("exception.message", str(record.exc_info[1])) - if record.exc_info[0]: - span.set_attribute("exception.type", record.exc_info[0].__name__) + if not record.exc_info: + return + + from opentelemetry.trace import get_current_span + + span = get_current_span() + if not span or not span.is_recording(): + return + + # Record exception on the current span instead of creating a new one + span.set_status(StatusCode.ERROR, record.getMessage()) + + # Add log context as span events/attributes + span.add_event( + "log.exception", + attributes={ + "log.level": record.levelname, + "log.message": record.getMessage(), + "log.logger": record.name, + "log.file.path": record.pathname, + "log.file.line": record.lineno, + }, + ) + + if record.exc_info[1]: + span.record_exception(record.exc_info[1]) + if record.exc_info[0]: + span.set_attribute("exception.type", record.exc_info[0].__name__) def instrument_exception_logging() -> None: diff --git a/api/extensions/otel/runtime.py b/api/extensions/otel/runtime.py index 16f5ccf488..a7181d2683 100644 --- a/api/extensions/otel/runtime.py +++ b/api/extensions/otel/runtime.py @@ -1,4 +1,5 @@ import logging +import os import sys from typing import Union @@ -71,3 +72,13 @@ def init_celery_worker(*args, **kwargs): if dify_config.DEBUG: logger.info("Initializing OpenTelemetry for Celery worker") CeleryInstrumentor(tracer_provider=tracer_provider, meter_provider=metric_provider).instrument() + + +def is_instrument_flag_enabled() -> bool: + """ + Check if external instrumentation is enabled via environment variable. + + Third-party non-invasive instrumentation agents set this flag to coordinate + with Dify's manual OpenTelemetry instrumentation. 
+ """ + return os.getenv("ENABLE_OTEL_FOR_INSTRUMENT", "").strip().lower() == "true" diff --git a/api/extensions/storage/aliyun_oss_storage.py b/api/extensions/storage/aliyun_oss_storage.py index 2283581f62..3d7ef99c9e 100644 --- a/api/extensions/storage/aliyun_oss_storage.py +++ b/api/extensions/storage/aliyun_oss_storage.py @@ -26,6 +26,7 @@ class AliyunOssStorage(BaseStorage): self.bucket_name, connect_timeout=30, region=region, + cloudbox_id=dify_config.ALIYUN_CLOUDBOX_ID, ) def save(self, filename, data): diff --git a/api/extensions/storage/clickzetta_volume/file_lifecycle.py b/api/extensions/storage/clickzetta_volume/file_lifecycle.py index 51a97b20f8..1d9911465b 100644 --- a/api/extensions/storage/clickzetta_volume/file_lifecycle.py +++ b/api/extensions/storage/clickzetta_volume/file_lifecycle.py @@ -5,6 +5,8 @@ automatic cleanup, backup and restore. Supports complete lifecycle management for knowledge base files. """ +from __future__ import annotations + import json import logging import operator @@ -48,7 +50,7 @@ class FileMetadata: return data @classmethod - def from_dict(cls, data: dict) -> "FileMetadata": + def from_dict(cls, data: dict) -> FileMetadata: """Create instance from dictionary""" data = data.copy() data["created_at"] = datetime.fromisoformat(data["created_at"]) diff --git a/api/extensions/storage/huawei_obs_storage.py b/api/extensions/storage/huawei_obs_storage.py index 74fed26f65..72cb59abbe 100644 --- a/api/extensions/storage/huawei_obs_storage.py +++ b/api/extensions/storage/huawei_obs_storage.py @@ -17,6 +17,7 @@ class HuaweiObsStorage(BaseStorage): access_key_id=dify_config.HUAWEI_OBS_ACCESS_KEY, secret_access_key=dify_config.HUAWEI_OBS_SECRET_KEY, server=dify_config.HUAWEI_OBS_SERVER, + path_style=dify_config.HUAWEI_OBS_PATH_STYLE, ) def save(self, filename, data): diff --git a/api/extensions/storage/opendal_storage.py b/api/extensions/storage/opendal_storage.py index a084844d72..83c5c2d12f 100644 --- a/api/extensions/storage/opendal_storage.py +++ b/api/extensions/storage/opendal_storage.py @@ -87,15 +87,16 @@ class OpenDALStorage(BaseStorage): if not self.exists(path): raise FileNotFoundError("Path not found") - all_files = self.op.scan(path=path) + # Use the new OpenDAL 0.46.0+ API with recursive listing + lister = self.op.list(path, recursive=True) if files and directories: logger.debug("files and directories on %s scanned", path) - return [f.path for f in all_files] + return [entry.path for entry in lister] if files: logger.debug("files on %s scanned", path) - return [f.path for f in all_files if not f.path.endswith("/")] + return [entry.path for entry in lister if not entry.metadata.is_dir] elif directories: logger.debug("directories on %s scanned", path) - return [f.path for f in all_files if f.path.endswith("/")] + return [entry.path for entry in lister if entry.metadata.is_dir] else: raise ValueError("At least one of files or directories must be True") diff --git a/api/extensions/storage/tencent_cos_storage.py b/api/extensions/storage/tencent_cos_storage.py index ea5d982efc..cf092c6973 100644 --- a/api/extensions/storage/tencent_cos_storage.py +++ b/api/extensions/storage/tencent_cos_storage.py @@ -13,12 +13,20 @@ class TencentCosStorage(BaseStorage): super().__init__() self.bucket_name = dify_config.TENCENT_COS_BUCKET_NAME - config = CosConfig( - Region=dify_config.TENCENT_COS_REGION, - SecretId=dify_config.TENCENT_COS_SECRET_ID, - SecretKey=dify_config.TENCENT_COS_SECRET_KEY, - Scheme=dify_config.TENCENT_COS_SCHEME, - ) + if 
dify_config.TENCENT_COS_CUSTOM_DOMAIN: + config = CosConfig( + Domain=dify_config.TENCENT_COS_CUSTOM_DOMAIN, + SecretId=dify_config.TENCENT_COS_SECRET_ID, + SecretKey=dify_config.TENCENT_COS_SECRET_KEY, + Scheme=dify_config.TENCENT_COS_SCHEME, + ) + else: + config = CosConfig( + Region=dify_config.TENCENT_COS_REGION, + SecretId=dify_config.TENCENT_COS_SECRET_ID, + SecretKey=dify_config.TENCENT_COS_SECRET_KEY, + Scheme=dify_config.TENCENT_COS_SCHEME, + ) self.client = CosS3Client(config) def save(self, filename, data): diff --git a/api/fields/annotation_fields.py b/api/fields/annotation_fields.py index 38835d5ac7..e69306dcb2 100644 --- a/api/fields/annotation_fields.py +++ b/api/fields/annotation_fields.py @@ -1,4 +1,4 @@ -from flask_restx import Api, Namespace, fields +from flask_restx import Namespace, fields from libs.helper import TimestampField @@ -12,7 +12,7 @@ annotation_fields = { } -def build_annotation_model(api_or_ns: Api | Namespace): +def build_annotation_model(api_or_ns: Namespace): """Build the annotation model for the API or Namespace.""" return api_or_ns.model("Annotation", annotation_fields) diff --git a/api/fields/conversation_fields.py b/api/fields/conversation_fields.py index ecc267cf38..fe59cdcbb4 100644 --- a/api/fields/conversation_fields.py +++ b/api/fields/conversation_fields.py @@ -1,236 +1,339 @@ -from flask_restx import Api, Namespace, fields +from __future__ import annotations -from fields.member_fields import simple_account_fields -from libs.helper import TimestampField +from datetime import datetime +from typing import Any, TypeAlias -from .raws import FilesContainedField +from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator + +from core.file import File + +JSONValue: TypeAlias = Any -class MessageTextField(fields.Raw): - def format(self, value): - return value[0]["text"] if value else "" +class ResponseModel(BaseModel): + model_config = ConfigDict( + from_attributes=True, + extra="ignore", + populate_by_name=True, + serialize_by_alias=True, + protected_namespaces=(), + ) -feedback_fields = { - "rating": fields.String, - "content": fields.String, - "from_source": fields.String, - "from_end_user_id": fields.String, - "from_account": fields.Nested(simple_account_fields, allow_null=True), -} +class MessageFile(ResponseModel): + id: str + filename: str + type: str + url: str | None = None + mime_type: str | None = None + size: int | None = None + transfer_method: str + belongs_to: str | None = None + upload_file_id: str | None = None -annotation_fields = { - "id": fields.String, - "question": fields.String, - "content": fields.String, - "account": fields.Nested(simple_account_fields, allow_null=True), - "created_at": TimestampField, -} - -annotation_hit_history_fields = { - "annotation_id": fields.String(attribute="id"), - "annotation_create_account": fields.Nested(simple_account_fields, allow_null=True), - "created_at": TimestampField, -} - -message_file_fields = { - "id": fields.String, - "filename": fields.String, - "type": fields.String, - "url": fields.String, - "mime_type": fields.String, - "size": fields.Integer, - "transfer_method": fields.String, - "belongs_to": fields.String(default="user"), - "upload_file_id": fields.String(default=None), -} + @field_validator("transfer_method", mode="before") + @classmethod + def _normalize_transfer_method(cls, value: object) -> str: + if isinstance(value, str): + return value + return str(value) -def build_message_file_model(api_or_ns: Api | Namespace): - """Build the message file 
fields for the API or Namespace.""" - return api_or_ns.model("MessageFile", message_file_fields) +class SimpleConversation(ResponseModel): + id: str + name: str + inputs: dict[str, JSONValue] + status: str + introduction: str | None = None + created_at: int | None = None + updated_at: int | None = None + + @field_validator("inputs", mode="before") + @classmethod + def _normalize_inputs(cls, value: JSONValue) -> JSONValue: + return format_files_contained(value) + + @field_validator("created_at", "updated_at", mode="before") + @classmethod + def _normalize_timestamp(cls, value: datetime | int | None) -> int | None: + if isinstance(value, datetime): + return to_timestamp(value) + return value -agent_thought_fields = { - "id": fields.String, - "chain_id": fields.String, - "message_id": fields.String, - "position": fields.Integer, - "thought": fields.String, - "tool": fields.String, - "tool_labels": fields.Raw, - "tool_input": fields.String, - "created_at": TimestampField, - "observation": fields.String, - "files": fields.List(fields.String), -} - -message_detail_fields = { - "id": fields.String, - "conversation_id": fields.String, - "inputs": FilesContainedField, - "query": fields.String, - "message": fields.Raw, - "message_tokens": fields.Integer, - "answer": fields.String(attribute="re_sign_file_url_answer"), - "answer_tokens": fields.Integer, - "provider_response_latency": fields.Float, - "from_source": fields.String, - "from_end_user_id": fields.String, - "from_account_id": fields.String, - "feedbacks": fields.List(fields.Nested(feedback_fields)), - "workflow_run_id": fields.String, - "annotation": fields.Nested(annotation_fields, allow_null=True), - "annotation_hit_history": fields.Nested(annotation_hit_history_fields, allow_null=True), - "created_at": TimestampField, - "agent_thoughts": fields.List(fields.Nested(agent_thought_fields)), - "message_files": fields.List(fields.Nested(message_file_fields)), - "metadata": fields.Raw(attribute="message_metadata_dict"), - "status": fields.String, - "error": fields.String, - "parent_message_id": fields.String, -} - -feedback_stat_fields = {"like": fields.Integer, "dislike": fields.Integer} -status_count_fields = {"success": fields.Integer, "failed": fields.Integer, "partial_success": fields.Integer} -model_config_fields = { - "opening_statement": fields.String, - "suggested_questions": fields.Raw, - "model": fields.Raw, - "user_input_form": fields.Raw, - "pre_prompt": fields.String, - "agent_mode": fields.Raw, -} - -simple_model_config_fields = { - "model": fields.Raw(attribute="model_dict"), - "pre_prompt": fields.String, -} - -simple_message_detail_fields = { - "inputs": FilesContainedField, - "query": fields.String, - "message": MessageTextField, - "answer": fields.String, -} - -conversation_fields = { - "id": fields.String, - "status": fields.String, - "from_source": fields.String, - "from_end_user_id": fields.String, - "from_end_user_session_id": fields.String(), - "from_account_id": fields.String, - "from_account_name": fields.String, - "read_at": TimestampField, - "created_at": TimestampField, - "updated_at": TimestampField, - "annotation": fields.Nested(annotation_fields, allow_null=True), - "model_config": fields.Nested(simple_model_config_fields), - "user_feedback_stats": fields.Nested(feedback_stat_fields), - "admin_feedback_stats": fields.Nested(feedback_stat_fields), - "message": fields.Nested(simple_message_detail_fields, attribute="first_message"), -} - -conversation_pagination_fields = { - "page": fields.Integer, - "limit": 
fields.Integer(attribute="per_page"), - "total": fields.Integer, - "has_more": fields.Boolean(attribute="has_next"), - "data": fields.List(fields.Nested(conversation_fields), attribute="items"), -} - -conversation_message_detail_fields = { - "id": fields.String, - "status": fields.String, - "from_source": fields.String, - "from_end_user_id": fields.String, - "from_account_id": fields.String, - "created_at": TimestampField, - "model_config": fields.Nested(model_config_fields), - "message": fields.Nested(message_detail_fields, attribute="first_message"), -} - -conversation_with_summary_fields = { - "id": fields.String, - "status": fields.String, - "from_source": fields.String, - "from_end_user_id": fields.String, - "from_end_user_session_id": fields.String, - "from_account_id": fields.String, - "from_account_name": fields.String, - "name": fields.String, - "summary": fields.String(attribute="summary_or_query"), - "read_at": TimestampField, - "created_at": TimestampField, - "updated_at": TimestampField, - "annotated": fields.Boolean, - "model_config": fields.Nested(simple_model_config_fields), - "message_count": fields.Integer, - "user_feedback_stats": fields.Nested(feedback_stat_fields), - "admin_feedback_stats": fields.Nested(feedback_stat_fields), - "status_count": fields.Nested(status_count_fields), -} - -conversation_with_summary_pagination_fields = { - "page": fields.Integer, - "limit": fields.Integer(attribute="per_page"), - "total": fields.Integer, - "has_more": fields.Boolean(attribute="has_next"), - "data": fields.List(fields.Nested(conversation_with_summary_fields), attribute="items"), -} - -conversation_detail_fields = { - "id": fields.String, - "status": fields.String, - "from_source": fields.String, - "from_end_user_id": fields.String, - "from_account_id": fields.String, - "created_at": TimestampField, - "updated_at": TimestampField, - "annotated": fields.Boolean, - "introduction": fields.String, - "model_config": fields.Nested(model_config_fields), - "message_count": fields.Integer, - "user_feedback_stats": fields.Nested(feedback_stat_fields), - "admin_feedback_stats": fields.Nested(feedback_stat_fields), -} - -simple_conversation_fields = { - "id": fields.String, - "name": fields.String, - "inputs": FilesContainedField, - "status": fields.String, - "introduction": fields.String, - "created_at": TimestampField, - "updated_at": TimestampField, -} - -conversation_delete_fields = { - "result": fields.String, -} - -conversation_infinite_scroll_pagination_fields = { - "limit": fields.Integer, - "has_more": fields.Boolean, - "data": fields.List(fields.Nested(simple_conversation_fields)), -} +class ConversationInfiniteScrollPagination(ResponseModel): + limit: int + has_more: bool + data: list[SimpleConversation] -def build_conversation_infinite_scroll_pagination_model(api_or_ns: Api | Namespace): - """Build the conversation infinite scroll pagination model for the API or Namespace.""" - simple_conversation_model = build_simple_conversation_model(api_or_ns) - - copied_fields = conversation_infinite_scroll_pagination_fields.copy() - copied_fields["data"] = fields.List(fields.Nested(simple_conversation_model)) - return api_or_ns.model("ConversationInfiniteScrollPagination", copied_fields) +class ConversationDelete(ResponseModel): + result: str -def build_conversation_delete_model(api_or_ns: Api | Namespace): - """Build the conversation delete model for the API or Namespace.""" - return api_or_ns.model("ConversationDelete", conversation_delete_fields) +class ResultResponse(ResponseModel): 
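+    """Generic single-field result payload."""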
+ result: str -def build_simple_conversation_model(api_or_ns: Api | Namespace): - """Build the simple conversation model for the API or Namespace.""" - return api_or_ns.model("SimpleConversation", simple_conversation_fields) +class SimpleAccount(ResponseModel): + id: str + name: str + email: str + + +class Feedback(ResponseModel): + rating: str + content: str | None = None + from_source: str + from_end_user_id: str | None = None + from_account: SimpleAccount | None = None + + +class Annotation(ResponseModel): + id: str + question: str | None = None + content: str + account: SimpleAccount | None = None + created_at: int | None = None + + @field_validator("created_at", mode="before") + @classmethod + def _normalize_created_at(cls, value: datetime | int | None) -> int | None: + if isinstance(value, datetime): + return to_timestamp(value) + return value + + +class AnnotationHitHistory(ResponseModel): + annotation_id: str + annotation_create_account: SimpleAccount | None = None + created_at: int | None = None + + @field_validator("created_at", mode="before") + @classmethod + def _normalize_created_at(cls, value: datetime | int | None) -> int | None: + if isinstance(value, datetime): + return to_timestamp(value) + return value + + +class AgentThought(ResponseModel): + id: str + chain_id: str | None = None + message_chain_id: str | None = Field(default=None, exclude=True, validation_alias="message_chain_id") + message_id: str + position: int + thought: str | None = None + tool: str | None = None + tool_labels: JSONValue + tool_input: str | None = None + created_at: int | None = None + observation: str | None = None + files: list[str] + + @field_validator("created_at", mode="before") + @classmethod + def _normalize_created_at(cls, value: datetime | int | None) -> int | None: + if isinstance(value, datetime): + return to_timestamp(value) + return value + + @model_validator(mode="after") + def _fallback_chain_id(self): + if self.chain_id is None and self.message_chain_id: + self.chain_id = self.message_chain_id + return self + + +class MessageDetail(ResponseModel): + id: str + conversation_id: str + inputs: dict[str, JSONValue] + query: str + message: JSONValue + message_tokens: int + answer: str + answer_tokens: int + provider_response_latency: float + from_source: str + from_end_user_id: str | None = None + from_account_id: str | None = None + feedbacks: list[Feedback] + workflow_run_id: str | None = None + annotation: Annotation | None = None + annotation_hit_history: AnnotationHitHistory | None = None + created_at: int | None = None + agent_thoughts: list[AgentThought] + message_files: list[MessageFile] + metadata: JSONValue + status: str + error: str | None = None + parent_message_id: str | None = None + generation_detail: JSONValue | None = Field(default=None, validation_alias="generation_detail_dict") + + @field_validator("inputs", mode="before") + @classmethod + def _normalize_inputs(cls, value: JSONValue) -> JSONValue: + return format_files_contained(value) + + @field_validator("created_at", mode="before") + @classmethod + def _normalize_created_at(cls, value: datetime | int | None) -> int | None: + if isinstance(value, datetime): + return to_timestamp(value) + return value + + +class FeedbackStat(ResponseModel): + like: int + dislike: int + + +class StatusCount(ResponseModel): + success: int + failed: int + partial_success: int + + +class ModelConfig(ResponseModel): + opening_statement: str | None = None + suggested_questions: JSONValue | None = None + model: JSONValue | None = None + 
user_input_form: JSONValue | None = None + pre_prompt: str | None = None + agent_mode: JSONValue | None = None + + +class SimpleModelConfig(ResponseModel): + model: JSONValue | None = None + pre_prompt: str | None = None + + +class SimpleMessageDetail(ResponseModel): + inputs: dict[str, JSONValue] + query: str + message: str + answer: str + + @field_validator("inputs", mode="before") + @classmethod + def _normalize_inputs(cls, value: JSONValue) -> JSONValue: + return format_files_contained(value) + + +class Conversation(ResponseModel): + id: str + status: str + from_source: str + from_end_user_id: str | None = None + from_end_user_session_id: str | None = None + from_account_id: str | None = None + from_account_name: str | None = None + read_at: int | None = None + created_at: int | None = None + updated_at: int | None = None + annotation: Annotation | None = None + model_config_: SimpleModelConfig | None = Field(default=None, alias="model_config") + user_feedback_stats: FeedbackStat | None = None + admin_feedback_stats: FeedbackStat | None = None + message: SimpleMessageDetail | None = None + + +class ConversationPagination(ResponseModel): + page: int + limit: int + total: int + has_more: bool + data: list[Conversation] + + +class ConversationMessageDetail(ResponseModel): + id: str + status: str + from_source: str + from_end_user_id: str | None = None + from_account_id: str | None = None + created_at: int | None = None + model_config_: ModelConfig | None = Field(default=None, alias="model_config") + message: MessageDetail | None = None + + +class ConversationWithSummary(ResponseModel): + id: str + status: str + from_source: str + from_end_user_id: str | None = None + from_end_user_session_id: str | None = None + from_account_id: str | None = None + from_account_name: str | None = None + name: str + summary: str + read_at: int | None = None + created_at: int | None = None + updated_at: int | None = None + annotated: bool + model_config_: SimpleModelConfig | None = Field(default=None, alias="model_config") + message_count: int + user_feedback_stats: FeedbackStat | None = None + admin_feedback_stats: FeedbackStat | None = None + status_count: StatusCount | None = None + + +class ConversationWithSummaryPagination(ResponseModel): + page: int + limit: int + total: int + has_more: bool + data: list[ConversationWithSummary] + + +class ConversationDetail(ResponseModel): + id: str + status: str + from_source: str + from_end_user_id: str | None = None + from_account_id: str | None = None + created_at: int | None = None + updated_at: int | None = None + annotated: bool + introduction: str | None = None + model_config_: ModelConfig | None = Field(default=None, alias="model_config") + message_count: int + user_feedback_stats: FeedbackStat | None = None + admin_feedback_stats: FeedbackStat | None = None + + +def to_timestamp(value: datetime | None) -> int | None: + if value is None: + return None + return int(value.timestamp()) + + +def format_files_contained(value: JSONValue) -> JSONValue: + if isinstance(value, File): + return value.model_dump() + if isinstance(value, dict): + return {k: format_files_contained(v) for k, v in value.items()} + if isinstance(value, list): + return [format_files_contained(v) for v in value] + return value + + +def message_text(value: JSONValue) -> str: + if isinstance(value, list) and value: + first = value[0] + if isinstance(first, dict): + text = first.get("text") + if isinstance(text, str): + return text + return "" + + +def extract_model_config(value: object | None) 
-> dict[str, JSONValue]: + if value is None: + return {} + if isinstance(value, dict): + return value + if hasattr(value, "to_dict"): + return value.to_dict() + return {} diff --git a/api/fields/conversation_variable_fields.py b/api/fields/conversation_variable_fields.py index 7d5e311591..c55014a368 100644 --- a/api/fields/conversation_variable_fields.py +++ b/api/fields/conversation_variable_fields.py @@ -1,4 +1,4 @@ -from flask_restx import Api, Namespace, fields +from flask_restx import Namespace, fields from libs.helper import TimestampField @@ -29,12 +29,12 @@ conversation_variable_infinite_scroll_pagination_fields = { } -def build_conversation_variable_model(api_or_ns: Api | Namespace): +def build_conversation_variable_model(api_or_ns: Namespace): """Build the conversation variable model for the API or Namespace.""" return api_or_ns.model("ConversationVariable", conversation_variable_fields) -def build_conversation_variable_infinite_scroll_pagination_model(api_or_ns: Api | Namespace): +def build_conversation_variable_infinite_scroll_pagination_model(api_or_ns: Namespace): """Build the conversation variable infinite scroll pagination model for the API or Namespace.""" # Build the nested variable model first conversation_variable_model = build_conversation_variable_model(api_or_ns) diff --git a/api/fields/end_user_fields.py b/api/fields/end_user_fields.py index ea43e3b5fd..5389b0213a 100644 --- a/api/fields/end_user_fields.py +++ b/api/fields/end_user_fields.py @@ -1,4 +1,4 @@ -from flask_restx import Api, Namespace, fields +from flask_restx import Namespace, fields simple_end_user_fields = { "id": fields.String, @@ -8,5 +8,5 @@ simple_end_user_fields = { } -def build_simple_end_user_model(api_or_ns: Api | Namespace): +def build_simple_end_user_model(api_or_ns: Namespace): return api_or_ns.model("SimpleEndUser", simple_end_user_fields) diff --git a/api/fields/file_fields.py b/api/fields/file_fields.py index a707500445..913fb675f9 100644 --- a/api/fields/file_fields.py +++ b/api/fields/file_fields.py @@ -1,93 +1,85 @@ -from flask_restx import Api, Namespace, fields +from __future__ import annotations -from libs.helper import TimestampField +from datetime import datetime -upload_config_fields = { - "file_size_limit": fields.Integer, - "batch_count_limit": fields.Integer, - "image_file_size_limit": fields.Integer, - "video_file_size_limit": fields.Integer, - "audio_file_size_limit": fields.Integer, - "workflow_file_upload_limit": fields.Integer, - "image_file_batch_limit": fields.Integer, - "single_chunk_attachment_limit": fields.Integer, -} +from pydantic import BaseModel, ConfigDict, field_validator -def build_upload_config_model(api_or_ns: Api | Namespace): - """Build the upload config model for the API or Namespace. 
- - Args: - api_or_ns: Flask-RestX Api or Namespace instance - - Returns: - The registered model - """ - return api_or_ns.model("UploadConfig", upload_config_fields) +class ResponseModel(BaseModel): + model_config = ConfigDict( + from_attributes=True, + extra="ignore", + populate_by_name=True, + serialize_by_alias=True, + protected_namespaces=(), + ) -file_fields = { - "id": fields.String, - "name": fields.String, - "size": fields.Integer, - "extension": fields.String, - "mime_type": fields.String, - "created_by": fields.String, - "created_at": TimestampField, - "preview_url": fields.String, - "source_url": fields.String, -} +def _to_timestamp(value: datetime | int | None) -> int | None: + if isinstance(value, datetime): + return int(value.timestamp()) + return value -def build_file_model(api_or_ns: Api | Namespace): - """Build the file model for the API or Namespace. - - Args: - api_or_ns: Flask-RestX Api or Namespace instance - - Returns: - The registered model - """ - return api_or_ns.model("File", file_fields) +class UploadConfig(ResponseModel): + file_size_limit: int + batch_count_limit: int + file_upload_limit: int | None = None + image_file_size_limit: int + video_file_size_limit: int + audio_file_size_limit: int + workflow_file_upload_limit: int + image_file_batch_limit: int + single_chunk_attachment_limit: int + attachment_image_file_size_limit: int | None = None -remote_file_info_fields = { - "file_type": fields.String(attribute="file_type"), - "file_length": fields.Integer(attribute="file_length"), -} +class FileResponse(ResponseModel): + id: str + name: str + size: int + extension: str | None = None + mime_type: str | None = None + created_by: str | None = None + created_at: int | None = None + preview_url: str | None = None + source_url: str | None = None + original_url: str | None = None + user_id: str | None = None + tenant_id: str | None = None + conversation_id: str | None = None + file_key: str | None = None + + @field_validator("created_at", mode="before") + @classmethod + def _normalize_created_at(cls, value: datetime | int | None) -> int | None: + return _to_timestamp(value) -def build_remote_file_info_model(api_or_ns: Api | Namespace): - """Build the remote file info model for the API or Namespace. - - Args: - api_or_ns: Flask-RestX Api or Namespace instance - - Returns: - The registered model - """ - return api_or_ns.model("RemoteFileInfo", remote_file_info_fields) +class RemoteFileInfo(ResponseModel): + file_type: str + file_length: int -file_fields_with_signed_url = { - "id": fields.String, - "name": fields.String, - "size": fields.Integer, - "extension": fields.String, - "url": fields.String, - "mime_type": fields.String, - "created_by": fields.String, - "created_at": TimestampField, -} +class FileWithSignedUrl(ResponseModel): + id: str + name: str + size: int + extension: str | None = None + url: str | None = None + mime_type: str | None = None + created_by: str | None = None + created_at: int | None = None + + @field_validator("created_at", mode="before") + @classmethod + def _normalize_created_at(cls, value: datetime | int | None) -> int | None: + return _to_timestamp(value) -def build_file_with_signed_url_model(api_or_ns: Api | Namespace): - """Build the file with signed URL model for the API or Namespace. 
- - Args: - api_or_ns: Flask-RestX Api or Namespace instance - - Returns: - The registered model - """ - return api_or_ns.model("FileWithSignedUrl", file_fields_with_signed_url) +__all__ = [ + "FileResponse", + "FileWithSignedUrl", + "RemoteFileInfo", + "UploadConfig", +] diff --git a/api/fields/member_fields.py b/api/fields/member_fields.py index 08e38a6931..25160927e6 100644 --- a/api/fields/member_fields.py +++ b/api/fields/member_fields.py @@ -1,4 +1,4 @@ -from flask_restx import Api, Namespace, fields +from flask_restx import Namespace, fields from libs.helper import AvatarUrlField, TimestampField @@ -9,7 +9,7 @@ simple_account_fields = { } -def build_simple_account_model(api_or_ns: Api | Namespace): +def build_simple_account_model(api_or_ns: Namespace): return api_or_ns.model("SimpleAccount", simple_account_fields) diff --git a/api/fields/message_fields.py b/api/fields/message_fields.py index a419da2e18..797f01c00c 100644 --- a/api/fields/message_fields.py +++ b/api/fields/message_fields.py @@ -1,77 +1,138 @@ -from flask_restx import Api, Namespace, fields +from __future__ import annotations -from fields.conversation_fields import message_file_fields -from libs.helper import TimestampField +from datetime import datetime +from typing import TypeAlias -from .raws import FilesContainedField +from pydantic import BaseModel, ConfigDict, Field, field_validator -feedback_fields = { - "rating": fields.String, -} +from core.file import File +from fields.conversation_fields import AgentThought, JSONValue, MessageFile + +JSONValueType: TypeAlias = JSONValue -def build_feedback_model(api_or_ns: Api | Namespace): - """Build the feedback model for the API or Namespace.""" - return api_or_ns.model("Feedback", feedback_fields) +class ResponseModel(BaseModel): + model_config = ConfigDict(from_attributes=True, extra="ignore") -agent_thought_fields = { - "id": fields.String, - "chain_id": fields.String, - "message_id": fields.String, - "position": fields.Integer, - "thought": fields.String, - "tool": fields.String, - "tool_labels": fields.Raw, - "tool_input": fields.String, - "created_at": TimestampField, - "observation": fields.String, - "files": fields.List(fields.String), -} +class SimpleFeedback(ResponseModel): + rating: str | None = None -def build_agent_thought_model(api_or_ns: Api | Namespace): - """Build the agent thought model for the API or Namespace.""" - return api_or_ns.model("AgentThought", agent_thought_fields) +class RetrieverResource(ResponseModel): + id: str + message_id: str + position: int + dataset_id: str | None = None + dataset_name: str | None = None + document_id: str | None = None + document_name: str | None = None + data_source_type: str | None = None + segment_id: str | None = None + score: float | None = None + hit_count: int | None = None + word_count: int | None = None + segment_position: int | None = None + index_node_hash: str | None = None + content: str | None = None + created_at: int | None = None + + @field_validator("created_at", mode="before") + @classmethod + def _normalize_created_at(cls, value: datetime | int | None) -> int | None: + if isinstance(value, datetime): + return to_timestamp(value) + return value -retriever_resource_fields = { - "id": fields.String, - "message_id": fields.String, - "position": fields.Integer, - "dataset_id": fields.String, - "dataset_name": fields.String, - "document_id": fields.String, - "document_name": fields.String, - "data_source_type": fields.String, - "segment_id": fields.String, - "score": fields.Float, - "hit_count": 
fields.Integer, - "word_count": fields.Integer, - "segment_position": fields.Integer, - "index_node_hash": fields.String, - "content": fields.String, - "created_at": TimestampField, -} +class MessageListItem(ResponseModel): + id: str + conversation_id: str + parent_message_id: str | None = None + inputs: dict[str, JSONValueType] + query: str + answer: str = Field(validation_alias="re_sign_file_url_answer") + feedback: SimpleFeedback | None = Field(default=None, validation_alias="user_feedback") + retriever_resources: list[RetrieverResource] + created_at: int | None = None + agent_thoughts: list[AgentThought] + message_files: list[MessageFile] + status: str + error: str | None = None + generation_detail: JSONValueType | None = Field(default=None, validation_alias="generation_detail_dict") -message_fields = { - "id": fields.String, - "conversation_id": fields.String, - "parent_message_id": fields.String, - "inputs": FilesContainedField, - "query": fields.String, - "answer": fields.String(attribute="re_sign_file_url_answer"), - "feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True), - "retriever_resources": fields.List(fields.Nested(retriever_resource_fields)), - "created_at": TimestampField, - "agent_thoughts": fields.List(fields.Nested(agent_thought_fields)), - "message_files": fields.List(fields.Nested(message_file_fields)), - "status": fields.String, - "error": fields.String, -} + @field_validator("inputs", mode="before") + @classmethod + def _normalize_inputs(cls, value: JSONValueType) -> JSONValueType: + return format_files_contained(value) -message_infinite_scroll_pagination_fields = { - "limit": fields.Integer, - "has_more": fields.Boolean, - "data": fields.List(fields.Nested(message_fields)), -} + @field_validator("created_at", mode="before") + @classmethod + def _normalize_created_at(cls, value: datetime | int | None) -> int | None: + if isinstance(value, datetime): + return to_timestamp(value) + return value + + +class WebMessageListItem(MessageListItem): + metadata: JSONValueType | None = Field(default=None, validation_alias="message_metadata_dict") + + +class MessageInfiniteScrollPagination(ResponseModel): + limit: int + has_more: bool + data: list[MessageListItem] + + +class WebMessageInfiniteScrollPagination(ResponseModel): + limit: int + has_more: bool + data: list[WebMessageListItem] + + +class SavedMessageItem(ResponseModel): + id: str + inputs: dict[str, JSONValueType] + query: str + answer: str + message_files: list[MessageFile] + feedback: SimpleFeedback | None = Field(default=None, validation_alias="user_feedback") + created_at: int | None = None + + @field_validator("inputs", mode="before") + @classmethod + def _normalize_inputs(cls, value: JSONValueType) -> JSONValueType: + return format_files_contained(value) + + @field_validator("created_at", mode="before") + @classmethod + def _normalize_created_at(cls, value: datetime | int | None) -> int | None: + if isinstance(value, datetime): + return to_timestamp(value) + return value + + +class SavedMessageInfiniteScrollPagination(ResponseModel): + limit: int + has_more: bool + data: list[SavedMessageItem] + + +class SuggestedQuestionsResponse(ResponseModel): + data: list[str] + + +def to_timestamp(value: datetime | None) -> int | None: + if value is None: + return None + return int(value.timestamp()) + + +def format_files_contained(value: JSONValueType) -> JSONValueType: + if isinstance(value, File): + return value.model_dump() + if isinstance(value, dict): + return {k: 
format_files_contained(v) for k, v in value.items()} + if isinstance(value, list): + return [format_files_contained(v) for v in value] + return value diff --git a/api/fields/rag_pipeline_fields.py b/api/fields/rag_pipeline_fields.py index f9e858c68b..97c02e7085 100644 --- a/api/fields/rag_pipeline_fields.py +++ b/api/fields/rag_pipeline_fields.py @@ -1,4 +1,4 @@ -from flask_restx import fields # type: ignore +from flask_restx import fields from fields.workflow_fields import workflow_partial_fields from libs.helper import AppIconUrlField, TimestampField diff --git a/api/fields/tag_fields.py b/api/fields/tag_fields.py index d5b7c86a04..e359a4408c 100644 --- a/api/fields/tag_fields.py +++ b/api/fields/tag_fields.py @@ -1,4 +1,4 @@ -from flask_restx import Api, Namespace, fields +from flask_restx import Namespace, fields dataset_tag_fields = { "id": fields.String, @@ -8,5 +8,5 @@ dataset_tag_fields = { } -def build_dataset_tag_fields(api_or_ns: Api | Namespace): +def build_dataset_tag_fields(api_or_ns: Namespace): return api_or_ns.model("DataSetTag", dataset_tag_fields) diff --git a/api/fields/workflow_app_log_fields.py b/api/fields/workflow_app_log_fields.py index 4cbdf6f0ca..0ebc03a98c 100644 --- a/api/fields/workflow_app_log_fields.py +++ b/api/fields/workflow_app_log_fields.py @@ -1,4 +1,4 @@ -from flask_restx import Api, Namespace, fields +from flask_restx import Namespace, fields from fields.end_user_fields import build_simple_end_user_model, simple_end_user_fields from fields.member_fields import build_simple_account_model, simple_account_fields @@ -17,7 +17,7 @@ workflow_app_log_partial_fields = { } -def build_workflow_app_log_partial_model(api_or_ns: Api | Namespace): +def build_workflow_app_log_partial_model(api_or_ns: Namespace): """Build the workflow app log partial model for the API or Namespace.""" workflow_run_model = build_workflow_run_for_log_model(api_or_ns) simple_account_model = build_simple_account_model(api_or_ns) @@ -43,7 +43,7 @@ workflow_app_log_pagination_fields = { } -def build_workflow_app_log_pagination_model(api_or_ns: Api | Namespace): +def build_workflow_app_log_pagination_model(api_or_ns: Namespace): """Build the workflow app log pagination model for the API or Namespace.""" # Build the nested partial model first workflow_app_log_partial_model = build_workflow_app_log_partial_model(api_or_ns) diff --git a/api/fields/workflow_run_fields.py b/api/fields/workflow_run_fields.py index 821ce62ecc..1b2948811b 100644 --- a/api/fields/workflow_run_fields.py +++ b/api/fields/workflow_run_fields.py @@ -1,4 +1,4 @@ -from flask_restx import Api, Namespace, fields +from flask_restx import Namespace, fields from fields.end_user_fields import simple_end_user_fields from fields.member_fields import simple_account_fields @@ -19,7 +19,7 @@ workflow_run_for_log_fields = { } -def build_workflow_run_for_log_model(api_or_ns: Api | Namespace): +def build_workflow_run_for_log_model(api_or_ns: Namespace): return api_or_ns.model("WorkflowRunForLog", workflow_run_for_log_fields) @@ -81,6 +81,7 @@ workflow_run_detail_fields = { "inputs": fields.Raw(attribute="inputs_dict"), "status": fields.String, "outputs": fields.Raw(attribute="outputs_dict"), + "outputs_as_generation": fields.Boolean, "error": fields.String, "elapsed_time": fields.Float, "total_tokens": fields.Integer, @@ -129,6 +130,7 @@ workflow_run_node_execution_fields = { "inputs_truncated": fields.Boolean, "outputs_truncated": fields.Boolean, "process_data_truncated": fields.Boolean, + "generation_detail": fields.Raw, } 
workflow_run_node_execution_list_fields = { diff --git a/api/libs/archive_storage.py b/api/libs/archive_storage.py new file mode 100644 index 0000000000..f84d226447 --- /dev/null +++ b/api/libs/archive_storage.py @@ -0,0 +1,347 @@ +""" +Archive Storage Client for S3-compatible storage. + +This module provides a dedicated storage client for archiving or exporting logs +to S3-compatible object storage. +""" + +import base64 +import datetime +import gzip +import hashlib +import logging +from collections.abc import Generator +from typing import Any, cast + +import boto3 +import orjson +from botocore.client import Config +from botocore.exceptions import ClientError + +from configs import dify_config + +logger = logging.getLogger(__name__) + + +class ArchiveStorageError(Exception): + """Base exception for archive storage operations.""" + + pass + + +class ArchiveStorageNotConfiguredError(ArchiveStorageError): + """Raised when archive storage is not properly configured.""" + + pass + + +class ArchiveStorage: + """ + S3-compatible storage client for archiving or exporting. + + This client provides methods for storing and retrieving archived data in JSONL+gzip format. + """ + + def __init__(self, bucket: str): + if not dify_config.ARCHIVE_STORAGE_ENABLED: + raise ArchiveStorageNotConfiguredError("Archive storage is not enabled") + + if not bucket: + raise ArchiveStorageNotConfiguredError("Archive storage bucket is not configured") + if not all( + [ + dify_config.ARCHIVE_STORAGE_ENDPOINT, + bucket, + dify_config.ARCHIVE_STORAGE_ACCESS_KEY, + dify_config.ARCHIVE_STORAGE_SECRET_KEY, + ] + ): + raise ArchiveStorageNotConfiguredError( + "Archive storage configuration is incomplete. " + "Required: ARCHIVE_STORAGE_ENDPOINT, ARCHIVE_STORAGE_ACCESS_KEY, " + "ARCHIVE_STORAGE_SECRET_KEY, and a bucket name" + ) + + self.bucket = bucket + self.client = boto3.client( + "s3", + endpoint_url=dify_config.ARCHIVE_STORAGE_ENDPOINT, + aws_access_key_id=dify_config.ARCHIVE_STORAGE_ACCESS_KEY, + aws_secret_access_key=dify_config.ARCHIVE_STORAGE_SECRET_KEY, + region_name=dify_config.ARCHIVE_STORAGE_REGION, + config=Config(s3={"addressing_style": "path"}), + ) + + # Verify bucket accessibility + try: + self.client.head_bucket(Bucket=self.bucket) + except ClientError as e: + error_code = e.response.get("Error", {}).get("Code") + if error_code == "404": + raise ArchiveStorageNotConfiguredError(f"Archive bucket '{self.bucket}' does not exist") + elif error_code == "403": + raise ArchiveStorageNotConfiguredError(f"Access denied to archive bucket '{self.bucket}'") + else: + raise ArchiveStorageError(f"Failed to access archive bucket: {e}") + + def put_object(self, key: str, data: bytes) -> str: + """ + Upload an object to the archive storage. + + Args: + key: Object key (path) within the bucket + data: Binary data to upload + + Returns: + MD5 checksum of the uploaded data + + Raises: + ArchiveStorageError: If upload fails + """ + checksum = hashlib.md5(data).hexdigest() + try: + self.client.put_object( + Bucket=self.bucket, + Key=key, + Body=data, + ContentMD5=self._content_md5(data), + ) + logger.debug("Uploaded object: %s (size=%d, checksum=%s)", key, len(data), checksum) + return checksum + except ClientError as e: + raise ArchiveStorageError(f"Failed to upload object '{key}': {e}") + + def get_object(self, key: str) -> bytes: + """ + Download an object from the archive storage. 
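+
+        Example (illustrative; the object key is hypothetical):
+            data = get_archive_storage().get_object("archives/run-123.jsonl.gz")
+            records = ArchiveStorage.deserialize_from_jsonl_gz(data)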
+ + Args: + key: Object key (path) within the bucket + + Returns: + Binary data of the object + + Raises: + ArchiveStorageError: If download fails + FileNotFoundError: If object does not exist + """ + try: + response = self.client.get_object(Bucket=self.bucket, Key=key) + return response["Body"].read() + except ClientError as e: + error_code = e.response.get("Error", {}).get("Code") + if error_code == "NoSuchKey": + raise FileNotFoundError(f"Archive object not found: {key}") + raise ArchiveStorageError(f"Failed to download object '{key}': {e}") + + def get_object_stream(self, key: str) -> Generator[bytes, None, None]: + """ + Stream an object from the archive storage. + + Args: + key: Object key (path) within the bucket + + Yields: + Chunks of binary data + + Raises: + ArchiveStorageError: If download fails + FileNotFoundError: If object does not exist + """ + try: + response = self.client.get_object(Bucket=self.bucket, Key=key) + yield from response["Body"].iter_chunks() + except ClientError as e: + error_code = e.response.get("Error", {}).get("Code") + if error_code == "NoSuchKey": + raise FileNotFoundError(f"Archive object not found: {key}") + raise ArchiveStorageError(f"Failed to stream object '{key}': {e}") + + def object_exists(self, key: str) -> bool: + """ + Check if an object exists in the archive storage. + + Args: + key: Object key (path) within the bucket + + Returns: + True if object exists, False otherwise + """ + try: + self.client.head_object(Bucket=self.bucket, Key=key) + return True + except ClientError: + return False + + def delete_object(self, key: str) -> None: + """ + Delete an object from the archive storage. + + Args: + key: Object key (path) within the bucket + + Raises: + ArchiveStorageError: If deletion fails + """ + try: + self.client.delete_object(Bucket=self.bucket, Key=key) + logger.debug("Deleted object: %s", key) + except ClientError as e: + raise ArchiveStorageError(f"Failed to delete object '{key}': {e}") + + def generate_presigned_url(self, key: str, expires_in: int = 3600) -> str: + """ + Generate a pre-signed URL for downloading an object. + + Args: + key: Object key (path) within the bucket + expires_in: URL validity duration in seconds (default: 1 hour) + + Returns: + Pre-signed URL string. + + Raises: + ArchiveStorageError: If generation fails + """ + try: + return self.client.generate_presigned_url( + ClientMethod="get_object", + Params={"Bucket": self.bucket, "Key": key}, + ExpiresIn=expires_in, + ) + except ClientError as e: + raise ArchiveStorageError(f"Failed to generate pre-signed URL for '{key}': {e}") + + def list_objects(self, prefix: str) -> list[str]: + """ + List objects under a given prefix. + + Args: + prefix: Object key prefix to filter by + + Returns: + List of object keys matching the prefix + """ + keys = [] + paginator = self.client.get_paginator("list_objects_v2") + + try: + for page in paginator.paginate(Bucket=self.bucket, Prefix=prefix): + for obj in page.get("Contents", []): + keys.append(obj["Key"]) + except ClientError as e: + raise ArchiveStorageError(f"Failed to list objects with prefix '{prefix}': {e}") + + return keys + + @staticmethod + def _content_md5(data: bytes) -> str: + """Calculate base64-encoded MD5 for Content-MD5 header.""" + return base64.b64encode(hashlib.md5(data).digest()).decode() + + @staticmethod + def serialize_to_jsonl_gz(records: list[dict[str, Any]]) -> bytes: + """ + Serialize records to gzipped JSONL format. 
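+
+        Example (illustrative round-trip):
+            blob = ArchiveStorage.serialize_to_jsonl_gz([{"id": "1"}])
+            assert ArchiveStorage.deserialize_from_jsonl_gz(blob) == [{"id": "1"}]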
+ + Args: + records: List of dictionaries to serialize + + Returns: + Gzipped JSONL bytes + """ + lines = [] + for record in records: + # Convert datetime objects to ISO format strings + serialized = ArchiveStorage._serialize_record(record) + lines.append(orjson.dumps(serialized)) + + jsonl_content = b"\n".join(lines) + if jsonl_content: + jsonl_content += b"\n" + + return gzip.compress(jsonl_content) + + @staticmethod + def deserialize_from_jsonl_gz(data: bytes) -> list[dict[str, Any]]: + """ + Deserialize gzipped JSONL data to records. + + Args: + data: Gzipped JSONL bytes + + Returns: + List of dictionaries + """ + jsonl_content = gzip.decompress(data) + records = [] + + for line in jsonl_content.splitlines(): + if line: + records.append(orjson.loads(line)) + + return records + + @staticmethod + def _serialize_record(record: dict[str, Any]) -> dict[str, Any]: + """Serialize a single record, converting special types.""" + + def _serialize(item: Any) -> Any: + if isinstance(item, datetime.datetime): + return item.isoformat() + if isinstance(item, dict): + return {key: _serialize(value) for key, value in item.items()} + if isinstance(item, list): + return [_serialize(value) for value in item] + return item + + return cast(dict[str, Any], _serialize(record)) + + @staticmethod + def compute_checksum(data: bytes) -> str: + """Compute MD5 checksum of data.""" + return hashlib.md5(data).hexdigest() + + +# Singleton instance (lazy initialization) +_archive_storage: ArchiveStorage | None = None +_export_storage: ArchiveStorage | None = None + + +def get_archive_storage() -> ArchiveStorage: + """ + Get the archive storage singleton instance. + + Returns: + ArchiveStorage instance + + Raises: + ArchiveStorageNotConfiguredError: If archive storage is not configured + """ + global _archive_storage + if _archive_storage is None: + archive_bucket = dify_config.ARCHIVE_STORAGE_ARCHIVE_BUCKET + if not archive_bucket: + raise ArchiveStorageNotConfiguredError( + "Archive storage bucket is not configured. Required: ARCHIVE_STORAGE_ARCHIVE_BUCKET" + ) + _archive_storage = ArchiveStorage(bucket=archive_bucket) + return _archive_storage + + +def get_export_storage() -> ArchiveStorage: + """ + Get the export storage singleton instance. + + Returns: + ArchiveStorage instance + """ + global _export_storage + if _export_storage is None: + export_bucket = dify_config.ARCHIVE_STORAGE_EXPORT_BUCKET + if not export_bucket: + raise ArchiveStorageNotConfiguredError( + "Archive export bucket is not configured. Required: ARCHIVE_STORAGE_EXPORT_BUCKET" + ) + _export_storage = ArchiveStorage(bucket=export_bucket) + return _export_storage diff --git a/api/libs/broadcast_channel/channel.py b/api/libs/broadcast_channel/channel.py index 5bbf0c79a3..d4cb3e9971 100644 --- a/api/libs/broadcast_channel/channel.py +++ b/api/libs/broadcast_channel/channel.py @@ -2,6 +2,8 @@ Broadcast channel for Pub/Sub messaging. """ +from __future__ import annotations + import types from abc import abstractmethod from collections.abc import Iterator @@ -129,6 +131,6 @@ class BroadcastChannel(Protocol): """ @abstractmethod - def topic(self, topic: str) -> "Topic": + def topic(self, topic: str) -> Topic: """topic returns a `Topic` instance for the given topic name.""" ... 
diff --git a/api/libs/broadcast_channel/redis/channel.py b/api/libs/broadcast_channel/redis/channel.py index 1fc3db8156..5bb4f579c1 100644 --- a/api/libs/broadcast_channel/redis/channel.py +++ b/api/libs/broadcast_channel/redis/channel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from libs.broadcast_channel.channel import Producer, Subscriber, Subscription from redis import Redis @@ -20,7 +22,7 @@ class BroadcastChannel: ): self._client = redis_client - def topic(self, topic: str) -> "Topic": + def topic(self, topic: str) -> Topic: return Topic(self._client, topic) diff --git a/api/libs/broadcast_channel/redis/sharded_channel.py b/api/libs/broadcast_channel/redis/sharded_channel.py index 16e3a80ee1..d190c51bbc 100644 --- a/api/libs/broadcast_channel/redis/sharded_channel.py +++ b/api/libs/broadcast_channel/redis/sharded_channel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from libs.broadcast_channel.channel import Producer, Subscriber, Subscription from redis import Redis @@ -18,7 +20,7 @@ class ShardedRedisBroadcastChannel: ): self._client = redis_client - def topic(self, topic: str) -> "ShardedTopic": + def topic(self, topic: str) -> ShardedTopic: return ShardedTopic(self._client, topic) diff --git a/api/libs/email_i18n.py b/api/libs/email_i18n.py index ff74ccbe8e..0828cf80bf 100644 --- a/api/libs/email_i18n.py +++ b/api/libs/email_i18n.py @@ -6,6 +6,8 @@ in Dify. It follows Domain-Driven Design principles with proper type hints and eliminates the need for repetitive language switching logic. """ +from __future__ import annotations + from dataclasses import dataclass from enum import StrEnum, auto from typing import Any, Protocol @@ -53,7 +55,7 @@ class EmailLanguage(StrEnum): ZH_HANS = "zh-Hans" @classmethod - def from_language_code(cls, language_code: str) -> "EmailLanguage": + def from_language_code(cls, language_code: str) -> EmailLanguage: """Convert a language code to EmailLanguage with fallback to English.""" if language_code == "zh-Hans": return cls.ZH_HANS diff --git a/api/libs/encryption.py b/api/libs/encryption.py new file mode 100644 index 0000000000..81be8cce97 --- /dev/null +++ b/api/libs/encryption.py @@ -0,0 +1,66 @@ +""" +Field Encoding/Decoding Utilities + +Provides Base64 decoding for sensitive fields (password, verification code) +received from the frontend. + +Note: This uses Base64 encoding for obfuscation, not cryptographic encryption. +Real security relies on HTTPS for transport layer encryption. +""" + +import base64 +import logging + +logger = logging.getLogger(__name__) + + +class FieldEncryption: + """Handle decoding of sensitive fields during transmission""" + + @classmethod + def decrypt_field(cls, encoded_text: str) -> str | None: + """ + Decode Base64 encoded field from frontend. 
+
+        Args:
+            encoded_text: Base64-encoded text from frontend
+
+        Returns:
+            Decoded plaintext, or None if decoding fails
+        """
+        try:
+            # Decode base64
+            decoded_bytes = base64.b64decode(encoded_text)
+            decoded_text = decoded_bytes.decode("utf-8")
+            logger.debug("Field decoding successful")
+            return decoded_text
+
+        except Exception:
+            # Decoding failed - return None to trigger error in caller
+            return None
+
+    @classmethod
+    def decrypt_password(cls, encrypted_password: str) -> str | None:
+        """
+        Decode Base64-encoded password field
+
+        Args:
+            encrypted_password: Base64-encoded password from frontend
+
+        Returns:
+            Decoded password, or None if decoding fails
+        """
+        return cls.decrypt_field(encrypted_password)
+
+    @classmethod
+    def decrypt_verification_code(cls, encrypted_code: str) -> str | None:
+        """
+        Decode Base64-encoded verification code field
+
+        Args:
+            encrypted_code: Base64-encoded code from frontend
+
+        Returns:
+            Decoded code, or None if decoding fails
+        """
+        return cls.decrypt_field(encrypted_code)
diff --git a/api/libs/external_api.py b/api/libs/external_api.py
index 31ca2b3e08..e8592407c3 100644
--- a/api/libs/external_api.py
+++ b/api/libs/external_api.py
@@ -1,5 +1,4 @@
 import re
-import sys
 from collections.abc import Mapping
 from typing import Any
@@ -109,11 +108,8 @@ def register_external_error_handlers(api: Api):
        data.setdefault("code", "unknown")
        data.setdefault("status", status_code)

-        # Log stack
-        exc_info: Any = sys.exc_info()
-        if exc_info[1] is None:
-            exc_info = (None, None, None)
-        current_app.log_exception(exc_info)
+        # Note: exception logging is handled automatically by the Flask/Flask-RESTX framework.
+        # The explicit log_exception call was removed to avoid duplicate log entries.

        return data, status_code

@@ -131,28 +127,12 @@ class ExternalApi(Api):
    }

    def __init__(self, app: Blueprint | Flask, *args, **kwargs):
-        import logging
-        import os
-
        kwargs.setdefault("authorizations", self._authorizations)
        kwargs.setdefault("security", "Bearer")
-
-        # Security: Use computed swagger_ui_enabled which respects DEPLOY_ENV
-        swagger_enabled = dify_config.swagger_ui_enabled
-        kwargs["add_specs"] = swagger_enabled
-        kwargs["doc"] = dify_config.SWAGGER_UI_PATH if swagger_enabled else False
+        kwargs["add_specs"] = dify_config.SWAGGER_UI_ENABLED
+        kwargs["doc"] = dify_config.SWAGGER_UI_PATH if dify_config.SWAGGER_UI_ENABLED else False

        # manual separate call on construction and init_app to ensure configs in kwargs effective
        super().__init__(app=None, *args, **kwargs)
        self.init_app(app, **kwargs)

        register_external_error_handlers(self)
-
-        # Security: Log warning when Swagger is enabled in production environment
-        deploy_env = os.environ.get("DEPLOY_ENV", "PRODUCTION")
-        if swagger_enabled and deploy_env.upper() == "PRODUCTION":
-            logger = logging.getLogger(__name__)
-            logger.warning(
-                "SECURITY WARNING: Swagger UI is ENABLED in PRODUCTION environment. "
-                "This may expose sensitive API documentation. "
-                "Set SWAGGER_UI_ENABLED=false or remove the explicit setting to disable."
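To make the contract of the new decoding helper concrete, here is a small sketch of both sides of the exchange; the import path follows the api/libs/encryption.py header above, and the sample password is illustrative:

```python
import base64

from libs.encryption import FieldEncryption

# What the frontend is expected to do before submitting (obfuscation only;
# as the module docstring stresses, HTTPS provides the actual protection).
encoded = base64.b64encode("S3cret!pass".encode("utf-8")).decode("ascii")

# Server side: plaintext on success, None on malformed input, so the caller
# can reject the request with an explicit error.
assert FieldEncryption.decrypt_password(encoded) == "S3cret!pass"
assert FieldEncryption.decrypt_field("%%not-base64%%") is None
```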
- ) diff --git a/api/libs/helper.py b/api/libs/helper.py index abc81d1fde..07c4823727 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -11,6 +11,7 @@ from collections.abc import Generator, Mapping from datetime import datetime from hashlib import sha256 from typing import TYPE_CHECKING, Annotated, Any, Optional, Union, cast +from uuid import UUID from zoneinfo import available_timezones from flask import Response, stream_with_context @@ -31,6 +32,38 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) +def escape_like_pattern(pattern: str) -> str: + """ + Escape special characters in a string for safe use in SQL LIKE patterns. + + This function escapes the special characters used in SQL LIKE patterns: + - Backslash (\\) -> \\ + - Percent (%) -> \\% + - Underscore (_) -> \\_ + + The escaped pattern can then be safely used in SQL LIKE queries with the + ESCAPE '\\' clause to prevent SQL injection via LIKE wildcards. + + Args: + pattern: The string pattern to escape + + Returns: + Escaped string safe for use in SQL LIKE queries + + Examples: + >>> escape_like_pattern("50% discount") + '50\\% discount' + >>> escape_like_pattern("test_data") + 'test\\_data' + >>> escape_like_pattern("path\\to\\file") + 'path\\\\to\\\\file' + """ + if not pattern: + return pattern + # Escape backslash first, then percent and underscore + return pattern.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_") + + def extract_tenant_id(user: Union["Account", "EndUser"]) -> str | None: """ Extract tenant_id from Account or EndUser object. @@ -119,6 +152,19 @@ def uuid_value(value: Any) -> str: raise ValueError(error) +def normalize_uuid(value: str | UUID) -> str: + if not value: + return "" + + try: + return uuid_value(value) + except ValueError as exc: + raise ValueError("must be a valid UUID") from exc + + +UUIDStrOrEmpty = Annotated[str, AfterValidator(normalize_uuid)] + + def alphanumeric(value: str): # check if the value is alphanumeric and underlined if re.match(r"^[a-zA-Z0-9_]+$", value): @@ -184,7 +230,7 @@ def timezone(timezone_string): def convert_datetime_to_date(field, target_timezone: str = ":tz"): if dify_config.DB_TYPE == "postgresql": return f"DATE(DATE_TRUNC('day', {field} AT TIME ZONE 'UTC' AT TIME ZONE {target_timezone}))" - elif dify_config.DB_TYPE == "mysql": + elif dify_config.DB_TYPE in ["mysql", "oceanbase", "seekdb"]: return f"DATE(CONVERT_TZ({field}, 'UTC', {target_timezone}))" else: raise NotImplementedError(f"Unsupported database type: {dify_config.DB_TYPE}") diff --git a/api/migrations/versions/00bacef91f18_rename_api_provider_description.py b/api/migrations/versions/00bacef91f18_rename_api_provider_description.py index 17ed067d81..657d28f896 100644 --- a/api/migrations/versions/00bacef91f18_rename_api_provider_description.py +++ b/api/migrations/versions/00bacef91f18_rename_api_provider_description.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '00bacef91f18' down_revision = '8ec536f3c800' @@ -23,31 +20,17 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('description', sa.Text(), nullable=False)) - batch_op.drop_column('description_str') - else: - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('description', models.types.LongText(), nullable=False)) - batch_op.drop_column('description_str') + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('description', models.types.LongText(), nullable=False)) + batch_op.drop_column('description_str') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('description_str', sa.TEXT(), autoincrement=False, nullable=False)) - batch_op.drop_column('description') - else: - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('description_str', models.types.LongText(), autoincrement=False, nullable=False)) - batch_op.drop_column('description') + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('description_str', models.types.LongText(), autoincrement=False, nullable=False)) + batch_op.drop_column('description') # ### end Alembic commands ### diff --git a/api/migrations/versions/114eed84c228_remove_tool_id_from_model_invoke.py b/api/migrations/versions/114eed84c228_remove_tool_id_from_model_invoke.py index ed70bf5d08..912d9dbfa4 100644 --- a/api/migrations/versions/114eed84c228_remove_tool_id_from_model_invoke.py +++ b/api/migrations/versions/114eed84c228_remove_tool_id_from_model_invoke.py @@ -7,14 +7,10 @@ Create Date: 2024-01-10 04:40:57.257824 """ import sqlalchemy as sa from alembic import op -from sqlalchemy.dialects import postgresql import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '114eed84c228' down_revision = 'c71211c8f604' @@ -32,13 +28,7 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op: - batch_op.add_column(sa.Column('tool_id', postgresql.UUID(), autoincrement=False, nullable=False)) - else: - with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op: - batch_op.add_column(sa.Column('tool_id', models.types.StringUUID(), autoincrement=False, nullable=False)) + with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op: + batch_op.add_column(sa.Column('tool_id', models.types.StringUUID(), autoincrement=False, nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/161cadc1af8d_add_dataset_permission_tenant_id.py b/api/migrations/versions/161cadc1af8d_add_dataset_permission_tenant_id.py index 509bd5d0e8..0ca905129d 100644 --- a/api/migrations/versions/161cadc1af8d_add_dataset_permission_tenant_id.py +++ b/api/migrations/versions/161cadc1af8d_add_dataset_permission_tenant_id.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. 
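The escape_like_pattern helper added to api/libs/helper.py above is meant to pair with an explicit ESCAPE clause; a short sketch of the intended call site, where App stands in for any mapped model with a string column (the model import is illustrative):

```python
from sqlalchemy import select

from libs.helper import escape_like_pattern
from models.model import App  # illustrative; any mapped class works

user_input = "50%_off"  # user-typed % and _ should match literally
pattern = f"%{escape_like_pattern(user_input)}%"

# Pass the escape character explicitly so it matches the backslash
# convention used by the helper.
stmt = select(App).where(App.name.like(pattern, escape="\\"))
```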
revision = '161cadc1af8d'
down_revision = '7e6a8693e07a'
@@ -23,16 +20,9 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
-    conn = op.get_bind()
-
-    if _is_pg(conn):
-        with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
-            # Step 1: Add column without NOT NULL constraint
-            op.add_column('dataset_permissions', sa.Column('tenant_id', sa.UUID(), nullable=False))
-    else:
-        with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
-            # Step 1: Add column without NOT NULL constraint
-            op.add_column('dataset_permissions', sa.Column('tenant_id', models.types.StringUUID(), nullable=False))
+    with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
+        # Add the tenant_id column (NOT NULL)
+        op.add_column('dataset_permissions', sa.Column('tenant_id', models.types.StringUUID(), nullable=False))

    # ### end Alembic commands ###

diff --git a/api/migrations/versions/2024_09_24_0922-6af6a521a53e_update_retrieval_resource.py b/api/migrations/versions/2024_09_24_0922-6af6a521a53e_update_retrieval_resource.py
index 0767b725f6..be1b42f883 100644
--- a/api/migrations/versions/2024_09_24_0922-6af6a521a53e_update_retrieval_resource.py
+++ b/api/migrations/versions/2024_09_24_0922-6af6a521a53e_update_retrieval_resource.py
@@ -9,11 +9,6 @@ from alembic import op
 import models.types


-def _is_pg(conn):
-    return conn.dialect.name == "postgresql"
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
# revision identifiers, used by Alembic.
revision = '6af6a521a53e'
down_revision = 'd57ba9ebb251'
@@ -23,58 +18,30 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
-    conn = op.get_bind()
-
-    if _is_pg(conn):
-        with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
-            batch_op.alter_column('document_id',
-                   existing_type=sa.UUID(),
-                   nullable=True)
-            batch_op.alter_column('data_source_type',
-                   existing_type=sa.TEXT(),
-                   nullable=True)
-            batch_op.alter_column('segment_id',
-                   existing_type=sa.UUID(),
-                   nullable=True)
-    else:
-        with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
-            batch_op.alter_column('document_id',
-                   existing_type=models.types.StringUUID(),
-                   nullable=True)
-            batch_op.alter_column('data_source_type',
-                   existing_type=models.types.LongText(),
-                   nullable=True)
-            batch_op.alter_column('segment_id',
-                   existing_type=models.types.StringUUID(),
-                   nullable=True)
+    with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
+        batch_op.alter_column('document_id',
+               existing_type=models.types.StringUUID(),
+               nullable=True)
+        batch_op.alter_column('data_source_type',
+               existing_type=models.types.LongText(),
+               nullable=True)
+        batch_op.alter_column('segment_id',
+               existing_type=models.types.StringUUID(),
+               nullable=True)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust!
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op: - batch_op.alter_column('segment_id', - existing_type=sa.UUID(), - nullable=False) - batch_op.alter_column('data_source_type', - existing_type=sa.TEXT(), - nullable=False) - batch_op.alter_column('document_id', - existing_type=sa.UUID(), - nullable=False) - else: - with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op: - batch_op.alter_column('segment_id', - existing_type=models.types.StringUUID(), - nullable=False) - batch_op.alter_column('data_source_type', - existing_type=models.types.LongText(), - nullable=False) - batch_op.alter_column('document_id', - existing_type=models.types.StringUUID(), - nullable=False) + with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op: + batch_op.alter_column('segment_id', + existing_type=models.types.StringUUID(), + nullable=False) + batch_op.alter_column('data_source_type', + existing_type=models.types.LongText(), + nullable=False) + batch_op.alter_column('document_id', + existing_type=models.types.StringUUID(), + nullable=False) # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_11_01_0434-d3f6769a94a3_add_upload_files_source_url.py b/api/migrations/versions/2024_11_01_0434-d3f6769a94a3_add_upload_files_source_url.py index a749c8bddf..5d12419bf7 100644 --- a/api/migrations/versions/2024_11_01_0434-d3f6769a94a3_add_upload_files_source_url.py +++ b/api/migrations/versions/2024_11_01_0434-d3f6769a94a3_add_upload_files_source_url.py @@ -8,7 +8,6 @@ Create Date: 2024-11-01 04:34:23.816198 from alembic import op import models as models import sqlalchemy as sa -from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. 
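Every migration edit in this stretch follows the same pattern: the per-dialect branch on conn.dialect.name is deleted, and the dialect-aware column types in models.types carry the difference instead. A schematic sketch of the before/after shape, with an invented table name:

```python
import sqlalchemy as sa
from alembic import op

import models.types


def upgrade():
    # Previously: if _is_pg(conn) chose sa.UUID()/sa.TEXT() on PostgreSQL and
    # the models.types equivalents elsewhere. One branch now suffices, because
    # StringUUID/LongText resolve to the right DDL per dialect.
    with op.batch_alter_table('examples', schema=None) as batch_op:  # 'examples' is illustrative
        batch_op.add_column(sa.Column('owner_id', models.types.StringUUID(), nullable=True))
        batch_op.add_column(sa.Column('payload', models.types.LongText(), nullable=True))
```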
revision = 'd3f6769a94a3' diff --git a/api/migrations/versions/2024_11_01_0622-d07474999927_update_type_of_custom_disclaimer_to_text.py b/api/migrations/versions/2024_11_01_0622-d07474999927_update_type_of_custom_disclaimer_to_text.py index 45842295ea..a49d6a52f6 100644 --- a/api/migrations/versions/2024_11_01_0622-d07474999927_update_type_of_custom_disclaimer_to_text.py +++ b/api/migrations/versions/2024_11_01_0622-d07474999927_update_type_of_custom_disclaimer_to_text.py @@ -28,85 +28,45 @@ def upgrade(): op.execute("UPDATE sites SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL") op.execute("UPDATE tool_api_providers SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL") - if _is_pg(conn): - with op.batch_alter_table('recommended_apps', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=sa.VARCHAR(length=255), - type_=sa.TEXT(), - nullable=False) + with op.batch_alter_table('recommended_apps', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=sa.VARCHAR(length=255), + type_=models.types.LongText(), + nullable=False) - with op.batch_alter_table('sites', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=sa.VARCHAR(length=255), - type_=sa.TEXT(), - nullable=False) + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=sa.VARCHAR(length=255), + type_=models.types.LongText(), + nullable=False) - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=sa.VARCHAR(length=255), - type_=sa.TEXT(), - nullable=False) - else: - with op.batch_alter_table('recommended_apps', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=sa.VARCHAR(length=255), - type_=models.types.LongText(), - nullable=False) - - with op.batch_alter_table('sites', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=sa.VARCHAR(length=255), - type_=models.types.LongText(), - nullable=False) - - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=sa.VARCHAR(length=255), - type_=models.types.LongText(), - nullable=False) + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=sa.VARCHAR(length=255), + type_=models.types.LongText(), + nullable=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=sa.TEXT(), - type_=sa.VARCHAR(length=255), - nullable=True) + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=models.types.LongText(), + type_=sa.VARCHAR(length=255), + nullable=True) - with op.batch_alter_table('sites', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=sa.TEXT(), - type_=sa.VARCHAR(length=255), - nullable=True) + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=models.types.LongText(), + type_=sa.VARCHAR(length=255), + nullable=True) - with op.batch_alter_table('recommended_apps', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=sa.TEXT(), - type_=sa.VARCHAR(length=255), - nullable=True) - else: - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=models.types.LongText(), - type_=sa.VARCHAR(length=255), - nullable=True) - - with op.batch_alter_table('sites', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=models.types.LongText(), - type_=sa.VARCHAR(length=255), - nullable=True) - - with op.batch_alter_table('recommended_apps', schema=None) as batch_op: - batch_op.alter_column('custom_disclaimer', - existing_type=models.types.LongText(), - type_=sa.VARCHAR(length=255), - nullable=True) + with op.batch_alter_table('recommended_apps', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=models.types.LongText(), + type_=sa.VARCHAR(length=255), + nullable=True) # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_11_01_0623-09a8d1878d9b_update_workflows_graph_features_and_.py b/api/migrations/versions/2024_11_01_0623-09a8d1878d9b_update_workflows_graph_features_and_.py index fdd8984029..8a36c9c4a5 100644 --- a/api/migrations/versions/2024_11_01_0623-09a8d1878d9b_update_workflows_graph_features_and_.py +++ b/api/migrations/versions/2024_11_01_0623-09a8d1878d9b_update_workflows_graph_features_and_.py @@ -49,57 +49,33 @@ def upgrade(): op.execute("UPDATE workflows SET updated_at = created_at WHERE updated_at IS NULL") op.execute("UPDATE workflows SET graph = '' WHERE graph IS NULL") op.execute("UPDATE workflows SET features = '' WHERE features IS NULL") - if _is_pg(conn): - with op.batch_alter_table('workflows', schema=None) as batch_op: - batch_op.alter_column('graph', - existing_type=sa.TEXT(), - nullable=False) - batch_op.alter_column('features', - existing_type=sa.TEXT(), - nullable=False) - batch_op.alter_column('updated_at', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - else: - with op.batch_alter_table('workflows', schema=None) as batch_op: - batch_op.alter_column('graph', - existing_type=models.types.LongText(), - nullable=False) - batch_op.alter_column('features', - existing_type=models.types.LongText(), - nullable=False) - batch_op.alter_column('updated_at', - existing_type=sa.TIMESTAMP(), - nullable=False) + + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.alter_column('graph', + existing_type=models.types.LongText(), + nullable=False) + batch_op.alter_column('features', + existing_type=models.types.LongText(), + nullable=False) + 
batch_op.alter_column('updated_at',
+                          existing_type=sa.TIMESTAMP(),
+                          nullable=False)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
-
-    if _is_pg(conn):
-        with op.batch_alter_table('workflows', schema=None) as batch_op:
-            batch_op.alter_column('updated_at',
-                              existing_type=postgresql.TIMESTAMP(),
-                              nullable=True)
-            batch_op.alter_column('features',
-                              existing_type=sa.TEXT(),
-                              nullable=True)
-            batch_op.alter_column('graph',
-                              existing_type=sa.TEXT(),
-                              nullable=True)
-    else:
-        with op.batch_alter_table('workflows', schema=None) as batch_op:
-            batch_op.alter_column('updated_at',
-                              existing_type=sa.TIMESTAMP(),
-                              nullable=True)
-            batch_op.alter_column('features',
-                              existing_type=models.types.LongText(),
-                              nullable=True)
-            batch_op.alter_column('graph',
-                              existing_type=models.types.LongText(),
-                              nullable=True)
+    with op.batch_alter_table('workflows', schema=None) as batch_op:
+        batch_op.alter_column('updated_at',
+                          existing_type=sa.TIMESTAMP(),
+                          nullable=True)
+        batch_op.alter_column('features',
+                          existing_type=models.types.LongText(),
+                          nullable=True)
+        batch_op.alter_column('graph',
+                          existing_type=models.types.LongText(),
+                          nullable=True)

    if _is_pg(conn):
        with op.batch_alter_table('messages', schema=None) as batch_op:
diff --git a/api/migrations/versions/2025_08_13_1605-0e154742a5fa_add_provider_model_multi_credential.py b/api/migrations/versions/2025_08_13_1605-0e154742a5fa_add_provider_model_multi_credential.py
index 16ca902726..1fc4a64df1 100644
--- a/api/migrations/versions/2025_08_13_1605-0e154742a5fa_add_provider_model_multi_credential.py
+++ b/api/migrations/versions/2025_08_13_1605-0e154742a5fa_add_provider_model_multi_credential.py
@@ -86,57 +86,30 @@ def upgrade():

def migrate_existing_provider_models_data():
    """migrate provider_models table data to provider_model_credentials"""
-    conn = op.get_bind()
-
-    # Define table structure for data manipulation
-    if _is_pg(conn):
-        provider_models_table = table('provider_models',
-            column('id', models.types.StringUUID()),
-            column('tenant_id', models.types.StringUUID()),
-            column('provider_name', sa.String()),
-            column('model_name', sa.String()),
-            column('model_type', sa.String()),
-            column('encrypted_config', sa.Text()),
-            column('created_at', sa.DateTime()),
-            column('updated_at', sa.DateTime()),
-            column('credential_id', models.types.StringUUID()),
-        )
-    else:
-        provider_models_table = table('provider_models',
-            column('id', models.types.StringUUID()),
-            column('tenant_id', models.types.StringUUID()),
-            column('provider_name', sa.String()),
-            column('model_name', sa.String()),
-            column('model_type', sa.String()),
-            column('encrypted_config', models.types.LongText()),
-            column('created_at', sa.DateTime()),
-            column('updated_at', sa.DateTime()),
-            column('credential_id', models.types.StringUUID()),
-        )
+    # Define table structure for data manipulation
+    provider_models_table = table('provider_models',
+        column('id', models.types.StringUUID()),
+        column('tenant_id', models.types.StringUUID()),
+        column('provider_name', sa.String()),
+        column('model_name', sa.String()),
+        column('model_type', sa.String()),
+        column('encrypted_config', models.types.LongText()),
+        column('created_at', sa.DateTime()),
+        column('updated_at', sa.DateTime()),
+        column('credential_id', models.types.StringUUID()),
+    )

-    if _is_pg(conn):
-        provider_model_credentials_table = table('provider_model_credentials',
-            column('id', models.types.StringUUID()),
-            column('tenant_id',
models.types.StringUUID()), - column('provider_name', sa.String()), - column('model_name', sa.String()), - column('model_type', sa.String()), - column('credential_name', sa.String()), - column('encrypted_config', sa.Text()), - column('created_at', sa.DateTime()), - column('updated_at', sa.DateTime()) - ) - else: - provider_model_credentials_table = table('provider_model_credentials', - column('id', models.types.StringUUID()), - column('tenant_id', models.types.StringUUID()), - column('provider_name', sa.String()), - column('model_name', sa.String()), - column('model_type', sa.String()), - column('credential_name', sa.String()), - column('encrypted_config', models.types.LongText()), - column('created_at', sa.DateTime()), - column('updated_at', sa.DateTime()) - ) + provider_model_credentials_table = table('provider_model_credentials', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('model_name', sa.String()), + column('model_type', sa.String()), + column('credential_name', sa.String()), + column('encrypted_config', models.types.LongText()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()) + ) # Get database connection @@ -183,14 +156,8 @@ def migrate_existing_provider_models_data(): def downgrade(): # Re-add encrypted_config column to provider_models table - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('provider_models', schema=None) as batch_op: - batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('provider_models', schema=None) as batch_op: - batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True)) + with op.batch_alter_table('provider_models', schema=None) as batch_op: + batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True)) if not context.is_offline_mode(): # Migrate data back from provider_model_credentials to provider_models diff --git a/api/migrations/versions/2025_08_20_1747-8d289573e1da_add_oauth_provider_apps.py b/api/migrations/versions/2025_08_20_1747-8d289573e1da_add_oauth_provider_apps.py index 75b4d61173..79fe9d9bba 100644 --- a/api/migrations/versions/2025_08_20_1747-8d289573e1da_add_oauth_provider_apps.py +++ b/api/migrations/versions/2025_08_20_1747-8d289573e1da_add_oauth_provider_apps.py @@ -8,7 +8,6 @@ Create Date: 2025-08-20 17:47:17.015695 from alembic import op import models as models import sqlalchemy as sa -from libs.uuid_utils import uuidv7 def _is_pg(conn): diff --git a/api/migrations/versions/2025_09_08_1007-c20211f18133_add_headers_to_mcp_provider.py b/api/migrations/versions/2025_09_08_1007-c20211f18133_add_headers_to_mcp_provider.py index 4f472fe4b4..cf2b973d2d 100644 --- a/api/migrations/versions/2025_09_08_1007-c20211f18133_add_headers_to_mcp_provider.py +++ b/api/migrations/versions/2025_09_08_1007-c20211f18133_add_headers_to_mcp_provider.py @@ -9,8 +9,6 @@ from alembic import op import models as models -def _is_pg(conn): - return conn.dialect.name == "postgresql" import sqlalchemy as sa @@ -23,12 +21,7 @@ depends_on = None def upgrade(): # Add encrypted_headers column to tool_mcp_providers table - conn = op.get_bind() - - if _is_pg(conn): - op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', sa.Text(), nullable=True)) - else: - op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', models.types.LongText(), nullable=True)) + 
op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', models.types.LongText(), nullable=True)) def downgrade(): diff --git a/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py b/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py index 8eac0dee10..bad516dcac 100644 --- a/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py +++ b/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py @@ -44,6 +44,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'), sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx') ) + if _is_pg(conn): op.create_table('datasource_oauth_tenant_params', sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), @@ -70,6 +71,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'), sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique') ) + if _is_pg(conn): op.create_table('datasource_providers', sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), @@ -104,6 +106,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'), sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name') ) + with op.batch_alter_table('datasource_providers', schema=None) as batch_op: batch_op.create_index('datasource_provider_auth_type_provider_idx', ['tenant_id', 'plugin_id', 'provider'], unique=False) @@ -133,6 +136,7 @@ def upgrade(): sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey') ) + with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op: batch_op.create_index('document_pipeline_execution_logs_document_id_idx', ['document_id'], unique=False) @@ -174,6 +178,7 @@ def upgrade(): sa.Column('updated_by', models.types.StringUUID(), nullable=True), sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey') ) + if _is_pg(conn): op.create_table('pipeline_customized_templates', sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), @@ -193,7 +198,6 @@ def upgrade(): sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey') ) else: - # MySQL: Use compatible syntax op.create_table('pipeline_customized_templates', sa.Column('id', models.types.StringUUID(), nullable=False), sa.Column('tenant_id', models.types.StringUUID(), nullable=False), @@ -211,6 +215,7 @@ def upgrade(): sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey') ) + with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op: batch_op.create_index('pipeline_customized_template_tenant_idx', ['tenant_id'], unique=False) @@ -236,6 +241,7 @@ def upgrade(): sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey') ) + if _is_pg(conn): op.create_table('pipelines', sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), @@ -266,6 +272,7 @@ def upgrade(): sa.Column('updated_at', sa.DateTime(), 
server_default=sa.func.current_timestamp(), nullable=False), sa.PrimaryKeyConstraint('id', name='pipeline_pkey') ) + if _is_pg(conn): op.create_table('workflow_draft_variable_files', sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), @@ -292,6 +299,7 @@ def upgrade(): sa.Column('value_type', sa.String(20), nullable=False), sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey')) ) + if _is_pg(conn): op.create_table('workflow_node_execution_offload', sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), @@ -316,6 +324,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')), sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key')) ) + if _is_pg(conn): with op.batch_alter_table('datasets', schema=None) as batch_op: batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True)) @@ -342,6 +351,7 @@ def upgrade(): comment='Indicates whether the current value is the default for a conversation variable. Always `FALSE` for other types of variables.',) ) batch_op.create_index('workflow_draft_variable_file_id_idx', ['file_id'], unique=False) + if _is_pg(conn): with op.batch_alter_table('workflows', schema=None) as batch_op: batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False)) diff --git a/api/migrations/versions/2025_10_21_1430-ae662b25d9bc_remove_builtin_template_user.py b/api/migrations/versions/2025_10_21_1430-ae662b25d9bc_remove_builtin_template_user.py index 0776ab0818..ec0cfbd11d 100644 --- a/api/migrations/versions/2025_10_21_1430-ae662b25d9bc_remove_builtin_template_user.py +++ b/api/migrations/versions/2025_10_21_1430-ae662b25d9bc_remove_builtin_template_user.py @@ -9,8 +9,6 @@ from alembic import op import models as models -def _is_pg(conn): - return conn.dialect.name == "postgresql" import sqlalchemy as sa @@ -33,15 +31,9 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op: - batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False)) - batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True)) - else: - with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op: - batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), autoincrement=False, nullable=False)) - batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), autoincrement=False, nullable=True)) + + with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), autoincrement=False, nullable=False)) + batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), autoincrement=False, nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_10_22_1611-03f8dcbc611e_add_workflowpause_model.py b/api/migrations/versions/2025_10_22_1611-03f8dcbc611e_add_workflowpause_model.py index 627219cc4b..12905b3674 100644 --- a/api/migrations/versions/2025_10_22_1611-03f8dcbc611e_add_workflowpause_model.py +++ b/api/migrations/versions/2025_10_22_1611-03f8dcbc611e_add_workflowpause_model.py @@ -9,7 +9,6 @@ Create Date: 2025-10-22 16:11:31.805407 from alembic import op import models as models import sqlalchemy as sa -from libs.uuid_utils import uuidv7 def _is_pg(conn): return conn.dialect.name == "postgresql" diff --git a/api/migrations/versions/2025_10_30_1518-669ffd70119c_introduce_trigger.py b/api/migrations/versions/2025_10_30_1518-669ffd70119c_introduce_trigger.py index 9641a15c89..c27c1058d1 100644 --- a/api/migrations/versions/2025_10_30_1518-669ffd70119c_introduce_trigger.py +++ b/api/migrations/versions/2025_10_30_1518-669ffd70119c_introduce_trigger.py @@ -105,6 +105,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id', name='trigger_oauth_tenant_client_pkey'), sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client') ) + if _is_pg(conn): op.create_table('trigger_subscriptions', sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), @@ -143,6 +144,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'), sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider') ) + with op.batch_alter_table('trigger_subscriptions', schema=None) as batch_op: batch_op.create_index('idx_trigger_providers_endpoint', ['endpoint_id'], unique=True) batch_op.create_index('idx_trigger_providers_tenant_endpoint', ['tenant_id', 'endpoint_id'], unique=False) @@ -176,6 +178,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'), sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node_subscription') ) + with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op: batch_op.create_index('workflow_plugin_trigger_tenant_subscription_idx', ['tenant_id', 'subscription_id', 'event_name'], unique=False) @@ -207,6 +210,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'), sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node') ) + with op.batch_alter_table('workflow_schedule_plans', schema=None) as batch_op: batch_op.create_index('workflow_schedule_plan_next_idx', ['next_run_at'], unique=False) @@ -264,6 +268,7 @@ def upgrade(): 
sa.Column('finished_at', sa.DateTime(), nullable=True), sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey') ) + with op.batch_alter_table('workflow_trigger_logs', schema=None) as batch_op: batch_op.create_index('workflow_trigger_log_created_at_idx', ['created_at'], unique=False) batch_op.create_index('workflow_trigger_log_status_idx', ['status'], unique=False) @@ -299,6 +304,7 @@ def upgrade(): sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'), sa.UniqueConstraint('webhook_id', name='uniq_webhook_id') ) + with op.batch_alter_table('workflow_webhook_triggers', schema=None) as batch_op: batch_op.create_index('workflow_webhook_trigger_tenant_idx', ['tenant_id'], unique=False) diff --git a/api/migrations/versions/2025_12_10_1504-8a7f2ad7c23e_add_workflow_runs_created_at_idx.py b/api/migrations/versions/2025_12_10_1504-8a7f2ad7c23e_add_workflow_runs_created_at_idx.py new file mode 100644 index 0000000000..7968429ca8 --- /dev/null +++ b/api/migrations/versions/2025_12_10_1504-8a7f2ad7c23e_add_workflow_runs_created_at_idx.py @@ -0,0 +1,29 @@ +"""Add index on workflow_runs.created_at + +Revision ID: 8a7f2ad7c23e +Revises: d57accd375ae +Create Date: 2025-12-10 15:04:00.000000 +""" + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = "8a7f2ad7c23e" +down_revision = "d57accd375ae" +branch_labels = None +depends_on = None + + +def upgrade(): + with op.batch_alter_table("workflow_runs", schema=None) as batch_op: + batch_op.create_index( + batch_op.f("workflow_runs_created_at_idx"), + ["created_at"], + unique=False, + ) + + +def downgrade(): + with op.batch_alter_table("workflow_runs", schema=None) as batch_op: + batch_op.drop_index(batch_op.f("workflow_runs_created_at_idx")) diff --git a/api/migrations/versions/2025_12_15_1614-6bb0832495f0_alter_table_pipeline_recommended_.py b/api/migrations/versions/2025_12_15_1614-6bb0832495f0_alter_table_pipeline_recommended_.py new file mode 100644 index 0000000000..40bbbded1d --- /dev/null +++ b/api/migrations/versions/2025_12_15_1614-6bb0832495f0_alter_table_pipeline_recommended_.py @@ -0,0 +1,64 @@ +"""Alter table pipeline_recommended_plugins add column type + +Revision ID: 6bb0832495f0 +Revises: 7bb281b7a422 +Create Date: 2025-12-15 16:14:38.482072 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '6bb0832495f0' +down_revision = '7bb281b7a422' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('app_triggers', schema=None) as batch_op: + batch_op.alter_column('provider_name', + existing_type=sa.VARCHAR(length=255), + nullable=False, + existing_server_default=sa.text("''::character varying")) + + with op.batch_alter_table('operation_logs', schema=None) as batch_op: + batch_op.alter_column('content', + existing_type=postgresql.JSON(astext_type=sa.Text()), + nullable=False) + + with op.batch_alter_table('pipeline_recommended_plugins', schema=None) as batch_op: + batch_op.add_column(sa.Column('type', sa.String(length=50), nullable=True)) + + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.alter_column('quota_used', + existing_type=sa.BIGINT(), + nullable=False) + + # ### end Alembic commands ### + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.alter_column('quota_used', + existing_type=sa.BIGINT(), + nullable=True) + + with op.batch_alter_table('pipeline_recommended_plugins', schema=None) as batch_op: + batch_op.drop_column('type') + + with op.batch_alter_table('operation_logs', schema=None) as batch_op: + batch_op.alter_column('content', + existing_type=postgresql.JSON(astext_type=sa.Text()), + nullable=True) + + with op.batch_alter_table('app_triggers', schema=None) as batch_op: + batch_op.alter_column('provider_name', + existing_type=sa.VARCHAR(length=255), + nullable=True, + existing_server_default=sa.text("''::character varying")) + + # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_12_16_1424-2536f83803a8_add_type_column_not_null_default_tool.py b/api/migrations/versions/2025_12_16_1424-2536f83803a8_add_type_column_not_null_default_tool.py new file mode 100644 index 0000000000..20ca06d200 --- /dev/null +++ b/api/migrations/versions/2025_12_16_1424-2536f83803a8_add_type_column_not_null_default_tool.py @@ -0,0 +1,33 @@ +"""add type column not null default tool + +Revision ID: 2536f83803a8 +Revises: 6bb0832495f0 +Create Date: 2025-12-16 14:24:40.740253 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '2536f83803a8' +down_revision = '6bb0832495f0' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('pipeline_recommended_plugins', schema=None) as batch_op: + batch_op.add_column(sa.Column('type', sa.String(length=50), nullable=False, server_default='tool')) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('pipeline_recommended_plugins', schema=None) as batch_op: + batch_op.drop_column('type') + + # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_12_25_1039-7df29de0f6be_add_credit_pool.py b/api/migrations/versions/2025_12_25_1039-7df29de0f6be_add_credit_pool.py new file mode 100644 index 0000000000..e89fcee7e5 --- /dev/null +++ b/api/migrations/versions/2025_12_25_1039-7df29de0f6be_add_credit_pool.py @@ -0,0 +1,46 @@ +"""add credit pool + +Revision ID: 7df29de0f6be +Revises: 03ea244985ce +Create Date: 2025-12-25 10:39:15.139304 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '7df29de0f6be' +down_revision = '03ea244985ce' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('tenant_credit_pools', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False), + sa.Column('quota_limit', sa.BigInteger(), nullable=False), + sa.Column('quota_used', sa.BigInteger(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey') + ) + with op.batch_alter_table('tenant_credit_pools', schema=None) as batch_op: + batch_op.create_index('tenant_credit_pool_pool_type_idx', ['pool_type'], unique=False) + batch_op.create_index('tenant_credit_pool_tenant_id_idx', ['tenant_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + + with op.batch_alter_table('tenant_credit_pools', schema=None) as batch_op: + batch_op.drop_index('tenant_credit_pool_tenant_id_idx') + batch_op.drop_index('tenant_credit_pool_pool_type_idx') + + op.drop_table('tenant_credit_pools') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_12_30_1617-85c8b4a64f53_add_llm_generation_detail_table.py b/api/migrations/versions/2025_12_30_1617-85c8b4a64f53_add_llm_generation_detail_table.py new file mode 100644 index 0000000000..2b920a5ac6 --- /dev/null +++ b/api/migrations/versions/2025_12_30_1617-85c8b4a64f53_add_llm_generation_detail_table.py @@ -0,0 +1,46 @@ +"""add llm generation detail table. + +Revision ID: 85c8b4a64f53 +Revises: 7bb281b7a422 +Create Date: 2025-12-10 16:17:46.597669 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '85c8b4a64f53' +down_revision = '7df29de0f6be' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('llm_generation_details', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('message_id', models.types.StringUUID(), nullable=True), + sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True), + sa.Column('node_id', sa.String(length=255), nullable=True), + sa.Column('reasoning_content', models.types.LongText(), nullable=True), + sa.Column('tool_calls', models.types.LongText(), nullable=True), + sa.Column('sequence', models.types.LongText(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.CheckConstraint('(message_id IS NOT NULL AND workflow_run_id IS NULL AND node_id IS NULL) OR (message_id IS NULL AND workflow_run_id IS NOT NULL AND node_id IS NOT NULL)', name=op.f('llm_generation_details_ck_llm_generation_detail_assoc_mode_check')), + sa.PrimaryKeyConstraint('id', name='llm_generation_detail_pkey'), + sa.UniqueConstraint('message_id', name=op.f('llm_generation_details_message_id_key')) + ) + with op.batch_alter_table('llm_generation_details', schema=None) as batch_op: + batch_op.create_index('idx_llm_generation_detail_message', ['message_id'], unique=False) + batch_op.create_index('idx_llm_generation_detail_workflow', ['workflow_run_id', 'node_id'], unique=False) + + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('llm_generation_details') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2026_01_09_1110-f9f6d18a37f9_add_table_explore_banner_and_trial.py b/api/migrations/versions/2026_01_09_1110-f9f6d18a37f9_add_table_explore_banner_and_trial.py new file mode 100644 index 0000000000..c34fa5f819 --- /dev/null +++ b/api/migrations/versions/2026_01_09_1110-f9f6d18a37f9_add_table_explore_banner_and_trial.py @@ -0,0 +1,73 @@ +"""add table explore banner and trial + +Revision ID: f9f6d18a37f9 +Revises: 7df29de0f6be +Create Date: 2026-01-09 11:10:18.079355 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = 'f9f6d18a37f9' +down_revision = '7df29de0f6be' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('account_trial_app_records', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('account_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('count', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='user_trial_app_pkey'), + sa.UniqueConstraint('account_id', 'app_id', name='unique_account_trial_app_record') + ) + with op.batch_alter_table('account_trial_app_records', schema=None) as batch_op: + batch_op.create_index('account_trial_app_record_account_id_idx', ['account_id'], unique=False) + batch_op.create_index('account_trial_app_record_app_id_idx', ['app_id'], unique=False) + + op.create_table('exporle_banners', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('content', sa.JSON(), nullable=False), + sa.Column('link', sa.String(length=255), nullable=False), + sa.Column('sort', sa.Integer(), nullable=False), + sa.Column('status', sa.String(length=255), server_default=sa.text("'enabled'::character varying"), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('language', sa.String(length=255), server_default=sa.text("'en-US'::character varying"), nullable=False), + sa.PrimaryKeyConstraint('id', name='exporler_banner_pkey') + ) + op.create_table('trial_apps', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('trial_limit', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id', name='trial_app_pkey'), + sa.UniqueConstraint('app_id', name='unique_trail_app_id') + ) + with op.batch_alter_table('trial_apps', schema=None) as batch_op: + batch_op.create_index('trial_app_app_id_idx', ['app_id'], unique=False) + batch_op.create_index('trial_app_tenant_id_idx', ['tenant_id'], unique=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('trial_apps', schema=None) as batch_op: + batch_op.drop_index('trial_app_tenant_id_idx') + batch_op.drop_index('trial_app_app_id_idx') + + op.drop_table('trial_apps') + op.drop_table('exporle_banners') + with op.batch_alter_table('account_trial_app_records', schema=None) as batch_op: + batch_op.drop_index('account_trial_app_record_app_id_idx') + batch_op.drop_index('account_trial_app_record_account_id_idx') + + op.drop_table('account_trial_app_records') + # ### end Alembic commands ### diff --git a/api/migrations/versions/23db93619b9d_add_message_files_into_agent_thought.py b/api/migrations/versions/23db93619b9d_add_message_files_into_agent_thought.py index fae506906b..127ffd5599 100644 --- a/api/migrations/versions/23db93619b9d_add_message_files_into_agent_thought.py +++ b/api/migrations/versions/23db93619b9d_add_message_files_into_agent_thought.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. 
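The check constraint in the llm_generation_details migration above encodes an either/or association: a detail row belongs to exactly one chat message, or to exactly one workflow node run, never both and never neither. The predicate it enforces, restated in plain Python for clarity:

```python
def association_mode_ok(message_id, workflow_run_id, node_id) -> bool:
    """Mirror of the llm_generation_details check constraint."""
    message_mode = message_id is not None and workflow_run_id is None and node_id is None
    workflow_mode = message_id is None and workflow_run_id is not None and node_id is not None
    return message_mode or workflow_mode


assert association_mode_ok("msg-1", None, None)            # chat message detail
assert association_mode_ok(None, "run-1", "llm-node-1")    # workflow node detail
assert not association_mode_ok("msg-1", "run-1", None)     # mixed mode is rejected
```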
revision = '23db93619b9d' down_revision = '8ae9bc661daa' @@ -23,14 +20,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: - batch_op.add_column(sa.Column('message_files', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: - batch_op.add_column(sa.Column('message_files', models.types.LongText(), nullable=True)) + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.add_column(sa.Column('message_files', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/246ba09cbbdb_add_app_anntation_setting.py b/api/migrations/versions/246ba09cbbdb_add_app_anntation_setting.py index 2676ef0b94..31829d8e58 100644 --- a/api/migrations/versions/246ba09cbbdb_add_app_anntation_setting.py +++ b/api/migrations/versions/246ba09cbbdb_add_app_anntation_setting.py @@ -62,14 +62,8 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('annotation_reply', sa.TEXT(), autoincrement=False, nullable=True)) - else: - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), autoincrement=False, nullable=True)) + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), autoincrement=False, nullable=True)) with op.batch_alter_table('app_annotation_settings', schema=None) as batch_op: batch_op.drop_index('app_annotation_settings_app_idx') diff --git a/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py b/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py index 3362a3a09f..07a8cd86b1 100644 --- a/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py +++ b/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py @@ -11,9 +11,6 @@ from alembic import op import models as models -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '2a3aebbbf4bb' down_revision = 'c031d46af369' @@ -23,14 +20,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('apps', schema=None) as batch_op: - batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('apps', schema=None) as batch_op: - batch_op.add_column(sa.Column('tracing', models.types.LongText(), nullable=True)) + with op.batch_alter_table('apps', schema=None) as batch_op: + batch_op.add_column(sa.Column('tracing', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/2e9819ca5b28_add_tenant_id_in_api_token.py b/api/migrations/versions/2e9819ca5b28_add_tenant_id_in_api_token.py index 40bd727f66..211b2d8882 100644 --- a/api/migrations/versions/2e9819ca5b28_add_tenant_id_in_api_token.py +++ b/api/migrations/versions/2e9819ca5b28_add_tenant_id_in_api_token.py @@ -7,14 +7,10 @@ Create Date: 2023-09-22 15:41:01.243183 """ import sqlalchemy as sa from alembic import op -from sqlalchemy.dialects import postgresql import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '2e9819ca5b28' down_revision = 'ab23c11305d4' @@ -24,35 +20,19 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('api_tokens', schema=None) as batch_op: - batch_op.add_column(sa.Column('tenant_id', postgresql.UUID(), nullable=True)) - batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False) - batch_op.drop_column('dataset_id') - else: - with op.batch_alter_table('api_tokens', schema=None) as batch_op: - batch_op.add_column(sa.Column('tenant_id', models.types.StringUUID(), nullable=True)) - batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False) - batch_op.drop_column('dataset_id') + with op.batch_alter_table('api_tokens', schema=None) as batch_op: + batch_op.add_column(sa.Column('tenant_id', models.types.StringUUID(), nullable=True)) + batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False) + batch_op.drop_column('dataset_id') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('api_tokens', schema=None) as batch_op: - batch_op.add_column(sa.Column('dataset_id', postgresql.UUID(), autoincrement=False, nullable=True)) - batch_op.drop_index('api_token_tenant_idx') - batch_op.drop_column('tenant_id') - else: - with op.batch_alter_table('api_tokens', schema=None) as batch_op: - batch_op.add_column(sa.Column('dataset_id', models.types.StringUUID(), autoincrement=False, nullable=True)) - batch_op.drop_index('api_token_tenant_idx') - batch_op.drop_column('tenant_id') + with op.batch_alter_table('api_tokens', schema=None) as batch_op: + batch_op.add_column(sa.Column('dataset_id', models.types.StringUUID(), autoincrement=False, nullable=True)) + batch_op.drop_index('api_token_tenant_idx') + batch_op.drop_column('tenant_id') # ### end Alembic commands ### diff --git a/api/migrations/versions/42e85ed5564d_conversation_columns_set_nullable.py b/api/migrations/versions/42e85ed5564d_conversation_columns_set_nullable.py index 76056a9460..3491c85e2f 100644 --- a/api/migrations/versions/42e85ed5564d_conversation_columns_set_nullable.py +++ b/api/migrations/versions/42e85ed5564d_conversation_columns_set_nullable.py @@ -7,14 +7,10 @@ Create Date: 2024-03-07 08:30:29.133614 """ import sqlalchemy as sa from alembic import op -from sqlalchemy.dialects import postgresql import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '42e85ed5564d' down_revision = 'f9107f83abab' @@ -24,59 +20,31 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('conversations', schema=None) as batch_op: - batch_op.alter_column('app_model_config_id', - existing_type=postgresql.UUID(), - nullable=True) - batch_op.alter_column('model_provider', - existing_type=sa.VARCHAR(length=255), - nullable=True) - batch_op.alter_column('model_id', - existing_type=sa.VARCHAR(length=255), - nullable=True) - else: - with op.batch_alter_table('conversations', schema=None) as batch_op: - batch_op.alter_column('app_model_config_id', - existing_type=models.types.StringUUID(), - nullable=True) - batch_op.alter_column('model_provider', - existing_type=sa.VARCHAR(length=255), - nullable=True) - batch_op.alter_column('model_id', - existing_type=sa.VARCHAR(length=255), - nullable=True) + with op.batch_alter_table('conversations', schema=None) as batch_op: + batch_op.alter_column('app_model_config_id', + existing_type=models.types.StringUUID(), + nullable=True) + batch_op.alter_column('model_provider', + existing_type=sa.VARCHAR(length=255), + nullable=True) + batch_op.alter_column('model_id', + existing_type=sa.VARCHAR(length=255), + nullable=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('conversations', schema=None) as batch_op: - batch_op.alter_column('model_id', - existing_type=sa.VARCHAR(length=255), - nullable=False) - batch_op.alter_column('model_provider', - existing_type=sa.VARCHAR(length=255), - nullable=False) - batch_op.alter_column('app_model_config_id', - existing_type=postgresql.UUID(), - nullable=False) - else: - with op.batch_alter_table('conversations', schema=None) as batch_op: - batch_op.alter_column('model_id', - existing_type=sa.VARCHAR(length=255), - nullable=False) - batch_op.alter_column('model_provider', - existing_type=sa.VARCHAR(length=255), - nullable=False) - batch_op.alter_column('app_model_config_id', - existing_type=models.types.StringUUID(), - nullable=False) + with op.batch_alter_table('conversations', schema=None) as batch_op: + batch_op.alter_column('model_id', + existing_type=sa.VARCHAR(length=255), + nullable=False) + batch_op.alter_column('model_provider', + existing_type=sa.VARCHAR(length=255), + nullable=False) + batch_op.alter_column('app_model_config_id', + existing_type=models.types.StringUUID(), + nullable=False) # ### end Alembic commands ### diff --git a/api/migrations/versions/4829e54d2fee_change_message_chain_id_to_nullable.py b/api/migrations/versions/4829e54d2fee_change_message_chain_id_to_nullable.py index ef066587b7..8537a87233 100644 --- a/api/migrations/versions/4829e54d2fee_change_message_chain_id_to_nullable.py +++ b/api/migrations/versions/4829e54d2fee_change_message_chain_id_to_nullable.py @@ -6,14 +6,10 @@ Create Date: 2024-01-12 03:42:27.362415 """ from alembic import op -from sqlalchemy.dialects import postgresql import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '4829e54d2fee' down_revision = '114eed84c228' @@ -23,39 +19,21 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - # PostgreSQL: Keep original syntax - with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: - batch_op.alter_column('message_chain_id', - existing_type=postgresql.UUID(), - nullable=True) - else: - # MySQL: Use compatible syntax - with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: - batch_op.alter_column('message_chain_id', - existing_type=models.types.StringUUID(), - nullable=True) + + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.alter_column('message_chain_id', + existing_type=models.types.StringUUID(), + nullable=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - # PostgreSQL: Keep original syntax - with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: - batch_op.alter_column('message_chain_id', - existing_type=postgresql.UUID(), - nullable=False) - else: - # MySQL: Use compatible syntax - with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: - batch_op.alter_column('message_chain_id', - existing_type=models.types.StringUUID(), - nullable=False) + + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.alter_column('message_chain_id', + existing_type=models.types.StringUUID(), + nullable=False) # ### end Alembic commands ### diff --git a/api/migrations/versions/563cf8bf777b_enable_tool_file_without_conversation_id.py b/api/migrations/versions/563cf8bf777b_enable_tool_file_without_conversation_id.py index b080e7680b..22405e3cc8 100644 --- a/api/migrations/versions/563cf8bf777b_enable_tool_file_without_conversation_id.py +++ b/api/migrations/versions/563cf8bf777b_enable_tool_file_without_conversation_id.py @@ -6,14 +6,10 @@ Create Date: 2024-03-14 04:54:56.679506 """ from alembic import op -from sqlalchemy.dialects import postgresql import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '563cf8bf777b' down_revision = 'b5429b71023c' @@ -23,35 +19,19 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('tool_files', schema=None) as batch_op: - batch_op.alter_column('conversation_id', - existing_type=postgresql.UUID(), - nullable=True) - else: - with op.batch_alter_table('tool_files', schema=None) as batch_op: - batch_op.alter_column('conversation_id', - existing_type=models.types.StringUUID(), - nullable=True) + with op.batch_alter_table('tool_files', schema=None) as batch_op: + batch_op.alter_column('conversation_id', + existing_type=models.types.StringUUID(), + nullable=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('tool_files', schema=None) as batch_op: - batch_op.alter_column('conversation_id', - existing_type=postgresql.UUID(), - nullable=False) - else: - with op.batch_alter_table('tool_files', schema=None) as batch_op: - batch_op.alter_column('conversation_id', - existing_type=models.types.StringUUID(), - nullable=False) + with op.batch_alter_table('tool_files', schema=None) as batch_op: + batch_op.alter_column('conversation_id', + existing_type=models.types.StringUUID(), + nullable=False) # ### end Alembic commands ### diff --git a/api/migrations/versions/6e2cfb077b04_add_dataset_collection_binding.py b/api/migrations/versions/6e2cfb077b04_add_dataset_collection_binding.py index 1ace8ea5a0..01d7d5ba21 100644 --- a/api/migrations/versions/6e2cfb077b04_add_dataset_collection_binding.py +++ b/api/migrations/versions/6e2cfb077b04_add_dataset_collection_binding.py @@ -48,12 +48,9 @@ def upgrade(): with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op: batch_op.create_index('provider_model_name_idx', ['provider_name', 'model_name'], unique=False) - if _is_pg(conn): - with op.batch_alter_table('datasets', schema=None) as batch_op: - batch_op.add_column(sa.Column('collection_binding_id', postgresql.UUID(), nullable=True)) - else: - with op.batch_alter_table('datasets', schema=None) as batch_op: - batch_op.add_column(sa.Column('collection_binding_id', models.types.StringUUID(), nullable=True)) + + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.add_column(sa.Column('collection_binding_id', models.types.StringUUID(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/714aafe25d39_add_anntation_history_match_response.py b/api/migrations/versions/714aafe25d39_add_anntation_history_match_response.py index 457338ef42..0faa48f535 100644 --- a/api/migrations/versions/714aafe25d39_add_anntation_history_match_response.py +++ b/api/migrations/versions/714aafe25d39_add_anntation_history_match_response.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '714aafe25d39' down_revision = 'f2a6fc85e260' @@ -23,16 +20,9 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op: - batch_op.add_column(sa.Column('annotation_question', sa.Text(), nullable=False)) - batch_op.add_column(sa.Column('annotation_content', sa.Text(), nullable=False)) - else: - with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op: - batch_op.add_column(sa.Column('annotation_question', models.types.LongText(), nullable=False)) - batch_op.add_column(sa.Column('annotation_content', models.types.LongText(), nullable=False)) + with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op: + batch_op.add_column(sa.Column('annotation_question', models.types.LongText(), nullable=False)) + batch_op.add_column(sa.Column('annotation_content', models.types.LongText(), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/77e83833755c_add_app_config_retriever_resource.py b/api/migrations/versions/77e83833755c_add_app_config_retriever_resource.py index 7bcd1a1be3..aa7b4a21e2 100644 --- a/api/migrations/versions/77e83833755c_add_app_config_retriever_resource.py +++ b/api/migrations/versions/77e83833755c_add_app_config_retriever_resource.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '77e83833755c' down_revision = '6dcb43972bdc' @@ -23,14 +20,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('retriever_resource', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('retriever_resource', models.types.LongText(), nullable=True)) + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('retriever_resource', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/7ce5a52e4eee_add_tool_providers.py b/api/migrations/versions/7ce5a52e4eee_add_tool_providers.py index 3c0aa082d5..34a17697d3 100644 --- a/api/migrations/versions/7ce5a52e4eee_add_tool_providers.py +++ b/api/migrations/versions/7ce5a52e4eee_add_tool_providers.py @@ -27,7 +27,6 @@ def upgrade(): conn = op.get_bind() if _is_pg(conn): - # PostgreSQL: Keep original syntax op.create_table('tool_providers', sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), sa.Column('tenant_id', postgresql.UUID(), nullable=False), @@ -40,7 +39,6 @@ def upgrade(): sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name') ) else: - # MySQL: Use compatible syntax op.create_table('tool_providers', sa.Column('id', models.types.StringUUID(), nullable=False), sa.Column('tenant_id', models.types.StringUUID(), nullable=False), @@ -52,12 +50,9 @@ def upgrade(): sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'), sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name') ) - if _is_pg(conn): - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('sensitive_word_avoidance', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - 
batch_op.add_column(sa.Column('sensitive_word_avoidance', models.types.LongText(), nullable=True)) + + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('sensitive_word_avoidance', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/88072f0caa04_add_custom_config_in_tenant.py b/api/migrations/versions/88072f0caa04_add_custom_config_in_tenant.py index beea90b384..884839c010 100644 --- a/api/migrations/versions/88072f0caa04_add_custom_config_in_tenant.py +++ b/api/migrations/versions/88072f0caa04_add_custom_config_in_tenant.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '88072f0caa04' down_revision = '246ba09cbbdb' @@ -23,14 +20,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('tenants', schema=None) as batch_op: - batch_op.add_column(sa.Column('custom_config', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('tenants', schema=None) as batch_op: - batch_op.add_column(sa.Column('custom_config', models.types.LongText(), nullable=True)) + with op.batch_alter_table('tenants', schema=None) as batch_op: + batch_op.add_column(sa.Column('custom_config', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/89c7899ca936_.py b/api/migrations/versions/89c7899ca936_.py index 2420710e74..d26f1e82d6 100644 --- a/api/migrations/versions/89c7899ca936_.py +++ b/api/migrations/versions/89c7899ca936_.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '89c7899ca936' down_revision = '187385f442fc' @@ -23,39 +20,21 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('sites', schema=None) as batch_op: - batch_op.alter_column('description', - existing_type=sa.VARCHAR(length=255), - type_=sa.Text(), - existing_nullable=True) - else: - with op.batch_alter_table('sites', schema=None) as batch_op: - batch_op.alter_column('description', - existing_type=sa.VARCHAR(length=255), - type_=models.types.LongText(), - existing_nullable=True) + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.alter_column('description', + existing_type=sa.VARCHAR(length=255), + type_=models.types.LongText(), + existing_nullable=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('sites', schema=None) as batch_op: - batch_op.alter_column('description', - existing_type=sa.Text(), - type_=sa.VARCHAR(length=255), - existing_nullable=True) - else: - with op.batch_alter_table('sites', schema=None) as batch_op: - batch_op.alter_column('description', - existing_type=models.types.LongText(), - type_=sa.VARCHAR(length=255), - existing_nullable=True) + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.alter_column('description', + existing_type=models.types.LongText(), + type_=sa.VARCHAR(length=255), + existing_nullable=True) # ### end Alembic commands ### diff --git a/api/migrations/versions/8ec536f3c800_rename_api_provider_credentails.py b/api/migrations/versions/8ec536f3c800_rename_api_provider_credentails.py index 111e81240b..6022ea2c20 100644 --- a/api/migrations/versions/8ec536f3c800_rename_api_provider_credentails.py +++ b/api/migrations/versions/8ec536f3c800_rename_api_provider_credentails.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = '8ec536f3c800' down_revision = 'ad472b61a054' @@ -23,14 +20,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('credentials_str', sa.Text(), nullable=False)) - else: - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('credentials_str', models.types.LongText(), nullable=False)) + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('credentials_str', models.types.LongText(), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/8fe468ba0ca5_add_gpt4v_supports.py b/api/migrations/versions/8fe468ba0ca5_add_gpt4v_supports.py index 1c1c6cacbb..9d6d40114d 100644 --- a/api/migrations/versions/8fe468ba0ca5_add_gpt4v_supports.py +++ b/api/migrations/versions/8fe468ba0ca5_add_gpt4v_supports.py @@ -57,12 +57,9 @@ def upgrade(): batch_op.create_index('message_file_created_by_idx', ['created_by'], unique=False) batch_op.create_index('message_file_message_idx', ['message_id'], unique=False) - if _is_pg(conn): - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('file_upload', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('file_upload', models.types.LongText(), nullable=True)) + + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('file_upload', models.types.LongText(), nullable=True)) if _is_pg(conn): with op.batch_alter_table('upload_files', schema=None) as batch_op: diff --git a/api/migrations/versions/9f4e3427ea84_add_created_by_role.py b/api/migrations/versions/9f4e3427ea84_add_created_by_role.py index 5d29d354f3..0b3f92a12e 100644 --- a/api/migrations/versions/9f4e3427ea84_add_created_by_role.py +++ b/api/migrations/versions/9f4e3427ea84_add_created_by_role.py @@ -24,7 +24,6 @@ def upgrade(): conn = op.get_bind() if _is_pg(conn): - # PostgreSQL: Keep original syntax with op.batch_alter_table('pinned_conversations', schema=None) as batch_op: 
batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'::character varying"), nullable=False)) batch_op.drop_index('pinned_conversation_conversation_idx') @@ -35,7 +34,6 @@ def upgrade(): batch_op.drop_index('saved_message_message_idx') batch_op.create_index('saved_message_message_idx', ['app_id', 'message_id', 'created_by_role', 'created_by'], unique=False) else: - # MySQL: Use compatible syntax with op.batch_alter_table('pinned_conversations', schema=None) as batch_op: batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'"), nullable=False)) batch_op.drop_index('pinned_conversation_conversation_idx') diff --git a/api/migrations/versions/a5b56fb053ef_app_config_add_speech_to_text.py b/api/migrations/versions/a5b56fb053ef_app_config_add_speech_to_text.py index 616cb2f163..c8747a51f7 100644 --- a/api/migrations/versions/a5b56fb053ef_app_config_add_speech_to_text.py +++ b/api/migrations/versions/a5b56fb053ef_app_config_add_speech_to_text.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = 'a5b56fb053ef' down_revision = 'd3d503a3471c' @@ -23,14 +20,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('speech_to_text', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('speech_to_text', models.types.LongText(), nullable=True)) + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('speech_to_text', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/a9836e3baeee_add_external_data_tools_in_app_model_.py b/api/migrations/versions/a9836e3baeee_add_external_data_tools_in_app_model_.py index 900ff78036..f56aeb7e66 100644 --- a/api/migrations/versions/a9836e3baeee_add_external_data_tools_in_app_model_.py +++ b/api/migrations/versions/a9836e3baeee_add_external_data_tools_in_app_model_.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = 'a9836e3baeee' down_revision = '968fff4c0ab9' @@ -23,14 +20,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('external_data_tools', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('external_data_tools', models.types.LongText(), nullable=True)) + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('external_data_tools', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/b24be59fbb04_.py b/api/migrations/versions/b24be59fbb04_.py index b0a6d10d8c..ae91eaf1bc 100644 --- a/api/migrations/versions/b24be59fbb04_.py +++ b/api/migrations/versions/b24be59fbb04_.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = 'b24be59fbb04' down_revision = 'de95f5c77138' @@ -23,14 +20,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('text_to_speech', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('text_to_speech', models.types.LongText(), nullable=True)) + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('text_to_speech', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/b3a09c049e8e_add_advanced_prompt_templates.py b/api/migrations/versions/b3a09c049e8e_add_advanced_prompt_templates.py index 772395c25b..c02c24c23f 100644 --- a/api/migrations/versions/b3a09c049e8e_add_advanced_prompt_templates.py +++ b/api/migrations/versions/b3a09c049e8e_add_advanced_prompt_templates.py @@ -11,9 +11,6 @@ from alembic import op import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = 'b3a09c049e8e' down_revision = '2e9819ca5b28' @@ -23,20 +20,11 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple')) - batch_op.add_column(sa.Column('chat_prompt_config', sa.Text(), nullable=True)) - batch_op.add_column(sa.Column('completion_prompt_config', sa.Text(), nullable=True)) - batch_op.add_column(sa.Column('dataset_configs', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple')) - batch_op.add_column(sa.Column('chat_prompt_config', models.types.LongText(), nullable=True)) - batch_op.add_column(sa.Column('completion_prompt_config', models.types.LongText(), nullable=True)) - batch_op.add_column(sa.Column('dataset_configs', models.types.LongText(), nullable=True)) + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple')) + batch_op.add_column(sa.Column('chat_prompt_config', models.types.LongText(), nullable=True)) + batch_op.add_column(sa.Column('completion_prompt_config', models.types.LongText(), nullable=True)) + batch_op.add_column(sa.Column('dataset_configs', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py b/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py index 76be794ff4..fe51d1c78d 100644 --- a/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py +++ b/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py @@ -7,7 +7,6 @@ Create Date: 2024-06-17 10:01:00.255189 """ import sqlalchemy as sa from alembic import op -from sqlalchemy.dialects import postgresql import models.types diff --git a/api/migrations/versions/e1901f623fd0_add_annotation_reply.py b/api/migrations/versions/e1901f623fd0_add_annotation_reply.py index 9e02ec5d84..36e934f0fc 100644 --- a/api/migrations/versions/e1901f623fd0_add_annotation_reply.py +++ b/api/migrations/versions/e1901f623fd0_add_annotation_reply.py @@ -54,12 +54,9 @@ def upgrade(): batch_op.create_index('app_annotation_hit_histories_annotation_idx', ['annotation_id'], unique=False) batch_op.create_index('app_annotation_hit_histories_app_idx', ['app_id'], unique=False) - if _is_pg(conn): - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('annotation_reply', sa.Text(), nullable=True)) - else: - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), nullable=True)) + + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), nullable=True)) if _is_pg(conn): with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op: @@ -68,54 +65,31 @@ def upgrade(): with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op: batch_op.add_column(sa.Column('type', sa.String(length=40), server_default=sa.text("'dataset'"), nullable=False)) - if _is_pg(conn): - with op.batch_alter_table('message_annotations', schema=None) as batch_op: - batch_op.add_column(sa.Column('question', sa.Text(), 
nullable=True)) - batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False)) - batch_op.alter_column('conversation_id', - existing_type=postgresql.UUID(), - nullable=True) - batch_op.alter_column('message_id', - existing_type=postgresql.UUID(), - nullable=True) - else: - with op.batch_alter_table('message_annotations', schema=None) as batch_op: - batch_op.add_column(sa.Column('question', models.types.LongText(), nullable=True)) - batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False)) - batch_op.alter_column('conversation_id', - existing_type=models.types.StringUUID(), - nullable=True) - batch_op.alter_column('message_id', - existing_type=models.types.StringUUID(), - nullable=True) + with op.batch_alter_table('message_annotations', schema=None) as batch_op: + batch_op.add_column(sa.Column('question', models.types.LongText(), nullable=True)) + batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False)) + batch_op.alter_column('conversation_id', + existing_type=models.types.StringUUID(), + nullable=True) + batch_op.alter_column('message_id', + existing_type=models.types.StringUUID(), + nullable=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - if _is_pg(conn): - with op.batch_alter_table('message_annotations', schema=None) as batch_op: - batch_op.alter_column('message_id', - existing_type=postgresql.UUID(), - nullable=False) - batch_op.alter_column('conversation_id', - existing_type=postgresql.UUID(), - nullable=False) - batch_op.drop_column('hit_count') - batch_op.drop_column('question') - else: - with op.batch_alter_table('message_annotations', schema=None) as batch_op: - batch_op.alter_column('message_id', - existing_type=models.types.StringUUID(), - nullable=False) - batch_op.alter_column('conversation_id', - existing_type=models.types.StringUUID(), - nullable=False) - batch_op.drop_column('hit_count') - batch_op.drop_column('question') + with op.batch_alter_table('message_annotations', schema=None) as batch_op: + batch_op.alter_column('message_id', + existing_type=models.types.StringUUID(), + nullable=False) + batch_op.alter_column('conversation_id', + existing_type=models.types.StringUUID(), + nullable=False) + batch_op.drop_column('hit_count') + batch_op.drop_column('question') with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op: batch_op.drop_column('type') diff --git a/api/migrations/versions/f2a6fc85e260_add_anntation_history_message_id.py b/api/migrations/versions/f2a6fc85e260_add_anntation_history_message_id.py index 02098e91c1..ac1c14e50c 100644 --- a/api/migrations/versions/f2a6fc85e260_add_anntation_history_message_id.py +++ b/api/migrations/versions/f2a6fc85e260_add_anntation_history_message_id.py @@ -12,9 +12,6 @@ from sqlalchemy.dialects import postgresql import models.types -def _is_pg(conn): - return conn.dialect.name == "postgresql" - # revision identifiers, used by Alembic. revision = 'f2a6fc85e260' down_revision = '46976cc39132' @@ -24,16 +21,9 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - conn = op.get_bind() - - if _is_pg(conn): - with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op: - batch_op.add_column(sa.Column('message_id', postgresql.UUID(), nullable=False)) - batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False) - else: - with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op: - batch_op.add_column(sa.Column('message_id', models.types.StringUUID(), nullable=False)) - batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False) + with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op: + batch_op.add_column(sa.Column('message_id', models.types.StringUUID(), nullable=False)) + batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False) # ### end Alembic commands ### diff --git a/api/models/__init__.py b/api/models/__init__.py index 906bc3198e..4c2ef0fb71 100644 --- a/api/models/__init__.py +++ b/api/models/__init__.py @@ -35,6 +35,7 @@ from .enums import ( WorkflowTriggerStatus, ) from .model import ( + AccountTrialAppRecord, ApiRequest, ApiToken, App, @@ -47,8 +48,10 @@ from .model import ( DatasetRetrieverResource, DifySetup, EndUser, + ExporleBanner, IconType, InstalledApp, + LLMGenerationDetail, Message, MessageAgentThought, MessageAnnotation, @@ -60,7 +63,9 @@ from .model import ( Site, Tag, TagBinding, + TenantCreditPool, TraceAppConfig, + TrialApp, UploadFile, ) from .oauth import DatasourceOauthParamConfig, DatasourceProvider @@ -113,6 +118,7 @@ __all__ = [ "Account", "AccountIntegrate", "AccountStatus", + "AccountTrialAppRecord", "ApiRequest", "ApiToken", "ApiToolProvider", @@ -149,11 +155,13 @@ __all__ = [ "DocumentSegment", "Embedding", "EndUser", + "ExporleBanner", "ExternalKnowledgeApis", "ExternalKnowledgeBindings", "IconType", "InstalledApp", "InvitationCode", + "LLMGenerationDetail", "LoadBalancingModelConfig", "Message", "MessageAgentThought", @@ -177,6 +185,7 @@ __all__ = [ "Tenant", "TenantAccountJoin", "TenantAccountRole", + "TenantCreditPool", "TenantDefaultModel", "TenantPreferredModelProvider", "TenantStatus", @@ -186,6 +195,7 @@ __all__ = [ "ToolLabelBinding", "ToolModelInvoke", "TraceAppConfig", + "TrialApp", "TriggerOAuthSystemClient", "TriggerOAuthTenantClient", "TriggerSubscription", diff --git a/api/models/account.py b/api/models/account.py index 420e6adc6c..f7a9c20026 100644 --- a/api/models/account.py +++ b/api/models/account.py @@ -8,7 +8,7 @@ from uuid import uuid4 import sqlalchemy as sa from flask_login import UserMixin from sqlalchemy import DateTime, String, func, select -from sqlalchemy.orm import Mapped, Session, mapped_column +from sqlalchemy.orm import Mapped, Session, mapped_column, validates from typing_extensions import deprecated from .base import TypeBase @@ -116,6 +116,12 @@ class Account(UserMixin, TypeBase): role: TenantAccountRole | None = field(default=None, init=False) _current_tenant: "Tenant | None" = field(default=None, init=False) + @validates("status") + def _normalize_status(self, _key: str, value: str | AccountStatus) -> str: + if isinstance(value, AccountStatus): + return value.value + return value + @property def is_password_set(self): return self.password is not None diff --git a/api/models/model.py b/api/models/model.py index 88cb945b3f..d3db9dd97d 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json import re import uuid @@ -5,13 
+7,13 @@ from collections.abc import Mapping from datetime import datetime from decimal import Decimal from enum import StrEnum, auto -from typing import TYPE_CHECKING, Any, Literal, Optional, cast +from typing import TYPE_CHECKING, Any, Literal, cast from uuid import uuid4 import sqlalchemy as sa from flask import request -from flask_login import UserMixin -from sqlalchemy import Float, Index, PrimaryKeyConstraint, String, exists, func, select, text +from flask_login import UserMixin # type: ignore[import-untyped] +from sqlalchemy import BigInteger, Float, Index, PrimaryKeyConstraint, String, exists, func, select, text from sqlalchemy.orm import Mapped, Session, mapped_column from configs import dify_config @@ -31,6 +33,8 @@ from .provider_ids import GenericProviderID from .types import LongText, StringUUID if TYPE_CHECKING: + from core.app.entities.llm_generation_entities import LLMGenerationDetailData + from .workflow import Workflow @@ -54,7 +58,7 @@ class AppMode(StrEnum): RAG_PIPELINE = "rag-pipeline" @classmethod - def value_of(cls, value: str) -> "AppMode": + def value_of(cls, value: str) -> AppMode: """ Get value of given mode. @@ -70,6 +74,7 @@ class AppMode(StrEnum): class IconType(StrEnum): IMAGE = auto() EMOJI = auto() + LINK = auto() class App(Base): @@ -81,7 +86,7 @@ class App(Base): name: Mapped[str] = mapped_column(String(255)) description: Mapped[str] = mapped_column(LongText, default=sa.text("''")) mode: Mapped[str] = mapped_column(String(255)) - icon_type: Mapped[str | None] = mapped_column(String(255)) # image, emoji + icon_type: Mapped[str | None] = mapped_column(String(255)) # image, emoji, link icon = mapped_column(String(255)) icon_background: Mapped[str | None] = mapped_column(String(255)) app_model_config_id = mapped_column(StringUUID, nullable=True) @@ -120,19 +125,19 @@ class App(Base): return "" @property - def site(self) -> Optional["Site"]: + def site(self) -> Site | None: site = db.session.query(Site).where(Site.app_id == self.id).first() return site @property - def app_model_config(self) -> Optional["AppModelConfig"]: + def app_model_config(self) -> AppModelConfig | None: if self.app_model_config_id: return db.session.query(AppModelConfig).where(AppModelConfig.id == self.app_model_config_id).first() return None @property - def workflow(self) -> Optional["Workflow"]: + def workflow(self) -> Workflow | None: if self.workflow_id: from .workflow import Workflow @@ -287,7 +292,7 @@ class App(Base): return deleted_tools @property - def tags(self) -> list["Tag"]: + def tags(self) -> list[Tag]: tags = ( db.session.query(Tag) .join(TagBinding, Tag.id == TagBinding.tag_id) @@ -600,6 +605,64 @@ class InstalledApp(TypeBase): return tenant +class TrialApp(Base): + __tablename__ = "trial_apps" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="trial_app_pkey"), + sa.Index("trial_app_app_id_idx", "app_id"), + sa.Index("trial_app_tenant_id_idx", "tenant_id"), + sa.UniqueConstraint("app_id", name="unique_trail_app_id"), + ) + + id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=False) + tenant_id = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + trial_limit = mapped_column(sa.Integer, nullable=False, default=3) + + @property + def app(self) -> App | None: + app = db.session.query(App).where(App.id == self.app_id).first() + return app + + +class AccountTrialAppRecord(Base): + __tablename__ = 
"account_trial_app_records" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="user_trial_app_pkey"), + sa.Index("account_trial_app_record_account_id_idx", "account_id"), + sa.Index("account_trial_app_record_app_id_idx", "app_id"), + sa.UniqueConstraint("account_id", "app_id", name="unique_account_trial_app_record"), + ) + id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + account_id = mapped_column(StringUUID, nullable=False) + app_id = mapped_column(StringUUID, nullable=False) + count = mapped_column(sa.Integer, nullable=False, default=0) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + + @property + def app(self) -> App | None: + app = db.session.query(App).where(App.id == self.app_id).first() + return app + + @property + def user(self) -> Account | None: + user = db.session.query(Account).where(Account.id == self.account_id).first() + return user + + +class ExporleBanner(Base): + __tablename__ = "exporle_banners" + __table_args__ = (sa.PrimaryKeyConstraint("id", name="exporler_banner_pkey"),) + id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + content = mapped_column(sa.JSON, nullable=False) + link = mapped_column(String(255), nullable=False) + sort = mapped_column(sa.Integer, nullable=False) + status = mapped_column(sa.String(255), nullable=False, server_default=sa.text("'enabled'::character varying")) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + language = mapped_column(String(255), nullable=False, server_default=sa.text("'en-US'::character varying")) + + class OAuthProviderApp(TypeBase): """ Globally shared OAuth provider app information. @@ -1193,7 +1256,7 @@ class Message(Base): return json.loads(self.message_metadata) if self.message_metadata else {} @property - def agent_thoughts(self) -> list["MessageAgentThought"]: + def agent_thoughts(self) -> list[MessageAgentThought]: return ( db.session.query(MessageAgentThought) .where(MessageAgentThought.message_id == self.id) @@ -1201,6 +1264,18 @@ class Message(Base): .all() ) + # FIXME (Novice) -- It's easy to cause N+1 query problem here. + @property + def generation_detail(self) -> dict[str, Any] | None: + """ + Get LLM generation detail for this message. + Returns the detail as a dictionary or None if not found. 
+ """ + detail = db.session.query(LLMGenerationDetail).filter_by(message_id=self.id).first() + if detail: + return detail.to_dict() + return None + @property def retriever_resources(self) -> Any: return self.message_metadata_dict.get("retriever_resources") if self.message_metadata else [] @@ -1306,7 +1381,7 @@ class Message(Base): } @classmethod - def from_dict(cls, data: dict[str, Any]) -> "Message": + def from_dict(cls, data: dict[str, Any]) -> Message: return cls( id=data["id"], app_id=data["app_id"], @@ -1419,15 +1494,20 @@ class MessageAnnotation(Base): app_id: Mapped[str] = mapped_column(StringUUID) conversation_id: Mapped[str | None] = mapped_column(StringUUID, sa.ForeignKey("conversations.id")) message_id: Mapped[str | None] = mapped_column(StringUUID) - question = mapped_column(LongText, nullable=True) - content = mapped_column(LongText, nullable=False) + question: Mapped[str | None] = mapped_column(LongText, nullable=True) + content: Mapped[str] = mapped_column(LongText, nullable=False) hit_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) - account_id = mapped_column(StringUUID, nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = mapped_column( + account_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column( sa.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() ) + @property + def question_text(self) -> str: + """Return a non-null question string, falling back to the answer content.""" + return self.question or self.content + @property def account(self): account = db.session.query(Account).where(Account.id == self.account_id).first() @@ -1528,7 +1608,7 @@ class OperationLog(TypeBase): tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) account_id: Mapped[str] = mapped_column(StringUUID, nullable=False) action: Mapped[str] = mapped_column(String(255), nullable=False) - content: Mapped[Any] = mapped_column(sa.JSON) + content: Mapped[Any | None] = mapped_column(sa.JSON, nullable=True) created_at: Mapped[datetime] = mapped_column( sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False ) @@ -1835,7 +1915,7 @@ class MessageChain(TypeBase): ) -class MessageAgentThought(Base): +class MessageAgentThought(TypeBase): __tablename__ = "message_agent_thoughts" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="message_agent_thought_pkey"), @@ -1843,34 +1923,42 @@ class MessageAgentThought(Base): sa.Index("message_agent_thought_message_chain_id_idx", "message_chain_id"), ) - id = mapped_column(StringUUID, default=lambda: str(uuid4())) - message_id = mapped_column(StringUUID, nullable=False) - message_chain_id = mapped_column(StringUUID, nullable=True) + id: Mapped[str] = mapped_column( + StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False + ) + message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) position: Mapped[int] = mapped_column(sa.Integer, nullable=False) - thought = mapped_column(LongText, nullable=True) - tool = mapped_column(LongText, nullable=True) - tool_labels_str = mapped_column(LongText, nullable=False, default=sa.text("'{}'")) - tool_meta_str = mapped_column(LongText, nullable=False, default=sa.text("'{}'")) - tool_input = 
mapped_column(LongText, nullable=True) - observation = mapped_column(LongText, nullable=True) + created_by_role: Mapped[str] = mapped_column(String(255), nullable=False) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + message_chain_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None) + thought: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) + tool: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) + tool_labels_str: Mapped[str] = mapped_column(LongText, nullable=False, default=sa.text("'{}'")) + tool_meta_str: Mapped[str] = mapped_column(LongText, nullable=False, default=sa.text("'{}'")) + tool_input: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) + observation: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) # plugin_id = mapped_column(StringUUID, nullable=True) ## for future design - tool_process_data = mapped_column(LongText, nullable=True) - message = mapped_column(LongText, nullable=True) - message_token: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) - message_unit_price = mapped_column(sa.Numeric, nullable=True) - message_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")) - message_files = mapped_column(LongText, nullable=True) - answer = mapped_column(LongText, nullable=True) - answer_token: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) - answer_unit_price = mapped_column(sa.Numeric, nullable=True) - answer_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")) - tokens: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) - total_price = mapped_column(sa.Numeric, nullable=True) - currency = mapped_column(String(255), nullable=True) - latency: Mapped[float | None] = mapped_column(sa.Float, nullable=True) - created_by_role = mapped_column(String(255), nullable=False) - created_by = mapped_column(StringUUID, nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp()) + tool_process_data: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) + message: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) + message_token: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None) + message_unit_price: Mapped[Decimal | None] = mapped_column(sa.Numeric, nullable=True, default=None) + message_price_unit: Mapped[Decimal] = mapped_column( + sa.Numeric(10, 7), nullable=False, default=Decimal("0.001"), server_default=sa.text("0.001") + ) + message_files: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) + answer: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) + answer_token: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None) + answer_unit_price: Mapped[Decimal | None] = mapped_column(sa.Numeric, nullable=True, default=None) + answer_price_unit: Mapped[Decimal] = mapped_column( + sa.Numeric(10, 7), nullable=False, default=Decimal("0.001"), server_default=sa.text("0.001") + ) + tokens: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None) + total_price: Mapped[Decimal | None] = mapped_column(sa.Numeric, nullable=True, default=None) + currency: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None) + latency: Mapped[float | None] = mapped_column(sa.Float, nullable=True, default=None) + 
created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, init=False, server_default=sa.func.current_timestamp() + ) @property def files(self) -> list[Any]: @@ -2065,3 +2153,113 @@ class TraceAppConfig(TypeBase): "created_at": str(self.created_at) if self.created_at else None, "updated_at": str(self.updated_at) if self.updated_at else None, } + + +class TenantCreditPool(Base): + __tablename__ = "tenant_credit_pools" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="tenant_credit_pool_pkey"), + sa.Index("tenant_credit_pool_tenant_id_idx", "tenant_id"), + sa.Index("tenant_credit_pool_pool_type_idx", "pool_type"), + ) + + id = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + pool_type = mapped_column(String(40), nullable=False, default="trial", server_default="trial") + quota_limit = mapped_column(BigInteger, nullable=False, default=0) + quota_used = mapped_column(BigInteger, nullable=False, default=0) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=text("CURRENT_TIMESTAMP")) + updated_at = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() + ) + + @property + def remaining_credits(self) -> int: + return max(0, self.quota_limit - self.quota_used) + + def has_sufficient_credits(self, required_credits: int) -> bool: + return self.remaining_credits >= required_credits + + +class LLMGenerationDetail(Base): + """ + Store LLM generation details including reasoning process and tool calls. + + Association (choose one): + - For apps with Message: use message_id (one-to-one) + - For Workflow: use workflow_run_id + node_id (one run may have multiple LLM nodes) + """ + + __tablename__ = "llm_generation_details" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="llm_generation_detail_pkey"), + sa.Index("idx_llm_generation_detail_message", "message_id"), + sa.Index("idx_llm_generation_detail_workflow", "workflow_run_id", "node_id"), + sa.CheckConstraint( + "(message_id IS NOT NULL AND workflow_run_id IS NULL AND node_id IS NULL)" + " OR " + "(message_id IS NULL AND workflow_run_id IS NOT NULL AND node_id IS NOT NULL)", + name="ck_llm_generation_detail_assoc_mode", + ), + ) + + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + + # Association fields (choose one) + message_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, unique=True) + workflow_run_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) + node_id: Mapped[str | None] = mapped_column(String(255), nullable=True) + + # Core data as JSON strings + reasoning_content: Mapped[str | None] = mapped_column(LongText) + tool_calls: Mapped[str | None] = mapped_column(LongText) + sequence: Mapped[str | None] = mapped_column(LongText) + + created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + + def to_domain_model(self) -> LLMGenerationDetailData: + """Convert to Pydantic domain model with proper validation.""" + from core.app.entities.llm_generation_entities import LLMGenerationDetailData + + return LLMGenerationDetailData( + reasoning_content=json.loads(self.reasoning_content) if self.reasoning_content else [], + tool_calls=json.loads(self.tool_calls) if self.tool_calls else [], + 
sequence=json.loads(self.sequence) if self.sequence else [], + ) + + def to_dict(self) -> dict[str, Any]: + """Convert to dictionary for API response.""" + return self.to_domain_model().to_response_dict() + + @classmethod + def from_domain_model( + cls, + data: LLMGenerationDetailData, + *, + tenant_id: str, + app_id: str, + message_id: str | None = None, + workflow_run_id: str | None = None, + node_id: str | None = None, + ) -> LLMGenerationDetail: + """Create from Pydantic domain model.""" + # Enforce association mode at object creation time as well. + message_mode = message_id is not None + workflow_mode = workflow_run_id is not None or node_id is not None + if message_mode and workflow_mode: + raise ValueError("LLMGenerationDetail cannot set both message_id and workflow_run_id/node_id.") + if not message_mode and not (workflow_run_id and node_id): + raise ValueError("LLMGenerationDetail requires either message_id or workflow_run_id+node_id.") + + return cls( + tenant_id=tenant_id, + app_id=app_id, + message_id=message_id, + workflow_run_id=workflow_run_id, + node_id=node_id, + reasoning_content=json.dumps(data.reasoning_content) if data.reasoning_content else None, + tool_calls=json.dumps([tc.model_dump() for tc in data.tool_calls]) if data.tool_calls else None, + sequence=json.dumps([seg.model_dump() for seg in data.sequence]) if data.sequence else None, + ) diff --git a/api/models/provider.py b/api/models/provider.py index 2afd8c5329..441b54c797 100644 --- a/api/models/provider.py +++ b/api/models/provider.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from datetime import datetime from enum import StrEnum, auto from functools import cached_property @@ -19,7 +21,7 @@ class ProviderType(StrEnum): SYSTEM = auto() @staticmethod - def value_of(value: str) -> "ProviderType": + def value_of(value: str) -> ProviderType: for member in ProviderType: if member.value == value: return member @@ -37,7 +39,7 @@ class ProviderQuotaType(StrEnum): """hosted trial quota""" @staticmethod - def value_of(value: str) -> "ProviderQuotaType": + def value_of(value: str) -> ProviderQuotaType: for member in ProviderQuotaType: if member.value == value: return member @@ -76,7 +78,7 @@ class Provider(TypeBase): quota_type: Mapped[str | None] = mapped_column(String(40), nullable=True, server_default=text("''"), default="") quota_limit: Mapped[int | None] = mapped_column(sa.BigInteger, nullable=True, default=None) - quota_used: Mapped[int] = mapped_column(sa.BigInteger, nullable=False, default=0) + quota_used: Mapped[int | None] = mapped_column(sa.BigInteger, nullable=True, default=0) created_at: Mapped[datetime] = mapped_column( DateTime, nullable=False, server_default=func.current_timestamp(), init=False diff --git a/api/models/tools.py b/api/models/tools.py index e4f9bcb582..e7b98dcf27 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from datetime import datetime from decimal import Decimal @@ -167,11 +169,11 @@ class ApiToolProvider(TypeBase): ) @property - def schema_type(self) -> "ApiProviderSchemaType": + def schema_type(self) -> ApiProviderSchemaType: return ApiProviderSchemaType.value_of(self.schema_type_str) @property - def tools(self) -> list["ApiToolBundle"]: + def tools(self) -> list[ApiToolBundle]: return [ApiToolBundle.model_validate(tool) for tool in json.loads(self.tools_str)] @property @@ -267,7 +269,7 @@ class WorkflowToolProvider(TypeBase): return db.session.query(Tenant).where(Tenant.id == 
self.tenant_id).first() @property - def parameter_configurations(self) -> list["WorkflowToolParameterConfiguration"]: + def parameter_configurations(self) -> list[WorkflowToolParameterConfiguration]: return [ WorkflowToolParameterConfiguration.model_validate(config) for config in json.loads(self.parameter_configuration) @@ -359,7 +361,7 @@ class MCPToolProvider(TypeBase): except (json.JSONDecodeError, TypeError): return [] - def to_entity(self) -> "MCPProviderEntity": + def to_entity(self) -> MCPProviderEntity: """Convert to domain entity""" from core.entities.mcp_provider import MCPProviderEntity @@ -533,5 +535,5 @@ class DeprecatedPublishedAppTool(TypeBase): ) @property - def description_i18n(self) -> "I18nObject": + def description_i18n(self) -> I18nObject: return I18nObject.model_validate(json.loads(self.description)) diff --git a/api/models/trigger.py b/api/models/trigger.py index 87e2a5ccfc..209345eb84 100644 --- a/api/models/trigger.py +++ b/api/models/trigger.py @@ -415,7 +415,7 @@ class AppTrigger(TypeBase): node_id: Mapped[str | None] = mapped_column(String(64), nullable=False) trigger_type: Mapped[str] = mapped_column(EnumText(AppTriggerType, length=50), nullable=False) title: Mapped[str] = mapped_column(String(255), nullable=False) - provider_name: Mapped[str] = mapped_column(String(255), server_default="", default="") # why it is nullable? + provider_name: Mapped[str | None] = mapped_column(String(255), nullable=True, server_default="", default="") status: Mapped[str] = mapped_column( EnumText(AppTriggerStatus, length=50), nullable=False, default=AppTriggerStatus.ENABLED ) diff --git a/api/models/workflow.py b/api/models/workflow.py index 853d5afefc..7e8a0f7c2e 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -1,9 +1,11 @@ +from __future__ import annotations + import json import logging from collections.abc import Generator, Mapping, Sequence from datetime import datetime from enum import StrEnum -from typing import TYPE_CHECKING, Any, Optional, Union, cast +from typing import TYPE_CHECKING, Any, Union, cast from uuid import uuid4 import sqlalchemy as sa @@ -57,6 +59,37 @@ from .types import EnumText, LongText, StringUUID logger = logging.getLogger(__name__) +def is_generation_outputs(outputs: Mapping[str, Any]) -> bool: + if not outputs: + return False + + allowed_sequence_types = {"reasoning", "content", "tool_call"} + + def valid_sequence_item(item: Mapping[str, Any]) -> bool: + return isinstance(item, Mapping) and item.get("type") in allowed_sequence_types + + def valid_value(value: Any) -> bool: + if not isinstance(value, Mapping): + return False + + content = value.get("content") + reasoning_content = value.get("reasoning_content") + tool_calls = value.get("tool_calls") + sequence = value.get("sequence") + + return ( + isinstance(content, str) + and isinstance(reasoning_content, list) + and all(isinstance(item, str) for item in reasoning_content) + and isinstance(tool_calls, list) + and all(isinstance(item, Mapping) for item in tool_calls) + and isinstance(sequence, list) + and all(valid_sequence_item(item) for item in sequence) + ) + + return all(valid_value(value) for value in outputs.values()) + + class WorkflowType(StrEnum): """ Workflow Type Enum @@ -67,7 +100,7 @@ class WorkflowType(StrEnum): RAG_PIPELINE = "rag-pipeline" @classmethod - def value_of(cls, value: str) -> "WorkflowType": + def value_of(cls, value: str) -> WorkflowType: """ Get value of given mode. 
@@ -80,7 +113,7 @@ class WorkflowType(StrEnum): raise ValueError(f"invalid workflow type value {value}") @classmethod - def from_app_mode(cls, app_mode: Union[str, "AppMode"]) -> "WorkflowType": + def from_app_mode(cls, app_mode: Union[str, AppMode]) -> WorkflowType: """ Get workflow type from app mode. @@ -181,7 +214,7 @@ class Workflow(Base): # bug rag_pipeline_variables: list[dict], marked_name: str = "", marked_comment: str = "", - ) -> "Workflow": + ) -> Workflow: workflow = Workflow() workflow.id = str(uuid4()) workflow.tenant_id = tenant_id @@ -619,7 +652,7 @@ class WorkflowRun(Base): finished_at: Mapped[datetime | None] = mapped_column(DateTime) exceptions_count: Mapped[int] = mapped_column(sa.Integer, server_default=sa.text("0"), nullable=True) - pause: Mapped[Optional["WorkflowPause"]] = orm.relationship( + pause: Mapped[WorkflowPause | None] = orm.relationship( "WorkflowPause", primaryjoin="WorkflowRun.id == foreign(WorkflowPause.workflow_run_id)", uselist=False, @@ -664,6 +697,10 @@ class WorkflowRun(Base): def workflow(self): return db.session.query(Workflow).where(Workflow.id == self.workflow_id).first() + @property + def outputs_as_generation(self): + return is_generation_outputs(self.outputs_dict) + def to_dict(self): return { "id": self.id, @@ -677,6 +714,7 @@ class WorkflowRun(Base): "inputs": self.inputs_dict, "status": self.status, "outputs": self.outputs_dict, + "outputs_as_generation": self.outputs_as_generation, "error": self.error, "elapsed_time": self.elapsed_time, "total_tokens": self.total_tokens, @@ -689,7 +727,7 @@ class WorkflowRun(Base): } @classmethod - def from_dict(cls, data: dict[str, Any]) -> "WorkflowRun": + def from_dict(cls, data: dict[str, Any]) -> WorkflowRun: return cls( id=data.get("id"), tenant_id=data.get("tenant_id"), @@ -841,7 +879,7 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo created_by: Mapped[str] = mapped_column(StringUUID) finished_at: Mapped[datetime | None] = mapped_column(DateTime) - offload_data: Mapped[list["WorkflowNodeExecutionOffload"]] = orm.relationship( + offload_data: Mapped[list[WorkflowNodeExecutionOffload]] = orm.relationship( "WorkflowNodeExecutionOffload", primaryjoin="WorkflowNodeExecutionModel.id == foreign(WorkflowNodeExecutionOffload.node_execution_id)", uselist=True, @@ -851,13 +889,13 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo @staticmethod def preload_offload_data( - query: Select[tuple["WorkflowNodeExecutionModel"]] | orm.Query["WorkflowNodeExecutionModel"], + query: Select[tuple[WorkflowNodeExecutionModel]] | orm.Query[WorkflowNodeExecutionModel], ): return query.options(orm.selectinload(WorkflowNodeExecutionModel.offload_data)) @staticmethod def preload_offload_data_and_files( - query: Select[tuple["WorkflowNodeExecutionModel"]] | orm.Query["WorkflowNodeExecutionModel"], + query: Select[tuple[WorkflowNodeExecutionModel]] | orm.Query[WorkflowNodeExecutionModel], ): return query.options( orm.selectinload(WorkflowNodeExecutionModel.offload_data).options( @@ -932,7 +970,7 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo ) return extras - def _get_offload_by_type(self, type_: ExecutionOffLoadType) -> Optional["WorkflowNodeExecutionOffload"]: + def _get_offload_by_type(self, type_: ExecutionOffLoadType) -> WorkflowNodeExecutionOffload | None: return next(iter([i for i in self.offload_data if i.type_ == type_]), None) @property @@ -1046,7 +1084,7 @@ class WorkflowNodeExecutionOffload(Base): 
back_populates="offload_data", ) - file: Mapped[Optional["UploadFile"]] = orm.relationship( + file: Mapped[UploadFile | None] = orm.relationship( foreign_keys=[file_id], lazy="raise", uselist=False, @@ -1064,7 +1102,7 @@ class WorkflowAppLogCreatedFrom(StrEnum): INSTALLED_APP = "installed-app" @classmethod - def value_of(cls, value: str) -> "WorkflowAppLogCreatedFrom": + def value_of(cls, value: str) -> WorkflowAppLogCreatedFrom: """ Get value of given mode. @@ -1181,7 +1219,7 @@ class ConversationVariable(TypeBase): ) @classmethod - def from_variable(cls, *, app_id: str, conversation_id: str, variable: Variable) -> "ConversationVariable": + def from_variable(cls, *, app_id: str, conversation_id: str, variable: Variable) -> ConversationVariable: obj = cls( id=variable.id, app_id=app_id, @@ -1334,7 +1372,7 @@ class WorkflowDraftVariable(Base): ) # Relationship to WorkflowDraftVariableFile - variable_file: Mapped[Optional["WorkflowDraftVariableFile"]] = orm.relationship( + variable_file: Mapped[WorkflowDraftVariableFile | None] = orm.relationship( foreign_keys=[file_id], lazy="raise", uselist=False, @@ -1504,8 +1542,9 @@ class WorkflowDraftVariable(Base): node_execution_id: str | None, description: str = "", file_id: str | None = None, - ) -> "WorkflowDraftVariable": + ) -> WorkflowDraftVariable: variable = WorkflowDraftVariable() + variable.id = str(uuid4()) variable.created_at = naive_utc_now() variable.updated_at = naive_utc_now() variable.description = description @@ -1526,7 +1565,7 @@ class WorkflowDraftVariable(Base): name: str, value: Segment, description: str = "", - ) -> "WorkflowDraftVariable": + ) -> WorkflowDraftVariable: variable = cls._new( app_id=app_id, node_id=CONVERSATION_VARIABLE_NODE_ID, @@ -1547,7 +1586,7 @@ class WorkflowDraftVariable(Base): value: Segment, node_execution_id: str, editable: bool = False, - ) -> "WorkflowDraftVariable": + ) -> WorkflowDraftVariable: variable = cls._new( app_id=app_id, node_id=SYSTEM_VARIABLE_NODE_ID, @@ -1570,7 +1609,7 @@ class WorkflowDraftVariable(Base): visible: bool = True, editable: bool = True, file_id: str | None = None, - ) -> "WorkflowDraftVariable": + ) -> WorkflowDraftVariable: variable = cls._new( app_id=app_id, node_id=node_id, @@ -1666,7 +1705,7 @@ class WorkflowDraftVariableFile(Base): ) # Relationship to UploadFile - upload_file: Mapped["UploadFile"] = orm.relationship( + upload_file: Mapped[UploadFile] = orm.relationship( foreign_keys=[upload_file_id], lazy="raise", uselist=False, @@ -1733,7 +1772,7 @@ class WorkflowPause(DefaultFieldsMixin, Base): state_object_key: Mapped[str] = mapped_column(String(length=255), nullable=False) # Relationship to WorkflowRun - workflow_run: Mapped["WorkflowRun"] = orm.relationship( + workflow_run: Mapped[WorkflowRun] = orm.relationship( foreign_keys=[workflow_run_id], # require explicit preloading. 
lazy="raise", @@ -1789,7 +1828,7 @@ class WorkflowPauseReason(DefaultFieldsMixin, Base): ) @classmethod - def from_entity(cls, pause_reason: PauseReason) -> "WorkflowPauseReason": + def from_entity(cls, pause_reason: PauseReason) -> WorkflowPauseReason: if isinstance(pause_reason, HumanInputRequired): return cls( type_=PauseReasonType.HUMAN_INPUT_REQUIRED, form_id=pause_reason.form_id, node_id=pause_reason.node_id diff --git a/api/pyproject.toml b/api/pyproject.toml index 4fbd7433d1..7d2d68bc8d 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,9 +1,10 @@ [project] name = "dify-api" -version = "1.11.1" +version = "1.11.2" requires-python = ">=3.11,<3.13" dependencies = [ + "aliyun-log-python-sdk~=0.9.37", "arize-phoenix-otel~=0.9.2", "azure-identity==1.16.1", "beautifulsoup4==4.12.2", @@ -31,6 +32,7 @@ dependencies = [ "httpx[socks]~=0.27.0", "jieba==0.42.1", "json-repair>=0.41.1", + "jsonschema>=4.25.1", "langfuse~=2.51.3", "langsmith~=0.1.77", "markdown~=3.5.1", @@ -67,7 +69,7 @@ dependencies = [ "pydantic-extra-types~=2.10.3", "pydantic-settings~=2.11.0", "pyjwt~=2.10.1", - "pypdfium2==4.30.0", + "pypdfium2==5.2.0", "python-docx~=1.1.0", "python-dotenv==1.0.1", "pyyaml~=6.0.1", @@ -91,7 +93,6 @@ dependencies = [ "weaviate-client==4.17.0", "apscheduler>=3.11.0", "weave>=0.52.16", - "jsonschema>=4.25.1", ] # Before adding new dependency, consider place it in # alphabet order (a-z) and suitable group. @@ -188,7 +189,7 @@ storage = [ "opendal~=0.46.0", "oss2==2.18.5", "supabase~=2.18.1", - "tos~=2.7.1", + "tos~=2.9.0", ] ############################################################ diff --git a/api/repositories/api_workflow_run_repository.py b/api/repositories/api_workflow_run_repository.py index fd547c78ba..22c3ea7130 100644 --- a/api/repositories/api_workflow_run_repository.py +++ b/api/repositories/api_workflow_run_repository.py @@ -34,10 +34,12 @@ Example: ``` """ -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import datetime from typing import Protocol +from sqlalchemy.orm import Session + from core.workflow.entities.pause_reason import PauseReason from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository from libs.infinite_scroll_pagination import InfiniteScrollPagination @@ -253,6 +255,30 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol): """ ... + def get_runs_batch_by_time_range( + self, + start_after: datetime | None, + end_before: datetime, + last_seen: tuple[datetime, str] | None, + batch_size: int, + ) -> Sequence[WorkflowRun]: + """ + Fetch a batch of workflow runs within a time window using keyset pagination. + """ + ... + + def delete_runs_with_related( + self, + runs: Sequence[WorkflowRun], + delete_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None, + delete_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None, + ) -> dict[str, int]: + """ + Delete workflow runs and their related records (node executions, offloads, app logs, + trigger logs, pauses, pause reasons). + """ + ... 
+ def create_workflow_pause( self, workflow_run_id: str, diff --git a/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py b/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py index 7e2173acdd..004bd7cad9 100644 --- a/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py @@ -7,13 +7,13 @@ using SQLAlchemy 2.0 style queries for WorkflowNodeExecutionModel operations. from collections.abc import Sequence from datetime import datetime -from typing import cast +from typing import TypedDict, cast -from sqlalchemy import asc, delete, desc, select +from sqlalchemy import asc, delete, desc, select, tuple_ from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session, sessionmaker -from models.workflow import WorkflowNodeExecutionModel +from models.workflow import WorkflowNodeExecutionModel, WorkflowNodeExecutionOffload from repositories.api_workflow_node_execution_repository import DifyAPIWorkflowNodeExecutionRepository @@ -290,3 +290,64 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut result = cast(CursorResult, session.execute(stmt)) session.commit() return result.rowcount + + class RunContext(TypedDict): + run_id: str + tenant_id: str + app_id: str + workflow_id: str + triggered_from: str + + @staticmethod + def delete_by_runs(session: Session, runs: Sequence[RunContext]) -> tuple[int, int]: + """ + Delete node executions (and offloads) for the given workflow runs using indexed columns. + + Uses the composite index on (tenant_id, app_id, workflow_id, triggered_from, workflow_run_id) + by filtering on those columns with tuple IN. + """ + if not runs: + return 0, 0 + + tuple_values = [ + (run["tenant_id"], run["app_id"], run["workflow_id"], run["triggered_from"], run["run_id"]) for run in runs + ] + + node_execution_ids = session.scalars( + select(WorkflowNodeExecutionModel.id).where( + tuple_( + WorkflowNodeExecutionModel.tenant_id, + WorkflowNodeExecutionModel.app_id, + WorkflowNodeExecutionModel.workflow_id, + WorkflowNodeExecutionModel.triggered_from, + WorkflowNodeExecutionModel.workflow_run_id, + ).in_(tuple_values) + ) + ).all() + + if not node_execution_ids: + return 0, 0 + + offloads_deleted = ( + cast( + CursorResult, + session.execute( + delete(WorkflowNodeExecutionOffload).where( + WorkflowNodeExecutionOffload.node_execution_id.in_(node_execution_ids) + ) + ), + ).rowcount + or 0 + ) + + node_executions_deleted = ( + cast( + CursorResult, + session.execute( + delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(node_execution_ids)) + ), + ).rowcount + or 0 + ) + + return node_executions_deleted, offloads_deleted diff --git a/api/repositories/sqlalchemy_api_workflow_run_repository.py b/api/repositories/sqlalchemy_api_workflow_run_repository.py index b172c6a3ac..f081124a80 100644 --- a/api/repositories/sqlalchemy_api_workflow_run_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_run_repository.py @@ -21,7 +21,7 @@ Implementation Notes: import logging import uuid -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import datetime from decimal import Decimal from typing import Any, cast @@ -40,8 +40,14 @@ from libs.infinite_scroll_pagination import InfiniteScrollPagination from libs.time_parser import get_time_threshold from libs.uuid_utils import uuidv7 from models.enums import WorkflowRunTriggeredFrom -from models.workflow import 
WorkflowPause as WorkflowPauseModel -from models.workflow import WorkflowPauseReason, WorkflowRun +from models.workflow import ( + WorkflowAppLog, + WorkflowPauseReason, + WorkflowRun, +) +from models.workflow import ( + WorkflowPause as WorkflowPauseModel, +) from repositories.api_workflow_run_repository import APIWorkflowRunRepository from repositories.entities.workflow_pause import WorkflowPauseEntity from repositories.types import ( @@ -314,6 +320,102 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): logger.info("Total deleted %s workflow runs for app %s", total_deleted, app_id) return total_deleted + def get_runs_batch_by_time_range( + self, + start_after: datetime | None, + end_before: datetime, + last_seen: tuple[datetime, str] | None, + batch_size: int, + ) -> Sequence[WorkflowRun]: + with self._session_maker() as session: + stmt = ( + select(WorkflowRun) + .where( + WorkflowRun.created_at < end_before, + WorkflowRun.status.in_( + [ + WorkflowExecutionStatus.SUCCEEDED.value, + WorkflowExecutionStatus.FAILED.value, + WorkflowExecutionStatus.STOPPED.value, + WorkflowExecutionStatus.PARTIAL_SUCCEEDED.value, + ] + ), + ) + .order_by(WorkflowRun.created_at.asc(), WorkflowRun.id.asc()) + .limit(batch_size) + ) + + if start_after: + stmt = stmt.where(WorkflowRun.created_at >= start_after) + + if last_seen: + stmt = stmt.where( + or_( + WorkflowRun.created_at > last_seen[0], + and_(WorkflowRun.created_at == last_seen[0], WorkflowRun.id > last_seen[1]), + ) + ) + + return session.scalars(stmt).all() + + def delete_runs_with_related( + self, + runs: Sequence[WorkflowRun], + delete_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None, + delete_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None, + ) -> dict[str, int]: + if not runs: + return { + "runs": 0, + "node_executions": 0, + "offloads": 0, + "app_logs": 0, + "trigger_logs": 0, + "pauses": 0, + "pause_reasons": 0, + } + + with self._session_maker() as session: + run_ids = [run.id for run in runs] + if delete_node_executions: + node_executions_deleted, offloads_deleted = delete_node_executions(session, runs) + else: + node_executions_deleted, offloads_deleted = 0, 0 + + app_logs_result = session.execute(delete(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(run_ids))) + app_logs_deleted = cast(CursorResult, app_logs_result).rowcount or 0 + + pause_ids = session.scalars( + select(WorkflowPauseModel.id).where(WorkflowPauseModel.workflow_run_id.in_(run_ids)) + ).all() + pause_reasons_deleted = 0 + pauses_deleted = 0 + + if pause_ids: + pause_reasons_result = session.execute( + delete(WorkflowPauseReason).where(WorkflowPauseReason.pause_id.in_(pause_ids)) + ) + pause_reasons_deleted = cast(CursorResult, pause_reasons_result).rowcount or 0 + pauses_result = session.execute(delete(WorkflowPauseModel).where(WorkflowPauseModel.id.in_(pause_ids))) + pauses_deleted = cast(CursorResult, pauses_result).rowcount or 0 + + trigger_logs_deleted = delete_trigger_logs(session, run_ids) if delete_trigger_logs else 0 + + runs_result = session.execute(delete(WorkflowRun).where(WorkflowRun.id.in_(run_ids))) + runs_deleted = cast(CursorResult, runs_result).rowcount or 0 + + session.commit() + + return { + "runs": runs_deleted, + "node_executions": node_executions_deleted, + "offloads": offloads_deleted, + "app_logs": app_logs_deleted, + "trigger_logs": trigger_logs_deleted, + "pauses": pauses_deleted, + "pause_reasons": pause_reasons_deleted, + } + def 
create_workflow_pause( self, workflow_run_id: str, diff --git a/api/repositories/sqlalchemy_workflow_trigger_log_repository.py b/api/repositories/sqlalchemy_workflow_trigger_log_repository.py index 0d67e286b0..d01c35e5ab 100644 --- a/api/repositories/sqlalchemy_workflow_trigger_log_repository.py +++ b/api/repositories/sqlalchemy_workflow_trigger_log_repository.py @@ -4,8 +4,10 @@ SQLAlchemy implementation of WorkflowTriggerLogRepository. from collections.abc import Sequence from datetime import UTC, datetime, timedelta +from typing import cast -from sqlalchemy import and_, select +from sqlalchemy import and_, delete, select +from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session from models.enums import WorkflowTriggerStatus @@ -84,3 +86,19 @@ class SQLAlchemyWorkflowTriggerLogRepository(WorkflowTriggerLogRepository): ) return list(self.session.scalars(query).all()) + + def delete_by_run_ids(self, run_ids: Sequence[str]) -> int: + """ + Delete trigger logs associated with the given workflow run ids. + + Args: + run_ids: Collection of workflow run identifiers. + + Returns: + Number of rows deleted. + """ + if not run_ids: + return 0 + + result = self.session.execute(delete(WorkflowTriggerLog).where(WorkflowTriggerLog.workflow_run_id.in_(run_ids))) + return cast(CursorResult, result).rowcount or 0 diff --git a/api/schedule/clean_workflow_runs_task.py b/api/schedule/clean_workflow_runs_task.py new file mode 100644 index 0000000000..b59dc7f823 --- /dev/null +++ b/api/schedule/clean_workflow_runs_task.py @@ -0,0 +1,30 @@ +import click + +import app +from configs import dify_config +from services.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup + +CLEANUP_QUEUE = "retention" + + +@app.celery.task(queue=CLEANUP_QUEUE) +def clean_workflow_runs_task() -> None: + """ + Scheduled cleanup for workflow runs and related records (sandbox tenants only). 
+ """ + click.echo( + click.style( + f"Scheduled workflow run cleanup starting: cutoff={dify_config.WORKFLOW_LOG_RETENTION_DAYS} days, " + f"batch={dify_config.WORKFLOW_LOG_CLEANUP_BATCH_SIZE}", + fg="green", + ) + ) + + WorkflowRunCleanup( + days=dify_config.WORKFLOW_LOG_RETENTION_DAYS, + batch_size=dify_config.WORKFLOW_LOG_CLEANUP_BATCH_SIZE, + start_after=None, + end_before=None, + ).run() + + click.echo(click.style("Scheduled workflow run cleanup finished.", fg="green")) diff --git a/api/schedule/queue_monitor_task.py b/api/schedule/queue_monitor_task.py index db610df290..77d6b5a138 100644 --- a/api/schedule/queue_monitor_task.py +++ b/api/schedule/queue_monitor_task.py @@ -16,6 +16,11 @@ celery_redis = Redis( port=redis_config.get("port") or 6379, password=redis_config.get("password") or None, db=int(redis_config.get("virtual_host")) if redis_config.get("virtual_host") else 1, + ssl=bool(dify_config.BROKER_USE_SSL), + ssl_ca_certs=dify_config.REDIS_SSL_CA_CERTS if dify_config.BROKER_USE_SSL else None, + ssl_cert_reqs=getattr(dify_config, "REDIS_SSL_CERT_REQS", None) if dify_config.BROKER_USE_SSL else None, + ssl_certfile=getattr(dify_config, "REDIS_SSL_CERTFILE", None) if dify_config.BROKER_USE_SSL else None, + ssl_keyfile=getattr(dify_config, "REDIS_SSL_KEYFILE", None) if dify_config.BROKER_USE_SSL else None, ) logger = logging.getLogger(__name__) diff --git a/api/services/account_service.py b/api/services/account_service.py index 5a549dc318..d38c9d5a66 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -999,6 +999,11 @@ class TenantService: tenant.encrypt_public_key = generate_key_pair(tenant.id) db.session.commit() + + from services.credit_pool_service import CreditPoolService + + CreditPoolService.create_default_pool(tenant.id) + return tenant @staticmethod diff --git a/api/services/annotation_service.py b/api/services/annotation_service.py index d03cbddceb..ca41bcef9d 100644 --- a/api/services/annotation_service.py +++ b/api/services/annotation_service.py @@ -77,7 +77,7 @@ class AppAnnotationService: if annotation_setting: add_annotation_to_index_task.delay( annotation.id, - annotation.question, + question, current_tenant_id, app_id, annotation_setting.collection_binding_id, @@ -137,13 +137,16 @@ class AppAnnotationService: if not app: raise NotFound("App not found") if keyword: + from libs.helper import escape_like_pattern + + escaped_keyword = escape_like_pattern(keyword) stmt = ( select(MessageAnnotation) .where(MessageAnnotation.app_id == app_id) .where( or_( - MessageAnnotation.question.ilike(f"%{keyword}%"), - MessageAnnotation.content.ilike(f"%{keyword}%"), + MessageAnnotation.question.ilike(f"%{escaped_keyword}%", escape="\\"), + MessageAnnotation.content.ilike(f"%{escaped_keyword}%", escape="\\"), ) ) .order_by(MessageAnnotation.created_at.desc(), MessageAnnotation.id.desc()) @@ -253,7 +256,7 @@ class AppAnnotationService: if app_annotation_setting: update_annotation_to_index_task.delay( annotation.id, - annotation.question, + annotation.question_text, current_tenant_id, app_id, app_annotation_setting.collection_binding_id, @@ -352,7 +355,6 @@ class AppAnnotationService: def batch_import_app_annotations(cls, app_id, file: FileStorage): """ Batch import annotations from CSV file with enhanced security checks. 
- Security features: - File size validation - Row count limits (min/max) @@ -361,7 +363,6 @@ class AppAnnotationService: - Concurrency tracking """ from configs import dify_config - # get app info current_user, current_tenant_id = current_account_with_tenant() app = ( @@ -445,27 +446,31 @@ class AppAnnotationService: f"The CSV file must contain at least {min_records} valid annotation record(s). " f"Found {len(result)} valid record(s)." ) - # Check annotation quota limit features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: annotation_quota_limit = features.annotation_quota_limit if annotation_quota_limit.limit < len(result) + annotation_quota_limit.size: - raise ValueError("The number of annotations exceeds the limit of your subscription.") - # async job + raise ValueError( + f"The number of annotations ({len(result)}) would exceed your subscription limit. " + f"Current usage: {annotation_quota_limit.size}/{annotation_quota_limit.limit}. " + f"Available: {annotation_quota_limit.limit - annotation_quota_limit.size}." + ) + + # Create async job job_id = str(uuid.uuid4()) indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}" - # Register job in active tasks list for concurrency tracking current_time = int(naive_utc_now().timestamp() * 1000) active_jobs_key = f"annotation_import_active:{current_tenant_id}" redis_client.zadd(active_jobs_key, {job_id: current_time}) redis_client.expire(active_jobs_key, 7200) # 2 hours TTL - # Set job status redis_client.setnx(indexing_cache_key, "waiting") - batch_import_annotations_task.delay(str(job_id), result, app_id, current_tenant_id, current_user.id) + redis_client.expire(indexing_cache_key, 3600) # 1 hour TTL + # Send batch import task + batch_import_annotations_task.delay(str(job_id), result, app_id, current_tenant_id, current_user.id) except ValueError as e: return {"error_msg": str(e)} except Exception as e: diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service.py index 1dd6faea5d..acd2a25a86 100644 --- a/api/services/app_dsl_service.py +++ b/api/services/app_dsl_service.py @@ -32,7 +32,7 @@ from extensions.ext_redis import redis_client from factories import variable_factory from libs.datetime_utils import naive_utc_now from models import Account, App, AppMode -from models.model import AppModelConfig +from models.model import AppModelConfig, IconType from models.workflow import Workflow from services.plugin.dependencies_analysis import DependenciesAnalysisService from services.workflow_draft_variable_service import WorkflowDraftVariableService @@ -155,6 +155,7 @@ class AppDslService: parsed_url.scheme == "https" and parsed_url.netloc == "github.com" and parsed_url.path.endswith((".yml", ".yaml")) + and "/blob/" in parsed_url.path ): yaml_url = yaml_url.replace("https://github.com", "https://raw.githubusercontent.com") yaml_url = yaml_url.replace("/blob/", "/") @@ -427,10 +428,10 @@ class AppDslService: # Set icon type icon_type_value = icon_type or app_data.get("icon_type") - if icon_type_value in ["emoji", "link", "image"]: + if icon_type_value in [IconType.EMOJI.value, IconType.IMAGE.value, IconType.LINK.value]: icon_type = icon_type_value else: - icon_type = "emoji" + icon_type = IconType.EMOJI.value icon = icon or str(app_data.get("icon", "")) if app: diff --git a/api/services/app_generate_service.py b/api/services/app_generate_service.py index 4514c86f7c..cc58899dc4 100644 --- a/api/services/app_generate_service.py +++ b/api/services/app_generate_service.py @@ -14,7 +14,8 @@ from 
enums.quota_type import QuotaType, unlimited from extensions.otel import AppGenerateHandler, trace_span from models.model import Account, App, AppMode, EndUser from models.workflow import Workflow -from services.errors.app import InvokeRateLimitError, QuotaExceededError, WorkflowIdFormatError, WorkflowNotFoundError +from services.errors.app import QuotaExceededError, WorkflowIdFormatError, WorkflowNotFoundError +from services.errors.llm import InvokeRateLimitError from services.workflow_service import WorkflowService diff --git a/api/services/app_service.py b/api/services/app_service.py index ef89a4fd10..02ebfbace0 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -55,8 +55,11 @@ class AppService: if args.get("is_created_by_me", False): filters.append(App.created_by == user_id) if args.get("name"): + from libs.helper import escape_like_pattern + name = args["name"][:30] - filters.append(App.name.ilike(f"%{name}%")) + escaped_name = escape_like_pattern(name) + filters.append(App.name.ilike(f"%{escaped_name}%", escape="\\")) # Check if tag_ids is not empty to avoid WHERE false condition if args.get("tag_ids") and len(args["tag_ids"]) > 0: target_ids = TagService.get_target_ids_by_tag_ids("app", tenant_id, args["tag_ids"]) diff --git a/api/services/async_workflow_service.py b/api/services/async_workflow_service.py index e100582511..bc73b7c8c2 100644 --- a/api/services/async_workflow_service.py +++ b/api/services/async_workflow_service.py @@ -21,7 +21,7 @@ from models.model import App, EndUser from models.trigger import WorkflowTriggerLog from models.workflow import Workflow from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository -from services.errors.app import InvokeRateLimitError, QuotaExceededError, WorkflowNotFoundError +from services.errors.app import QuotaExceededError, WorkflowNotFoundError, WorkflowQuotaLimitError from services.workflow.entities import AsyncTriggerResponse, TriggerData, WorkflowTaskData from services.workflow.queue_dispatcher import QueueDispatcherManager, QueuePriority from services.workflow_service import WorkflowService @@ -141,7 +141,7 @@ class AsyncWorkflowService: trigger_log_repo.update(trigger_log) session.commit() - raise InvokeRateLimitError( + raise WorkflowQuotaLimitError( f"Workflow execution quota limit reached for tenant {trigger_data.tenant_id}" ) from e diff --git a/api/services/auth/firecrawl/firecrawl.py b/api/services/auth/firecrawl/firecrawl.py index d455475bfc..b002706931 100644 --- a/api/services/auth/firecrawl/firecrawl.py +++ b/api/services/auth/firecrawl/firecrawl.py @@ -26,7 +26,7 @@ class FirecrawlAuth(ApiKeyAuthBase): "limit": 1, "scrapeOptions": {"onlyMainContent": True}, } - response = self._post_request(f"{self.base_url}/v1/crawl", options, headers) + response = self._post_request(self._build_url("v1/crawl"), options, headers) if response.status_code == 200: return True else: @@ -35,15 +35,17 @@ class FirecrawlAuth(ApiKeyAuthBase): def _prepare_headers(self): return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"} + def _build_url(self, path: str) -> str: + # ensure exactly one slash between base and path, regardless of user-provided base_url + return f"{self.base_url.rstrip('/')}/{path.lstrip('/')}" + def _post_request(self, url, data, headers): return httpx.post(url, headers=headers, json=data) def _handle_error(self, response): - if response.status_code in {402, 409, 500}: - error_message = response.json().get("error", 
"Unknown error occurred") - raise Exception(f"Failed to authorize. Status code: {response.status_code}. Error: {error_message}") - else: - if response.text: - error_message = json.loads(response.text).get("error", "Unknown error occurred") - raise Exception(f"Failed to authorize. Status code: {response.status_code}. Error: {error_message}") - raise Exception(f"Unexpected error occurred while trying to authorize. Status code: {response.status_code}") + try: + payload = response.json() + except json.JSONDecodeError: + payload = {} + error_message = payload.get("error") or payload.get("message") or (response.text or "Unknown error occurred") + raise Exception(f"Failed to authorize. Status code: {response.status_code}. Error: {error_message}") diff --git a/api/services/billing_service.py b/api/services/billing_service.py index 54e1c9d285..e5dc3e3983 100644 --- a/api/services/billing_service.py +++ b/api/services/billing_service.py @@ -1,8 +1,13 @@ +import json +import logging import os +from collections.abc import Sequence from typing import Literal import httpx +from pydantic import TypeAdapter from tenacity import retry, retry_if_exception_type, stop_before_delay, wait_fixed +from typing_extensions import TypedDict from werkzeug.exceptions import InternalServerError from enums.cloud_plan import CloudPlan @@ -11,6 +16,15 @@ from extensions.ext_redis import redis_client from libs.helper import RateLimiter from models import Account, TenantAccountJoin, TenantAccountRole +logger = logging.getLogger(__name__) + + +class SubscriptionPlan(TypedDict): + """Tenant subscriptionplan information.""" + + plan: str + expiration_date: int + class BillingService: base_url = os.environ.get("BILLING_API_URL", "BILLING_API_URL") @@ -18,6 +32,11 @@ class BillingService: compliance_download_rate_limiter = RateLimiter("compliance_download_rate_limiter", 4, 60) + # Redis key prefix for tenant plan cache + _PLAN_CACHE_KEY_PREFIX = "tenant_plan:" + # Cache TTL: 10 minutes + _PLAN_CACHE_TTL = 600 + @classmethod def get_info(cls, tenant_id: str): params = {"tenant_id": tenant_id} @@ -25,6 +44,33 @@ class BillingService: billing_info = cls._send_request("GET", "/subscription/info", params=params) return billing_info + @classmethod + def get_info_bulk(cls, tenant_ids: Sequence[str]) -> dict[str, str]: + """ + Bulk billing info fetch via billing API. + + Payload: {"tenant_ids": ["t1", "t2", ...]} (max 200 per request) + + Returns: + Mapping of tenant_id -> plan + """ + results: dict[str, str] = {} + + chunk_size = 200 + for i in range(0, len(tenant_ids), chunk_size): + chunk = tenant_ids[i : i + chunk_size] + try: + resp = cls._send_request("POST", "/subscription/plan/batch", json={"tenant_ids": chunk}) + data = resp.get("data", {}) + for tenant_id, plan in data.items(): + if isinstance(plan, str): + results[tenant_id] = plan + except Exception: + logger.exception("Failed to fetch billing info batch for tenants: %s", chunk) + continue + + return results + @classmethod def get_tenant_feature_plan_usage_info(cls, tenant_id: str): params = {"tenant_id": tenant_id} @@ -239,3 +285,135 @@ class BillingService: def sync_partner_tenants_bindings(cls, account_id: str, partner_key: str, click_id: str): payload = {"account_id": account_id, "click_id": click_id} return cls._send_request("PUT", f"/partners/{partner_key}/tenants", json=payload) + + @classmethod + def get_plan_bulk(cls, tenant_ids: Sequence[str]) -> dict[str, SubscriptionPlan]: + """ + Bulk fetch billing subscription plan via billing API. 
+ Payload: {"tenant_ids": ["t1", "t2", ...]} (max 200 per request) + Returns: + Mapping of tenant_id -> {plan: str, expiration_date: int} + """ + results: dict[str, SubscriptionPlan] = {} + subscription_adapter = TypeAdapter(SubscriptionPlan) + + chunk_size = 200 + for i in range(0, len(tenant_ids), chunk_size): + chunk = tenant_ids[i : i + chunk_size] + try: + resp = cls._send_request("POST", "/subscription/plan/batch", json={"tenant_ids": chunk}) + data = resp.get("data", {}) + + for tenant_id, plan in data.items(): + try: + subscription_plan = subscription_adapter.validate_python(plan) + results[tenant_id] = subscription_plan + except Exception: + logger.exception( + "get_plan_bulk: failed to validate subscription plan for tenant(%s)", tenant_id + ) + continue + except Exception: + logger.exception("get_plan_bulk: failed to fetch billing info batch for tenants: %s", chunk) + continue + + return results + + @classmethod + def _make_plan_cache_key(cls, tenant_id: str) -> str: + return f"{cls._PLAN_CACHE_KEY_PREFIX}{tenant_id}" + + @classmethod + def get_plan_bulk_with_cache(cls, tenant_ids: Sequence[str]) -> dict[str, SubscriptionPlan]: + """ + Bulk fetch billing subscription plan with cache to reduce billing API loads in batch job scenarios. + + NOTE: if you want to high data consistency, use get_plan_bulk instead. + + Returns: + Mapping of tenant_id -> {plan: str, expiration_date: int} + """ + tenant_plans: dict[str, SubscriptionPlan] = {} + + if not tenant_ids: + return tenant_plans + + subscription_adapter = TypeAdapter(SubscriptionPlan) + + # Step 1: Batch fetch from Redis cache using mget + redis_keys = [cls._make_plan_cache_key(tenant_id) for tenant_id in tenant_ids] + try: + cached_values = redis_client.mget(redis_keys) + + if len(cached_values) != len(tenant_ids): + raise Exception( + "get_plan_bulk_with_cache: unexpected error: redis mget failed: cached values length mismatch" + ) + + # Map cached values back to tenant_ids + cache_misses: list[str] = [] + + for tenant_id, cached_value in zip(tenant_ids, cached_values): + if cached_value: + try: + # Redis returns bytes, decode to string and parse JSON + json_str = cached_value.decode("utf-8") if isinstance(cached_value, bytes) else cached_value + plan_dict = json.loads(json_str) + subscription_plan = subscription_adapter.validate_python(plan_dict) + tenant_plans[tenant_id] = subscription_plan + except Exception: + logger.exception( + "get_plan_bulk_with_cache: process tenant(%s) failed, add to cache misses", tenant_id + ) + cache_misses.append(tenant_id) + else: + cache_misses.append(tenant_id) + + logger.info( + "get_plan_bulk_with_cache: cache hits=%s, cache misses=%s", + len(tenant_plans), + len(cache_misses), + ) + except Exception: + logger.exception("get_plan_bulk_with_cache: redis mget failed, falling back to API") + cache_misses = list(tenant_ids) + + # Step 2: Fetch missing plans from billing API + if cache_misses: + bulk_plans = BillingService.get_plan_bulk(cache_misses) + + if bulk_plans: + plans_to_cache: dict[str, SubscriptionPlan] = {} + + for tenant_id, subscription_plan in bulk_plans.items(): + tenant_plans[tenant_id] = subscription_plan + plans_to_cache[tenant_id] = subscription_plan + + # Step 3: Batch update Redis cache using pipeline + if plans_to_cache: + try: + pipe = redis_client.pipeline() + for tenant_id, subscription_plan in plans_to_cache.items(): + redis_key = cls._make_plan_cache_key(tenant_id) + # Serialize dict to JSON string + json_str = json.dumps(subscription_plan) + pipe.setex(redis_key, 
cls._PLAN_CACHE_TTL, json_str) + pipe.execute() + + logger.info( + "get_plan_bulk_with_cache: cached %s new tenant plans to Redis", + len(plans_to_cache), + ) + except Exception: + logger.exception("get_plan_bulk_with_cache: redis pipeline failed") + + return tenant_plans + + @classmethod + def get_expired_subscription_cleanup_whitelist(cls) -> Sequence[str]: + resp = cls._send_request("GET", "/subscription/cleanup/whitelist") + data = resp.get("data", []) + tenant_whitelist = [] + for item in data: + tenant_whitelist.append(item["tenant_id"]) + return tenant_whitelist diff --git a/api/services/clear_free_plan_expired_workflow_run_logs.py b/api/services/clear_free_plan_expired_workflow_run_logs.py new file mode 100644 index 0000000000..1fe2bad2d0 --- /dev/null +++ b/api/services/clear_free_plan_expired_workflow_run_logs.py @@ -0,0 +1,171 @@ +import datetime +import logging +from collections.abc import Iterable, Sequence + +import click +from sqlalchemy.orm import Session, sessionmaker + +from configs import dify_config +from enums.cloud_plan import CloudPlan +from extensions.ext_database import db +from models.workflow import WorkflowRun +from repositories.api_workflow_run_repository import APIWorkflowRunRepository +from repositories.sqlalchemy_api_workflow_node_execution_repository import ( + DifyAPISQLAlchemyWorkflowNodeExecutionRepository, +) +from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository +from services.billing_service import BillingService + +logger = logging.getLogger(__name__) + + +class WorkflowRunCleanup: + def __init__( + self, + days: int, + batch_size: int, + start_after: datetime.datetime | None = None, + end_before: datetime.datetime | None = None, + workflow_run_repo: APIWorkflowRunRepository | None = None, + ): + if (start_after is None) ^ (end_before is None): + raise ValueError("start_after and end_before must be both set or both omitted.") + + computed_cutoff = datetime.datetime.now() - datetime.timedelta(days=days) + self.window_start = start_after + self.window_end = end_before or computed_cutoff + + if self.window_start and self.window_end <= self.window_start: + raise ValueError("end_before must be greater than start_after.") + + self.batch_size = batch_size + self.billing_cache: dict[str, CloudPlan | None] = {} + self.workflow_run_repo: APIWorkflowRunRepository + if workflow_run_repo: + self.workflow_run_repo = workflow_run_repo + else: + # Lazy import to avoid circular dependencies during module import + from repositories.factory import DifyAPIRepositoryFactory + + session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) + self.workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) + + def run(self) -> None: + click.echo( + click.style( + f"Cleaning workflow runs " + f"{'between ' + self.window_start.isoformat() + ' and ' if self.window_start else 'before '}" + f"{self.window_end.isoformat()} (batch={self.batch_size})", + fg="white", + ) + ) + + total_runs_deleted = 0 + batch_index = 0 + last_seen: tuple[datetime.datetime, str] | None = None + + while True: + run_rows = self.workflow_run_repo.get_runs_batch_by_time_range( + start_after=self.window_start, + end_before=self.window_end, + last_seen=last_seen, + batch_size=self.batch_size, + ) + if not run_rows: + break + + batch_index += 1 + last_seen = (run_rows[-1].created_at, run_rows[-1].id) + tenant_ids = {row.tenant_id for row in run_rows} + free_tenants = self._filter_free_tenants(tenant_ids) + 
free_runs = [row for row in run_rows if row.tenant_id in free_tenants] + paid_or_skipped = len(run_rows) - len(free_runs) + + if not free_runs: + click.echo( + click.style( + f"[batch #{batch_index}] skipped (no sandbox runs in batch, {paid_or_skipped} paid/unknown)", + fg="yellow", + ) + ) + continue + + try: + counts = self.workflow_run_repo.delete_runs_with_related( + free_runs, + delete_node_executions=self._delete_node_executions, + delete_trigger_logs=self._delete_trigger_logs, + ) + except Exception: + logger.exception("Failed to delete workflow runs batch ending at %s", last_seen[0]) + raise + + total_runs_deleted += counts["runs"] + click.echo( + click.style( + f"[batch #{batch_index}] deleted runs: {counts['runs']} " + f"(nodes {counts['node_executions']}, offloads {counts['offloads']}, " + f"app_logs {counts['app_logs']}, trigger_logs {counts['trigger_logs']}, " + f"pauses {counts['pauses']}, pause_reasons {counts['pause_reasons']}); " + f"skipped {paid_or_skipped} paid/unknown", + fg="green", + ) + ) + + if self.window_start: + summary_message = ( + f"Cleanup complete. Deleted {total_runs_deleted} workflow runs " + f"between {self.window_start.isoformat()} and {self.window_end.isoformat()}" + ) + else: + summary_message = ( + f"Cleanup complete. Deleted {total_runs_deleted} workflow runs before {self.window_end.isoformat()}" + ) + + click.echo(click.style(summary_message, fg="white")) + + def _filter_free_tenants(self, tenant_ids: Iterable[str]) -> set[str]: + if not dify_config.BILLING_ENABLED: + return set(tenant_ids) + + tenant_id_list = list(tenant_ids) + uncached_tenants = [tenant_id for tenant_id in tenant_id_list if tenant_id not in self.billing_cache] + + if uncached_tenants: + try: + bulk_info = BillingService.get_info_bulk(uncached_tenants) + except Exception: + bulk_info = {} + logger.exception("Failed to fetch billing plans in bulk for tenants: %s", uncached_tenants) + + for tenant_id in uncached_tenants: + plan: CloudPlan | None = None + info = bulk_info.get(tenant_id) + if info: + try: + plan = CloudPlan(info) + except Exception: + logger.exception("Failed to parse billing plan for tenant %s", tenant_id) + else: + logger.warning("Missing billing info for tenant %s in bulk resp; treating as non-free", tenant_id) + + self.billing_cache[tenant_id] = plan + + return {tenant_id for tenant_id in tenant_id_list if self.billing_cache.get(tenant_id) == CloudPlan.SANDBOX} + + def _delete_trigger_logs(self, session: Session, run_ids: Sequence[str]) -> int: + trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session) + return trigger_repo.delete_by_run_ids(run_ids) + + def _delete_node_executions(self, session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]: + run_contexts: list[DifyAPISQLAlchemyWorkflowNodeExecutionRepository.RunContext] = [ + { + "run_id": run.id, + "tenant_id": run.tenant_id, + "app_id": run.app_id, + "workflow_id": run.workflow_id, + "triggered_from": run.triggered_from, + } + for run in runs + ] + return DifyAPISQLAlchemyWorkflowNodeExecutionRepository.delete_by_runs(session, run_contexts) diff --git a/api/services/conversation_service.py b/api/services/conversation_service.py index 5253199552..295d48d8a1 100644 --- a/api/services/conversation_service.py +++ b/api/services/conversation_service.py @@ -6,16 +6,18 @@ from typing import Any, Union from sqlalchemy import asc, desc, func, or_, select from sqlalchemy.orm import Session +from configs import dify_config from core.app.entities.app_invoke_entities import InvokeFrom +from 
core.db.session_factory import session_factory from core.llm_generator.llm_generator import LLMGenerator from core.variables.types import SegmentType -from core.workflow.nodes.variable_assigner.common.impl import conversation_variable_updater_factory from extensions.ext_database import db from factories import variable_factory from libs.datetime_utils import naive_utc_now from libs.infinite_scroll_pagination import InfiniteScrollPagination from models import Account, ConversationVariable from models.model import App, Conversation, EndUser, Message +from services.conversation_variable_updater import ConversationVariableUpdater from services.errors.conversation import ( ConversationNotExistsError, ConversationVariableNotExistsError, @@ -202,6 +204,7 @@ class ConversationService: user: Union[Account, EndUser] | None, limit: int, last_id: str | None, + variable_name: str | None = None, ) -> InfiniteScrollPagination: conversation = cls.get_conversation(app_model, conversation_id, user) @@ -212,7 +215,27 @@ class ConversationService: .order_by(ConversationVariable.created_at) ) - with Session(db.engine) as session: + # Apply variable_name filter if provided + if variable_name: + from libs.helper import escape_like_pattern + + escaped_variable_name = escape_like_pattern(variable_name) + # Filter using JSON extraction to match variable names case-insensitively + if dify_config.DB_TYPE in ["mysql", "oceanbase", "seekdb"]: + stmt = stmt.where( + func.json_extract(ConversationVariable.data, "$.name").ilike( + f"%{escaped_variable_name}%", escape="\\" + ) + ) + elif dify_config.DB_TYPE == "postgresql": + stmt = stmt.where( + func.json_extract_path_text(ConversationVariable.data, "name").ilike( + f"%{escaped_variable_name}%", escape="\\" + ) + ) + + with session_factory.create_session() as session: if last_id: last_variable = session.scalar(stmt.where(ConversationVariable.id == last_id)) if not last_variable: @@ -279,7 +302,7 @@ class ConversationService: .where(ConversationVariable.id == variable_id) ) - with Session(db.engine) as session: + with session_factory.create_session() as session: existing_variable = session.scalar(stmt) if not existing_variable: raise ConversationVariableNotExistsError() @@ -314,7 +337,7 @@ class ConversationService: updated_variable = variable_factory.build_conversation_variable_from_mapping(updated_variable_dict) # Use the conversation variable updater to persist the changes - updater = conversation_variable_updater_factory() + updater = ConversationVariableUpdater(session_factory.get_session_maker()) updater.update(conversation_id, updated_variable) updater.flush() diff --git a/api/services/conversation_variable_updater.py b/api/services/conversation_variable_updater.py new file mode 100644 index 0000000000..acc0ec2b22 --- /dev/null +++ b/api/services/conversation_variable_updater.py @@ -0,0 +1,28 @@ +from sqlalchemy import select +from sqlalchemy.orm import Session, sessionmaker + +from core.variables.variables import Variable +from models import ConversationVariable + + +class ConversationVariableNotFoundError(Exception): + pass + + +class ConversationVariableUpdater: + def __init__(self, session_maker: sessionmaker[Session]) -> None: + self._session_maker: sessionmaker[Session] = session_maker + + def update(self, conversation_id: str, variable: Variable) -> None: + stmt = select(ConversationVariable).where( + ConversationVariable.id == variable.id, ConversationVariable.conversation_id == 
conversation_id + ) + with self._session_maker() as session: + row = session.scalar(stmt) + if not row: + raise ConversationVariableNotFoundError("conversation variable not found in the database") + row.data = variable.model_dump_json() + session.commit() + + def flush(self) -> None: + pass diff --git a/api/services/credit_pool_service.py b/api/services/credit_pool_service.py new file mode 100644 index 0000000000..1954602571 --- /dev/null +++ b/api/services/credit_pool_service.py @@ -0,0 +1,85 @@ +import logging + +from sqlalchemy import update +from sqlalchemy.orm import Session + +from configs import dify_config +from core.errors.error import QuotaExceededError +from extensions.ext_database import db +from models import TenantCreditPool + +logger = logging.getLogger(__name__) + + +class CreditPoolService: + @classmethod + def create_default_pool(cls, tenant_id: str) -> TenantCreditPool: + """create default credit pool for new tenant""" + credit_pool = TenantCreditPool( + tenant_id=tenant_id, quota_limit=dify_config.HOSTED_POOL_CREDITS, quota_used=0, pool_type="trial" + ) + db.session.add(credit_pool) + db.session.commit() + return credit_pool + + @classmethod + def get_pool(cls, tenant_id: str, pool_type: str = "trial") -> TenantCreditPool | None: + """get tenant credit pool""" + return ( + db.session.query(TenantCreditPool) + .filter_by( + tenant_id=tenant_id, + pool_type=pool_type, + ) + .first() + ) + + @classmethod + def check_credits_available( + cls, + tenant_id: str, + credits_required: int, + pool_type: str = "trial", + ) -> bool: + """check if credits are available without deducting""" + pool = cls.get_pool(tenant_id, pool_type) + if not pool: + return False + return pool.remaining_credits >= credits_required + + @classmethod + def check_and_deduct_credits( + cls, + tenant_id: str, + credits_required: int, + pool_type: str = "trial", + ) -> int: + """check and deduct credits, returns actual credits deducted""" + + pool = cls.get_pool(tenant_id, pool_type) + if not pool: + raise QuotaExceededError("Credit pool not found") + + if pool.remaining_credits <= 0: + raise QuotaExceededError("No credits remaining") + + # deduct all remaining credits if less than required + actual_credits = min(credits_required, pool.remaining_credits) + + try: + with Session(db.engine) as session: + stmt = ( + update(TenantCreditPool) + .where( + TenantCreditPool.tenant_id == tenant_id, + TenantCreditPool.pool_type == pool_type, + ) + .values(quota_used=TenantCreditPool.quota_used + actual_credits) + ) + session.execute(stmt) + session.commit() + except Exception: + logger.exception("Failed to deduct credits for tenant %s", tenant_id) + raise QuotaExceededError("Failed to deduct credits") + + return actual_credits diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index 56a10a7753..82e9770286 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -145,7 +145,8 @@ class DatasetService: query = query.where(Dataset.permission == DatasetPermissionEnum.ALL_TEAM) if search: - query = query.where(Dataset.name.ilike(f"%{search}%")) + escaped_search = helper.escape_like_pattern(search) + query = query.where(Dataset.name.ilike(f"%{escaped_search}%", escape="\\")) # Check if tag_ids is not empty to avoid WHERE false condition if tag_ids and len(tag_ids) > 0: @@ -3559,7 +3560,8 @@ class SegmentService: .order_by(ChildChunk.position.asc()) ) if keyword: - query = query.where(ChildChunk.content.ilike(f"%{keyword}%")) + escaped_keyword = 
helper.escape_like_pattern(keyword) + query = query.where(ChildChunk.content.ilike(f"%{escaped_keyword}%", escape="\\")) return db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False) @classmethod @@ -3592,9 +3594,10 @@ class SegmentService: query = query.where(DocumentSegment.status.in_(status_list)) if keyword: - query = query.where(DocumentSegment.content.ilike(f"%{keyword}%")) + escaped_keyword = helper.escape_like_pattern(keyword) + query = query.where(DocumentSegment.content.ilike(f"%{escaped_keyword}%", escape="\\")) - query = query.order_by(DocumentSegment.position.asc()) + query = query.order_by(DocumentSegment.position.asc(), DocumentSegment.id.asc()) paginated_segments = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False) return paginated_segments.items, paginated_segments.total diff --git a/api/services/enterprise/enterprise_service.py b/api/services/enterprise/enterprise_service.py index 83d0fcf296..c0cc0e5233 100644 --- a/api/services/enterprise/enterprise_service.py +++ b/api/services/enterprise/enterprise_service.py @@ -110,5 +110,5 @@ class EnterpriseService: if not app_id: raise ValueError("app_id must be provided.") - body = {"appId": app_id} - EnterpriseRequest.send_request("DELETE", "/webapp/clean", json=body) + params = {"appId": app_id} + EnterpriseRequest.send_request("DELETE", "/webapp/clean", params=params) diff --git a/api/services/entities/knowledge_entities/rag_pipeline_entities.py b/api/services/entities/knowledge_entities/rag_pipeline_entities.py index a97ccab914..cbb0efcc2a 100644 --- a/api/services/entities/knowledge_entities/rag_pipeline_entities.py +++ b/api/services/entities/knowledge_entities/rag_pipeline_entities.py @@ -23,7 +23,7 @@ class RagPipelineDatasetCreateEntity(BaseModel): description: str icon_info: IconInfo permission: str - partial_member_list: list[str] | None = None + partial_member_list: list[dict[str, str]] | None = None yaml_content: str | None = None diff --git a/api/services/entities/model_provider_entities.py b/api/services/entities/model_provider_entities.py index f405546909..a29d848ac5 100644 --- a/api/services/entities/model_provider_entities.py +++ b/api/services/entities/model_provider_entities.py @@ -70,7 +70,6 @@ class ProviderResponse(BaseModel): description: I18nObject | None = None icon_small: I18nObject | None = None icon_small_dark: I18nObject | None = None - icon_large: I18nObject | None = None background: str | None = None help: ProviderHelpEntity | None = None supported_model_types: Sequence[ModelType] @@ -98,11 +97,6 @@ class ProviderResponse(BaseModel): en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans", ) - - if self.icon_large is not None: - self.icon_large = I18nObject( - en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans" - ) return self @@ -116,7 +110,6 @@ class ProviderWithModelsResponse(BaseModel): label: I18nObject icon_small: I18nObject | None = None icon_small_dark: I18nObject | None = None - icon_large: I18nObject | None = None status: CustomConfigurationStatus models: list[ProviderModelWithStatusEntity] @@ -134,11 +127,6 @@ class ProviderWithModelsResponse(BaseModel): self.icon_small_dark = I18nObject( en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans" ) - - if self.icon_large is not None: - self.icon_large = I18nObject( - en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans" - ) return self @@ 
-163,11 +151,6 @@ class SimpleProviderEntityResponse(SimpleProviderEntity): self.icon_small_dark = I18nObject( en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans" ) - - if self.icon_large is not None: - self.icon_large = I18nObject( - en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans" - ) return self diff --git a/api/services/errors/app.py b/api/services/errors/app.py index 24e4760acc..60e59e97dc 100644 --- a/api/services/errors/app.py +++ b/api/services/errors/app.py @@ -18,8 +18,8 @@ class WorkflowIdFormatError(Exception): pass -class InvokeRateLimitError(Exception): - """Raised when rate limit is exceeded for workflow invocations.""" +class WorkflowQuotaLimitError(Exception): + """Raised when workflow execution quota is exceeded (for async/background workflows).""" pass diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py index 40faa85b9a..65dd41af43 100644 --- a/api/services/external_knowledge_service.py +++ b/api/services/external_knowledge_service.py @@ -35,7 +35,10 @@ class ExternalDatasetService: .order_by(ExternalKnowledgeApis.created_at.desc()) ) if search: - query = query.where(ExternalKnowledgeApis.name.ilike(f"%{search}%")) + from libs.helper import escape_like_pattern + + escaped_search = escape_like_pattern(search) + query = query.where(ExternalKnowledgeApis.name.ilike(f"%{escaped_search}%", escape="\\")) external_knowledge_apis = db.paginate( select=query, page=page, per_page=per_page, max_per_page=100, error_out=False diff --git a/api/services/feature_service.py b/api/services/feature_service.py index 8035adc734..fc91f450b7 100644 --- a/api/services/feature_service.py +++ b/api/services/feature_service.py @@ -140,6 +140,7 @@ class FeatureModel(BaseModel): # pydantic configs model_config = ConfigDict(protected_namespaces=()) knowledge_pipeline: KnowledgePipeline = KnowledgePipeline() + next_credit_reset_date: int = 0 class KnowledgeRateLimitModel(BaseModel): @@ -169,6 +170,8 @@ class SystemFeatureModel(BaseModel): plugin_installation_permission: PluginInstallationPermissionModel = PluginInstallationPermissionModel() enable_change_email: bool = True plugin_manager: PluginManagerModel = PluginManagerModel() + enable_trial_app: bool = False + enable_explore_banner: bool = False class FeatureService: @@ -224,6 +227,8 @@ class FeatureService: system_features.is_allow_register = dify_config.ALLOW_REGISTER system_features.is_allow_create_workspace = dify_config.ALLOW_CREATE_WORKSPACE system_features.is_email_setup = dify_config.MAIL_TYPE is not None and dify_config.MAIL_TYPE != "" + system_features.enable_trial_app = dify_config.ENABLE_TRIAL_APP + system_features.enable_explore_banner = dify_config.ENABLE_EXPLORE_BANNER @classmethod def _fulfill_params_from_env(cls, features: FeatureModel): @@ -301,6 +306,9 @@ class FeatureService: if "knowledge_pipeline_publish_enabled" in billing_info: features.knowledge_pipeline.publish_enabled = billing_info["knowledge_pipeline_publish_enabled"] + if "next_credit_reset_date" in billing_info: + features.next_credit_reset_date = billing_info["next_credit_reset_date"] + @classmethod def _fulfill_params_from_enterprise(cls, features: SystemFeatureModel): enterprise_info = EnterpriseService.get_info() diff --git a/api/services/llm_generation_service.py b/api/services/llm_generation_service.py new file mode 100644 index 0000000000..eb8327537e --- /dev/null +++ b/api/services/llm_generation_service.py @@ -0,0 +1,37 @@ +""" +LLM 
Generation Detail Service. + +Provides methods to query and attach generation details to workflow node executions +and messages, avoiding N+1 query problems. +""" + +from sqlalchemy import select +from sqlalchemy.orm import Session + +from core.app.entities.llm_generation_entities import LLMGenerationDetailData +from models import LLMGenerationDetail + + +class LLMGenerationService: + """Service for handling LLM generation details.""" + + def __init__(self, session: Session): + self._session = session + + def get_generation_detail_for_message(self, message_id: str) -> LLMGenerationDetailData | None: + """Query generation detail for a specific message.""" + stmt = select(LLMGenerationDetail).where(LLMGenerationDetail.message_id == message_id) + detail = self._session.scalars(stmt).first() + return detail.to_domain_model() if detail else None + + def get_generation_details_for_messages( + self, + message_ids: list[str], + ) -> dict[str, LLMGenerationDetailData]: + """Batch query generation details for multiple messages.""" + if not message_ids: + return {} + + stmt = select(LLMGenerationDetail).where(LLMGenerationDetail.message_id.in_(message_ids)) + details = self._session.scalars(stmt).all() + return {detail.message_id: detail.to_domain_model() for detail in details if detail.message_id} diff --git a/api/services/model_provider_service.py b/api/services/model_provider_service.py index eea382febe..edd1004b82 100644 --- a/api/services/model_provider_service.py +++ b/api/services/model_provider_service.py @@ -99,7 +99,6 @@ class ModelProviderService: description=provider_configuration.provider.description, icon_small=provider_configuration.provider.icon_small, icon_small_dark=provider_configuration.provider.icon_small_dark, - icon_large=provider_configuration.provider.icon_large, background=provider_configuration.provider.background, help=provider_configuration.provider.help, supported_model_types=provider_configuration.provider.supported_model_types, @@ -423,7 +422,6 @@ class ModelProviderService: label=first_model.provider.label, icon_small=first_model.provider.icon_small, icon_small_dark=first_model.provider.icon_small_dark, - icon_large=first_model.provider.icon_large, status=CustomConfigurationStatus.ACTIVE, models=[ ProviderModelWithStatusEntity( @@ -488,7 +486,6 @@ class ModelProviderService: provider=result.provider.provider, label=result.provider.label, icon_small=result.provider.icon_small, - icon_large=result.provider.icon_large, supported_model_types=result.provider.supported_model_types, ), ) @@ -522,7 +519,7 @@ class ModelProviderService: :param tenant_id: workspace id :param provider: provider name - :param icon_type: icon type (icon_small or icon_large) + :param icon_type: icon type (icon_small or icon_small_dark) :param lang: language (zh_Hans or en_US) :return: """ diff --git a/api/services/plugin/plugin_parameter_service.py b/api/services/plugin/plugin_parameter_service.py index c517d9f966..40565c56ed 100644 --- a/api/services/plugin/plugin_parameter_service.py +++ b/api/services/plugin/plugin_parameter_service.py @@ -105,3 +105,49 @@ class PluginParameterService: ) .options ) + + @staticmethod + def get_dynamic_select_options_with_credentials( + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + action: str, + parameter: str, + credential_id: str, + credentials: Mapping[str, Any], + ) -> Sequence[PluginParameterOption]: + """ + Get dynamic select options using provided credentials directly. 
+ Used for edit mode when credentials have been modified but not yet saved. + + Security: credential_id is validated against tenant_id to ensure + users can only access their own credentials. + """ + from constants import HIDDEN_VALUE + + # Get original subscription to replace hidden values (with tenant_id check for security) + original_subscription = TriggerProviderService.get_subscription_by_id(tenant_id, credential_id) + if not original_subscription: + raise ValueError(f"Subscription {credential_id} not found") + + # Replace [__HIDDEN__] with original values + resolved_credentials: dict[str, Any] = { + key: (original_subscription.credentials.get(key) if value == HIDDEN_VALUE else value) + for key, value in credentials.items() + } + + return ( + DynamicSelectClient() + .fetch_dynamic_select_options( + tenant_id, + user_id, + plugin_id, + provider, + action, + resolved_credentials, + original_subscription.credential_type or CredentialType.UNAUTHORIZED.value, + parameter, + ) + .options + ) diff --git a/api/services/rag_pipeline/rag_pipeline.py b/api/services/rag_pipeline/rag_pipeline.py index f53448e7fe..1ba64813ba 100644 --- a/api/services/rag_pipeline/rag_pipeline.py +++ b/api/services/rag_pipeline/rag_pipeline.py @@ -874,7 +874,7 @@ class RagPipelineService: variable_pool = node_instance.graph_runtime_state.variable_pool invoke_from = variable_pool.get(["sys", SystemVariableKey.INVOKE_FROM]) if invoke_from: - if invoke_from.value == InvokeFrom.PUBLISHED: + if invoke_from.value == InvokeFrom.PUBLISHED_PIPELINE: document_id = variable_pool.get(["sys", SystemVariableKey.DOCUMENT_ID]) if document_id: document = db.session.query(Document).where(Document.id == document_id.value).first() @@ -1318,7 +1318,7 @@ class RagPipelineService: "datasource_info_list": [json.loads(document_pipeline_execution_log.datasource_info)], "original_document_id": document.id, }, - invoke_from=InvokeFrom.PUBLISHED, + invoke_from=InvokeFrom.PUBLISHED_PIPELINE, streaming=False, call_depth=0, workflow_thread_pool_id=None, diff --git a/api/services/recommended_app_service.py b/api/services/recommended_app_service.py index 544383a106..6b211a5632 100644 --- a/api/services/recommended_app_service.py +++ b/api/services/recommended_app_service.py @@ -1,4 +1,7 @@ from configs import dify_config +from extensions.ext_database import db +from models.model import AccountTrialAppRecord, TrialApp +from services.feature_service import FeatureService from services.recommend_app.recommend_app_factory import RecommendAppRetrievalFactory @@ -20,6 +23,15 @@ class RecommendedAppService: ) ) + if FeatureService.get_system_features().enable_trial_app: + apps = result["recommended_apps"] + for app in apps: + app_id = app["app_id"] + trial_app_model = db.session.query(TrialApp).where(TrialApp.app_id == app_id).first() + if trial_app_model: + app["can_trial"] = True + else: + app["can_trial"] = False return result @classmethod @@ -32,4 +44,30 @@ class RecommendedAppService: mode = dify_config.HOSTED_FETCH_APP_TEMPLATES_MODE retrieval_instance = RecommendAppRetrievalFactory.get_recommend_app_factory(mode)() result: dict = retrieval_instance.get_recommend_app_detail(app_id) + if FeatureService.get_system_features().enable_trial_app: + app_id = result["id"] + trial_app_model = db.session.query(TrialApp).where(TrialApp.app_id == app_id).first() + if trial_app_model: + result["can_trial"] = True + else: + result["can_trial"] = False return result + + @classmethod + def add_trial_app_record(cls, app_id: str, account_id: str): + """ + Add trial 
app record. + :param app_id: app id + :param account_id: account id + :return: + """ + account_trial_app_record = ( + db.session.query(AccountTrialAppRecord) + .where(AccountTrialAppRecord.app_id == app_id, AccountTrialAppRecord.account_id == account_id) + .first() + ) + if account_trial_app_record: + account_trial_app_record.count += 1 + db.session.commit() + else: + db.session.add(AccountTrialAppRecord(app_id=app_id, count=1, account_id=account_id)) + db.session.commit() diff --git a/api/services/tag_service.py b/api/services/tag_service.py index 937e6593fe..bd3585acf4 100644 --- a/api/services/tag_service.py +++ b/api/services/tag_service.py @@ -19,7 +19,10 @@ class TagService: .where(Tag.type == tag_type, Tag.tenant_id == current_tenant_id) ) if keyword: - query = query.where(sa.and_(Tag.name.ilike(f"%{keyword}%"))) + from libs.helper import escape_like_pattern + + escaped_keyword = escape_like_pattern(keyword) + query = query.where(sa.and_(Tag.name.ilike(f"%{escaped_keyword}%", escape="\\"))) query = query.group_by(Tag.id, Tag.type, Tag.name, Tag.created_at) results: list = query.order_by(Tag.created_at.desc()).all() return results diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index b3b6e36346..c32157919b 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -7,7 +7,6 @@ from httpx import get from sqlalchemy import select from core.entities.provider_entities import ProviderConfig -from core.helper.tool_provider_cache import ToolProviderListCache from core.model_runtime.utils.encoders import jsonable_encoder from core.tools.__base.tool_runtime import ToolRuntime from core.tools.custom_tool.provider import ApiToolProviderController @@ -86,7 +85,9 @@ class ApiToolManageService: raise ValueError(f"invalid schema: {str(e)}") @staticmethod - def convert_schema_to_tool_bundles(schema: str, extra_info: dict | None = None) -> tuple[list[ApiToolBundle], str]: + def convert_schema_to_tool_bundles( + schema: str, extra_info: dict | None = None + ) -> tuple[list[ApiToolBundle], ApiProviderSchemaType]: """ convert schema to tool bundles @@ -104,7 +105,7 @@ class ApiToolManageService: provider_name: str, icon: dict, credentials: dict, - schema_type: str, + schema_type: ApiProviderSchemaType, schema: str, privacy_policy: str, custom_disclaimer: str, @@ -113,9 +114,6 @@ class ApiToolManageService: """ create api tool provider """ - if schema_type not in [member.value for member in ApiProviderSchemaType]: - raise ValueError(f"invalid schema type {schema}") - provider_name = provider_name.strip() # check if the provider exists @@ -178,9 +176,6 @@ class ApiToolManageService: # update labels ToolLabelManager.update_tool_labels(provider_controller, labels) - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) - return {"result": "success"} @staticmethod @@ -245,18 +240,15 @@ class ApiToolManageService: original_provider: str, icon: dict, credentials: dict, - schema_type: str, + _schema_type: ApiProviderSchemaType, schema: str, - privacy_policy: str, + privacy_policy: str | None, custom_disclaimer: str, labels: list[str], ): """ update api tool provider """ - if schema_type not in [member.value for member in ApiProviderSchemaType]: - raise ValueError(f"invalid schema type {schema}") - provider_name = provider_name.strip() # check if the provider exists @@ -281,7 +273,7 @@ class ApiToolManageService: provider.icon = json.dumps(icon) provider.schema = schema provider.description = 
extra_info.get("description", "") - provider.schema_type_str = ApiProviderSchemaType.OPENAPI + provider.schema_type_str = schema_type provider.tools_str = json.dumps(jsonable_encoder(tool_bundles)) provider.privacy_policy = privacy_policy provider.custom_disclaimer = custom_disclaimer @@ -322,9 +314,6 @@ class ApiToolManageService: # update labels ToolLabelManager.update_tool_labels(provider_controller, labels) - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) - return {"result": "success"} @staticmethod @@ -347,9 +336,6 @@ class ApiToolManageService: db.session.delete(provider) db.session.commit() - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) - return {"result": "success"} @staticmethod @@ -366,7 +352,7 @@ class ApiToolManageService: tool_name: str, credentials: dict, parameters: dict, - schema_type: str, + schema_type: ApiProviderSchemaType, schema: str, ): """ diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py index cf1d39fa25..6797a67dde 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ b/api/services/tools/builtin_tools_manage_service.py @@ -12,7 +12,6 @@ from constants import HIDDEN_VALUE, UNKNOWN_VALUE from core.helper.name_generator import generate_incremental_name from core.helper.position_helper import is_filtered from core.helper.provider_cache import NoOpProviderCredentialCache, ToolProviderCredentialsCache -from core.helper.tool_provider_cache import ToolProviderListCache from core.plugin.entities.plugin_daemon import CredentialType from core.tools.builtin_tool.provider import BuiltinToolProviderController from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort @@ -205,9 +204,6 @@ class BuiltinToolManageService: db_provider.name = name session.commit() - - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) except Exception as e: session.rollback() raise ValueError(str(e)) @@ -286,12 +282,10 @@ class BuiltinToolManageService: session.add(db_provider) session.commit() - - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) except Exception as e: session.rollback() raise ValueError(str(e)) + return {"result": "success"} @staticmethod @@ -409,9 +403,6 @@ class BuiltinToolManageService: ) cache.delete() - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) - return {"result": "success"} @staticmethod @@ -434,8 +425,6 @@ class BuiltinToolManageService: target_provider.is_default = True session.commit() - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) return {"result": "success"} @staticmethod diff --git a/api/services/tools/mcp_tools_manage_service.py b/api/services/tools/mcp_tools_manage_service.py index d641fe0315..0be106f597 100644 --- a/api/services/tools/mcp_tools_manage_service.py +++ b/api/services/tools/mcp_tools_manage_service.py @@ -15,7 +15,6 @@ from sqlalchemy.orm import Session from core.entities.mcp_provider import MCPAuthentication, MCPConfiguration, MCPProviderEntity from core.helper import encrypter from core.helper.provider_cache import NoOpProviderCredentialCache -from core.helper.tool_provider_cache import ToolProviderListCache from core.mcp.auth.auth_flow import auth from core.mcp.auth_client import MCPClientWithAuthRetry from core.mcp.error import MCPAuthError, MCPError @@ -65,6 +64,15 @@ class ServerUrlValidationResult(BaseModel): 
return self.needs_validation and self.validation_passed and self.reconnect_result is not None +class ProviderUrlValidationData(BaseModel): + """Data required for URL validation, extracted from the database so that network operations can run outside the session""" + + current_server_url_hash: str + headers: dict[str, str] + timeout: float | None + sse_read_timeout: float | None + + class MCPToolManageService: """Service class for managing MCP tools and providers.""" @@ -166,9 +174,6 @@ class MCPToolManageService: self._session.add(mcp_tool) self._session.flush() - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) - mcp_providers = ToolTransformService.mcp_provider_to_user_provider(mcp_tool, for_list=True) return mcp_providers @@ -192,7 +197,7 @@ class MCPToolManageService: Update an MCP provider. Args: - validation_result: Pre-validation result from validate_server_url_change. + validation_result: Pre-validation result from validate_server_url_standalone. If provided and contains reconnect_result, it will be used instead of performing network operations. """ @@ -251,8 +256,6 @@ class MCPToolManageService: # Flush changes to database self._session.flush() - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) except IntegrityError as e: self._handle_integrity_error(e, name, server_url, server_identifier) @@ -261,9 +264,6 @@ class MCPToolManageService: mcp_tool = self.get_provider(provider_id=provider_id, tenant_id=tenant_id) self._session.delete(mcp_tool) - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) - def list_providers( self, *, tenant_id: str, for_list: bool = False, include_sensitive: bool = True ) -> list[ToolProviderApiEntity]: @@ -319,8 +319,14 @@ class MCPToolManageService: except MCPError as e: raise ValueError(f"Failed to connect to MCP server: {e}") - # Update database with retrieved tools - db_provider.tools = json.dumps([tool.model_dump() for tool in tools]) + # Update database with retrieved tools (ensure description is a non-null string) + tools_payload = [] + for tool in tools: + data = tool.model_dump() + if data.get("description") is None: + data["description"] = "" + tools_payload.append(data) + db_provider.tools = json.dumps(tools_payload) db_provider.authed = True db_provider.updated_at = datetime.now() self._session.flush() @@ -546,30 +552,39 @@ class MCPToolManageService: ) return self.execute_auth_actions(auth_result) - def _reconnect_provider(self, *, server_url: str, provider: MCPToolProvider) -> ReconnectResult: - """Attempt to reconnect to MCP provider with new server URL.""" + def get_provider_for_url_validation(self, *, tenant_id: str, provider_id: str) -> ProviderUrlValidationData: + """ + Get provider data required for URL validation. + This method performs a database read and should be called within a session. 
+ + Returns: + ProviderUrlValidationData: Data needed for standalone URL validation + """ + provider = self.get_provider(provider_id=provider_id, tenant_id=tenant_id) provider_entity = provider.to_entity() - headers = provider_entity.headers + return ProviderUrlValidationData( + current_server_url_hash=provider.server_url_hash, + headers=provider_entity.headers, + timeout=provider_entity.timeout, + sse_read_timeout=provider_entity.sse_read_timeout, + ) - try: - tools = self._retrieve_remote_mcp_tools(server_url, headers, provider_entity) - return ReconnectResult( - authed=True, - tools=json.dumps([tool.model_dump() for tool in tools]), - encrypted_credentials=EMPTY_CREDENTIALS_JSON, - ) - except MCPAuthError: - return ReconnectResult(authed=False, tools=EMPTY_TOOLS_JSON, encrypted_credentials=EMPTY_CREDENTIALS_JSON) - except MCPError as e: - raise ValueError(f"Failed to re-connect MCP server: {e}") from e - - def validate_server_url_change( - self, *, tenant_id: str, provider_id: str, new_server_url: str + @staticmethod + def validate_server_url_standalone( + *, + tenant_id: str, + new_server_url: str, + validation_data: ProviderUrlValidationData, ) -> ServerUrlValidationResult: """ Validate server URL change by attempting to connect to the new server. - This method should be called BEFORE update_provider to perform network operations - outside of the database transaction. + This method performs network operations and MUST be called OUTSIDE of any database session + to avoid holding locks during network I/O. + + Args: + tenant_id: Tenant ID for encryption + new_server_url: The new server URL to validate + validation_data: Provider data obtained from get_provider_for_url_validation Returns: ServerUrlValidationResult: Validation result with connection status and tools if successful @@ -579,25 +594,30 @@ class MCPToolManageService: return ServerUrlValidationResult(needs_validation=False) # Validate URL format - if not self._is_valid_url(new_server_url): + parsed = urlparse(new_server_url) + if not all([parsed.scheme, parsed.netloc]) or parsed.scheme not in ["http", "https"]: raise ValueError("Server URL is not valid.") # Always encrypt and hash the URL encrypted_server_url = encrypter.encrypt_token(tenant_id, new_server_url) new_server_url_hash = hashlib.sha256(new_server_url.encode()).hexdigest() - # Get current provider - provider = self.get_provider(provider_id=provider_id, tenant_id=tenant_id) - # Check if URL is actually different - if new_server_url_hash == provider.server_url_hash: + if new_server_url_hash == validation_data.current_server_url_hash: # URL hasn't changed, but still return the encrypted data return ServerUrlValidationResult( - needs_validation=False, encrypted_server_url=encrypted_server_url, server_url_hash=new_server_url_hash + needs_validation=False, + encrypted_server_url=encrypted_server_url, + server_url_hash=new_server_url_hash, ) - # Perform validation by attempting to connect - reconnect_result = self._reconnect_provider(server_url=new_server_url, provider=provider) + # Perform network validation - this is the expensive operation that should be outside session + reconnect_result = MCPToolManageService._reconnect_with_url( + server_url=new_server_url, + headers=validation_data.headers, + timeout=validation_data.timeout, + sse_read_timeout=validation_data.sse_read_timeout, + ) return ServerUrlValidationResult( needs_validation=True, validation_passed=True, @@ -606,6 +626,60 @@ class MCPToolManageService: server_url_hash=new_server_url_hash, ) + @staticmethod + def 
reconnect_with_url( + *, + server_url: str, + headers: dict[str, str], + timeout: float | None, + sse_read_timeout: float | None, + ) -> ReconnectResult: + return MCPToolManageService._reconnect_with_url( + server_url=server_url, + headers=headers, + timeout=timeout, + sse_read_timeout=sse_read_timeout, + ) + + @staticmethod + def _reconnect_with_url( + *, + server_url: str, + headers: dict[str, str], + timeout: float | None, + sse_read_timeout: float | None, + ) -> ReconnectResult: + """ + Attempt to connect to MCP server with given URL. + This is a static method that performs network I/O without database access. + """ + from core.mcp.mcp_client import MCPClient + + try: + with MCPClient( + server_url=server_url, + headers=headers, + timeout=timeout, + sse_read_timeout=sse_read_timeout, + ) as mcp_client: + tools = mcp_client.list_tools() + # Ensure tool descriptions are non-null in payload + tools_payload = [] + for t in tools: + d = t.model_dump() + if d.get("description") is None: + d["description"] = "" + tools_payload.append(d) + return ReconnectResult( + authed=True, + tools=json.dumps(tools_payload), + encrypted_credentials=EMPTY_CREDENTIALS_JSON, + ) + except MCPAuthError: + return ReconnectResult(authed=False, tools=EMPTY_TOOLS_JSON, encrypted_credentials=EMPTY_CREDENTIALS_JSON) + except MCPError as e: + raise ValueError(f"Failed to re-connect MCP server: {e}") from e + def _build_tool_provider_response( self, db_provider: MCPToolProvider, provider_entity: MCPProviderEntity, tools: list ) -> ToolProviderApiEntity: diff --git a/api/services/tools/tools_manage_service.py b/api/services/tools/tools_manage_service.py index 038c462f15..51e9120b8d 100644 --- a/api/services/tools/tools_manage_service.py +++ b/api/services/tools/tools_manage_service.py @@ -1,6 +1,5 @@ import logging -from core.helper.tool_provider_cache import ToolProviderListCache from core.tools.entities.api_entities import ToolProviderTypeApiLiteral from core.tools.tool_manager import ToolManager from services.tools.tools_transform_service import ToolTransformService @@ -16,14 +15,6 @@ class ToolCommonService: :return: the list of tool providers """ - # Try to get from cache first - cached_result = ToolProviderListCache.get_cached_providers(tenant_id, typ) - if cached_result is not None: - logger.debug("Returning cached tool providers for tenant %s, type %s", tenant_id, typ) - return cached_result - - # Cache miss - fetch from database - logger.debug("Cache miss for tool providers, fetching from database for tenant %s, type %s", tenant_id, typ) providers = ToolManager.list_providers_from_api(user_id, tenant_id, typ) # add icon @@ -32,7 +23,4 @@ class ToolCommonService: result = [provider.to_dict() for provider in providers] - # Cache the result - ToolProviderListCache.set_cached_providers(tenant_id, typ, result) - return result diff --git a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index fe77ff2dc5..ab5d5480df 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -7,7 +7,6 @@ from typing import Any from sqlalchemy import or_, select from sqlalchemy.orm import Session -from core.helper.tool_provider_cache import ToolProviderListCache from core.model_runtime.utils.encoders import jsonable_encoder from core.tools.__base.tool_provider import ToolProviderController from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity @@ -68,34 +67,31 @@ class 
WorkflowToolManageService: if workflow is None: raise ValueError(f"Workflow not found for app {workflow_app_id}") - with Session(db.engine, expire_on_commit=False) as session, session.begin(): - workflow_tool_provider = WorkflowToolProvider( - tenant_id=tenant_id, - user_id=user_id, - app_id=workflow_app_id, - name=name, - label=label, - icon=json.dumps(icon), - description=description, - parameter_configuration=json.dumps(parameters), - privacy_policy=privacy_policy, - version=workflow.version, - ) - session.add(workflow_tool_provider) + workflow_tool_provider = WorkflowToolProvider( + tenant_id=tenant_id, + user_id=user_id, + app_id=workflow_app_id, + name=name, + label=label, + icon=json.dumps(icon), + description=description, + parameter_configuration=json.dumps(parameters), + privacy_policy=privacy_policy, + version=workflow.version, + ) try: WorkflowToolProviderController.from_db(workflow_tool_provider) except Exception as e: raise ValueError(str(e)) + with Session(db.engine, expire_on_commit=False) as session, session.begin(): + session.add(workflow_tool_provider) + if labels is not None: ToolLabelManager.update_tool_labels( ToolTransformService.workflow_provider_to_controller(workflow_tool_provider), labels ) - - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) - return {"result": "success"} @classmethod @@ -183,9 +179,6 @@ class WorkflowToolManageService: ToolTransformService.workflow_provider_to_controller(workflow_tool_provider), labels ) - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) - return {"result": "success"} @classmethod @@ -248,9 +241,6 @@ class WorkflowToolManageService: db.session.commit() - # Invalidate tool providers cache - ToolProviderListCache.invalidate_cache(tenant_id) - return {"result": "success"} @classmethod diff --git a/api/services/trigger/trigger_provider_service.py b/api/services/trigger/trigger_provider_service.py index 668e4c5be2..688993c798 100644 --- a/api/services/trigger/trigger_provider_service.py +++ b/api/services/trigger/trigger_provider_service.py @@ -94,16 +94,23 @@ class TriggerProviderService: provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) for subscription in subscriptions: - encrypter, _ = create_trigger_provider_encrypter_for_subscription( + credential_encrypter, _ = create_trigger_provider_encrypter_for_subscription( tenant_id=tenant_id, controller=provider_controller, subscription=subscription, ) subscription.credentials = dict( - encrypter.mask_credentials(dict(encrypter.decrypt(subscription.credentials))) + credential_encrypter.mask_credentials(dict(credential_encrypter.decrypt(subscription.credentials))) ) - subscription.properties = dict(encrypter.mask_credentials(dict(encrypter.decrypt(subscription.properties)))) - subscription.parameters = dict(encrypter.mask_credentials(dict(encrypter.decrypt(subscription.parameters)))) + properties_encrypter, _ = create_trigger_provider_encrypter_for_properties( + tenant_id=tenant_id, + controller=provider_controller, + subscription=subscription, + ) + subscription.properties = dict( + properties_encrypter.mask_credentials(dict(properties_encrypter.decrypt(subscription.properties))) + ) + subscription.parameters = dict(subscription.parameters) count = workflows_in_use_map.get(subscription.id) subscription.workflows_in_use = count if count is not None else 0 @@ -209,6 +216,101 @@ class TriggerProviderService: logger.exception("Failed to add trigger provider") raise ValueError(str(e)) + 
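Both `update_trigger_subscription` below and the credential paths above resolve client-submitted values against stored secrets the same way: anything echoed back as the `[__HIDDEN__]` sentinel is swapped for the stored original, while everything else is treated as an edit. A minimal standalone sketch of that merge, with `merge_masked_credentials` as an illustrative name that does not exist in the codebase:

```python
# Standalone sketch of the hidden-value merge used throughout these services.
# HIDDEN_VALUE mirrors the sentinel from api/constants; the function name is ours.
HIDDEN_VALUE = "[__HIDDEN__]"


def merge_masked_credentials(submitted: dict[str, str], original: dict[str, str]) -> dict[str, str]:
    """Replace masked values with the stored originals; keep real edits as-is."""
    return {
        key: original.get(key, value) if value == HIDDEN_VALUE else value
        for key, value in submitted.items()
    }


assert merge_masked_credentials(
    {"token": "[__HIDDEN__]", "org": "acme"},
    {"token": "ghp_secret", "org": "old-org"},
) == {"token": "ghp_secret", "org": "acme"}
```
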
@classmethod + def update_trigger_subscription( + cls, + tenant_id: str, + subscription_id: str, + name: str | None = None, + properties: Mapping[str, Any] | None = None, + parameters: Mapping[str, Any] | None = None, + credentials: Mapping[str, Any] | None = None, + credential_expires_at: int | None = None, + expires_at: int | None = None, + ) -> None: + """ + Update an existing trigger subscription. + + :param tenant_id: Tenant ID + :param subscription_id: Subscription instance ID + :param name: Optional new name for this subscription + :param properties: Optional new properties + :param parameters: Optional new parameters + :param credentials: Optional new credentials + :param credential_expires_at: Optional new credential expiration timestamp + :param expires_at: Optional new expiration timestamp + :return: None + """ + with Session(db.engine, expire_on_commit=False) as session: + # Use distributed lock to prevent race conditions on the same subscription + lock_key = f"trigger_subscription_update_lock:{tenant_id}_{subscription_id}" + with redis_client.lock(lock_key, timeout=20): + subscription: TriggerSubscription | None = ( + session.query(TriggerSubscription).filter_by(tenant_id=tenant_id, id=subscription_id).first() + ) + if not subscription: + raise ValueError(f"Trigger subscription {subscription_id} not found") + + provider_id = TriggerProviderID(subscription.provider_id) + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + + # Check for name uniqueness if name is being updated + if name is not None and name != subscription.name: + existing = ( + session.query(TriggerSubscription) + .filter_by(tenant_id=tenant_id, provider_id=str(provider_id), name=name) + .first() + ) + if existing: + raise ValueError(f"Subscription name '{name}' already exists for this provider") + subscription.name = name + + # Update properties if provided + if properties is not None: + properties_encrypter, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=provider_controller.get_properties_schema(), + cache=NoOpProviderCredentialCache(), + ) + # Handle hidden values - preserve original encrypted values + original_properties = properties_encrypter.decrypt(subscription.properties) + new_properties: dict[str, Any] = { + key: value if value != HIDDEN_VALUE else original_properties.get(key, UNKNOWN_VALUE) + for key, value in properties.items() + } + subscription.properties = dict(properties_encrypter.encrypt(new_properties)) + + # Update parameters if provided + if parameters is not None: + subscription.parameters = dict(parameters) + + # Update credentials if provided + if credentials is not None: + credential_type = CredentialType.of(subscription.credential_type) + credential_encrypter, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=provider_controller.get_credential_schema_config(credential_type), + cache=NoOpProviderCredentialCache(), + ) + subscription.credentials = dict(credential_encrypter.encrypt(dict(credentials))) + + # Update credential expiration timestamp if provided + if credential_expires_at is not None: + subscription.credential_expires_at = credential_expires_at + + # Update expiration timestamp if provided + if expires_at is not None: + subscription.expires_at = expires_at + + session.commit() + + # Clear subscription cache + delete_cache_for_subscription( + tenant_id=tenant_id, + provider_id=subscription.provider_id, + subscription_id=subscription.id, + ) + @classmethod + def 
get_subscription_by_id(cls, tenant_id: str, subscription_id: str | None = None) -> TriggerSubscription | None: """ @@ -257,17 +359,18 @@ raise ValueError(f"Trigger provider subscription {subscription_id} not found") credential_type: CredentialType = CredentialType.of(subscription.credential_type) + provider_id = TriggerProviderID(subscription.provider_id) + provider_controller: PluginTriggerProviderController = TriggerManager.get_trigger_provider( + tenant_id=tenant_id, provider_id=provider_id + ) + encrypter, _ = create_trigger_provider_encrypter_for_subscription( + tenant_id=tenant_id, + controller=provider_controller, + subscription=subscription, + ) + is_auto_created: bool = credential_type in [CredentialType.OAUTH2, CredentialType.API_KEY] if is_auto_created: - provider_id = TriggerProviderID(subscription.provider_id) - provider_controller: PluginTriggerProviderController = TriggerManager.get_trigger_provider( - tenant_id=tenant_id, provider_id=provider_id - ) - encrypter, _ = create_trigger_provider_encrypter_for_subscription( - tenant_id=tenant_id, - controller=provider_controller, - subscription=subscription, - ) try: TriggerManager.unsubscribe_trigger( tenant_id=tenant_id, @@ -280,8 +383,8 @@ except Exception as e: logger.exception("Error unsubscribing trigger", exc_info=e) - # Clear cache session.delete(subscription) + # Clear cache delete_cache_for_subscription( tenant_id=tenant_id, provider_id=subscription.provider_id, @@ -688,3 +791,127 @@ ) subscription.properties = dict(properties_encrypter.decrypt(subscription.properties)) return subscription + + @classmethod + def verify_subscription_credentials( + cls, + tenant_id: str, + user_id: str, + provider_id: TriggerProviderID, + subscription_id: str, + credentials: dict[str, Any], + ) -> dict[str, Any]: + """ + Verify credentials for an existing subscription without updating it. + + This is used in edit mode to validate new credentials before rebuild. + + :param tenant_id: Tenant ID + :param user_id: User ID + :param provider_id: Provider identifier + :param subscription_id: Subscription ID + :param credentials: New credentials to verify + :return: dict with 'verified' boolean + """ + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + if not provider_controller: + raise ValueError(f"Provider {provider_id} not found") + + subscription = cls.get_subscription_by_id( + tenant_id=tenant_id, + subscription_id=subscription_id, + ) + if not subscription: + raise ValueError(f"Subscription {subscription_id} not found") + + credential_type = CredentialType.of(subscription.credential_type) + + # For API Key, validate the new credentials + if credential_type == CredentialType.API_KEY: + new_credentials: dict[str, Any] = { + key: value if value != HIDDEN_VALUE else subscription.credentials.get(key, UNKNOWN_VALUE) + for key, value in credentials.items() + } + try: + provider_controller.validate_credentials(user_id, credentials=new_credentials) + return {"verified": True} + except Exception as e: + raise ValueError(f"Invalid credentials: {e}") from e + + return {"verified": True} + + @classmethod + def rebuild_trigger_subscription( + cls, + tenant_id: str, + provider_id: TriggerProviderID, + subscription_id: str, + credentials: Mapping[str, Any], + parameters: Mapping[str, Any], + name: str | None = None, + ) -> None: + """ + Rebuild an existing trigger subscription in place. + + This method rebuilds the subscription by calling the third-party provider's DELETE and CREATE APIs (e.g. GitHub), + keeping the same subscription_id and endpoint_id so the webhook URL remains unchanged. + + :param tenant_id: Tenant ID + :param name: Optional new name for the subscription + :param subscription_id: Subscription ID + :param provider_id: Provider identifier + :param credentials: Credentials for the subscription + :param parameters: Parameters for the subscription + :return: None + """ + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + if not provider_controller: + raise ValueError(f"Provider {provider_id} not found") + + subscription = TriggerProviderService.get_subscription_by_id( + tenant_id=tenant_id, + subscription_id=subscription_id, + ) + if not subscription: + raise ValueError(f"Subscription {subscription_id} not found") + + credential_type = CredentialType.of(subscription.credential_type) + if credential_type not in {CredentialType.OAUTH2, CredentialType.API_KEY}: + raise ValueError(f"Credential type {credential_type} not supported for auto creation") + + # Delete the previous subscription + user_id = subscription.user_id + unsubscribe_result = TriggerManager.unsubscribe_trigger( + tenant_id=tenant_id, + user_id=user_id, + provider_id=provider_id, + subscription=subscription.to_entity(), + credentials=subscription.credentials, + credential_type=credential_type, + ) + if not unsubscribe_result.success: + raise ValueError(f"Failed to delete previous subscription: {unsubscribe_result.message}") + + # Create a new subscription with the same subscription_id and endpoint_id + new_credentials: dict[str, Any] = { + key: value if value != HIDDEN_VALUE else subscription.credentials.get(key, UNKNOWN_VALUE) + for key, value in credentials.items() + } + new_subscription: TriggerSubscriptionEntity = TriggerManager.subscribe_trigger( + tenant_id=tenant_id, + user_id=user_id, + provider_id=provider_id, + endpoint=generate_plugin_trigger_endpoint_url(subscription.endpoint_id), + parameters=parameters, + credentials=new_credentials, + credential_type=credential_type, + ) + TriggerProviderService.update_trigger_subscription( + tenant_id=tenant_id, + subscription_id=subscription.id, + name=name, + parameters=parameters, + credentials=new_credentials, + properties=new_subscription.properties, + expires_at=new_subscription.expires_at, + ) diff --git a/api/services/trigger/trigger_subscription_builder_service.py b/api/services/trigger/trigger_subscription_builder_service.py index 571393c782..37f852da3e 100644 --- a/api/services/trigger/trigger_subscription_builder_service.py +++ b/api/services/trigger/trigger_subscription_builder_service.py @@ -453,11 +453,12 @@ class TriggerSubscriptionBuilderService: if not subscription_builder: return None - # response to validation endpoint - controller: PluginTriggerProviderController = TriggerManager.get_trigger_provider( - tenant_id=subscription_builder.tenant_id, provider_id=TriggerProviderID(subscription_builder.provider_id) - ) try: + # response to validation endpoint + controller: PluginTriggerProviderController = TriggerManager.get_trigger_provider( + tenant_id=subscription_builder.tenant_id, + provider_id=TriggerProviderID(subscription_builder.provider_id), + ) dispatch_response: TriggerDispatchResponse = controller.dispatch( request=request, subscription=subscription_builder.to_subscription(), diff --git a/api/services/trigger/webhook_service.py b/api/services/trigger/webhook_service.py index 
4b3e1330fd..4159f5f8f4 100644 --- a/api/services/trigger/webhook_service.py +++ b/api/services/trigger/webhook_service.py @@ -33,6 +33,11 @@ from services.errors.app import QuotaExceededError from services.trigger.app_trigger_service import AppTriggerService from services.workflow.entities import WebhookTriggerData +try: + import magic +except ImportError: + magic = None # type: ignore[assignment] + logger = logging.getLogger(__name__) @@ -317,7 +322,8 @@ class WebhookService: try: file_content = request.get_data() if file_content: - file_obj = cls._create_file_from_binary(file_content, "application/octet-stream", webhook_trigger) + mimetype = cls._detect_binary_mimetype(file_content) + file_obj = cls._create_file_from_binary(file_content, mimetype, webhook_trigger) return {"raw": file_obj.to_dict()}, {} else: return {"raw": None}, {} @@ -341,6 +347,18 @@ class WebhookService: body = {"raw": ""} return body, {} + @staticmethod + def _detect_binary_mimetype(file_content: bytes) -> str: + """Guess MIME type for binary payloads using python-magic when available.""" + if magic is not None: + try: + detected = magic.from_buffer(file_content[:1024], mime=True) + if detected: + return detected + except Exception: + logger.debug("python-magic detection failed for octet-stream payload") + return "application/octet-stream" + @classmethod def _process_file_uploads( cls, files: Mapping[str, FileStorage], webhook_trigger: WorkflowWebhookTrigger @@ -845,10 +863,18 @@ class WebhookService: not_found_in_cache.append(node_id) continue - with Session(db.engine) as session: - try: - # lock the concurrent webhook trigger creation - redis_client.lock(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:apps:{app.id}:lock", timeout=10) + lock_key = f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:apps:{app.id}:lock" + lock = redis_client.lock(lock_key, timeout=10) + lock_acquired = False + + try: + # acquire the lock with blocking and timeout + lock_acquired = lock.acquire(blocking=True, blocking_timeout=10) + if not lock_acquired: + logger.warning("Failed to acquire lock for webhook sync, app %s", app.id) + raise RuntimeError("Failed to acquire lock for webhook trigger synchronization") + + with Session(db.engine) as session: # fetch the non-cached nodes from DB all_records = session.scalars( select(WorkflowWebhookTrigger).where( @@ -885,11 +911,16 @@ class WebhookService: session.delete(nodes_id_in_db[node_id]) redis_client.delete(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:{app.id}:{node_id}") session.commit() - except Exception: - logger.exception("Failed to sync webhook relationships for app %s", app.id) - raise - finally: - redis_client.delete(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:apps:{app.id}:lock") + except Exception: + logger.exception("Failed to sync webhook relationships for app %s", app.id) + raise + finally: + # release the lock only if it was acquired + if lock_acquired: + try: + lock.release() + except Exception: + logger.exception("Failed to release lock for webhook sync, app %s", app.id) @classmethod def generate_webhook_id(cls) -> str: diff --git a/api/services/variable_truncator.py b/api/services/variable_truncator.py index 6eb8d0031d..f973361341 100644 --- a/api/services/variable_truncator.py +++ b/api/services/variable_truncator.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import dataclasses from abc import ABC, abstractmethod from collections.abc import Mapping @@ -106,7 +108,7 @@ class VariableTruncator(BaseTruncator): self._max_size_bytes = max_size_bytes @classmethod - def default(cls) -> "VariableTruncator": + def 
default(cls) -> VariableTruncator: return VariableTruncator( max_size_bytes=dify_config.WORKFLOW_VARIABLE_TRUNCATION_MAX_SIZE, array_element_limit=dify_config.WORKFLOW_VARIABLE_TRUNCATION_ARRAY_LENGTH, @@ -410,9 +412,12 @@ class VariableTruncator(BaseTruncator): @overload def _truncate_json_primitives(self, val: None, target_size: int) -> _PartResult[None]: ... + @overload + def _truncate_json_primitives(self, val: File, target_size: int) -> _PartResult[File]: ... + def _truncate_json_primitives( self, - val: UpdatedVariable | str | list[object] | dict[str, object] | bool | int | float | None, + val: UpdatedVariable | File | str | list[object] | dict[str, object] | bool | int | float | None, target_size: int, ) -> _PartResult[Any]: """Truncate a value within an object to fit within budget.""" @@ -425,6 +430,9 @@ class VariableTruncator(BaseTruncator): return self._truncate_array(val, target_size) elif isinstance(val, dict): return self._truncate_object(val, target_size) + elif isinstance(val, File): + # File objects should not be truncated, return as-is + return _PartResult(val, self.calculate_json_size(val), False) elif val is None or isinstance(val, (bool, int, float)): return _PartResult(val, self.calculate_json_size(val), False) else: diff --git a/api/services/website_service.py b/api/services/website_service.py index a23f01ec71..fe48c3b08e 100644 --- a/api/services/website_service.py +++ b/api/services/website_service.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import datetime import json from dataclasses import dataclass @@ -78,7 +80,7 @@ class WebsiteCrawlApiRequest: return CrawlRequest(url=self.url, provider=self.provider, options=options) @classmethod - def from_args(cls, args: dict) -> "WebsiteCrawlApiRequest": + def from_args(cls, args: dict) -> WebsiteCrawlApiRequest: """Create from Flask-RESTful parsed arguments.""" provider = args.get("provider") url = args.get("url") @@ -102,7 +104,7 @@ class WebsiteCrawlStatusApiRequest: job_id: str @classmethod - def from_args(cls, args: dict, job_id: str) -> "WebsiteCrawlStatusApiRequest": + def from_args(cls, args: dict, job_id: str) -> WebsiteCrawlStatusApiRequest: """Create from Flask-RESTful parsed arguments.""" provider = args.get("provider") if not provider: diff --git a/api/services/workflow_app_service.py b/api/services/workflow_app_service.py index 01f0c7a55a..8574d30255 100644 --- a/api/services/workflow_app_service.py +++ b/api/services/workflow_app_service.py @@ -86,12 +86,19 @@ class WorkflowAppService: # Join to workflow run for filtering when needed. 
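The keyword hunk that continues below is the same hardening applied across the services earlier in this diff: user input is routed through `libs.helper.escape_like_pattern` and matched with `ilike(..., escape="\\")`. The helper's body is not shown anywhere in this diff; a plausible sketch of what it does, inferred from its call sites rather than copied from the Dify source:

```python
# Plausible implementation of libs.helper.escape_like_pattern, inferred from
# how it is called in this diff. Escaping the backslash first is essential,
# otherwise the escapes added for % and _ would themselves be re-escaped.
def escape_like_pattern(value: str) -> str:
    """Escape LIKE/ILIKE wildcards so user input matches literally."""
    return value.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")


assert escape_like_pattern("50%_off") == "50\\%\\_off"
```
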
if keyword: - keyword_like_val = f"%{keyword[:30].encode('unicode_escape').decode('utf-8')}%".replace(r"\u", r"\\u") + from libs.helper import escape_like_pattern + + # Escape special characters in keyword to prevent SQL injection via LIKE wildcards + escaped_keyword = escape_like_pattern(keyword[:30]) + keyword_like_val = f"%{escaped_keyword}%" keyword_conditions = [ - WorkflowRun.inputs.ilike(keyword_like_val), - WorkflowRun.outputs.ilike(keyword_like_val), + WorkflowRun.inputs.ilike(keyword_like_val, escape="\\"), + WorkflowRun.outputs.ilike(keyword_like_val, escape="\\"), # filter keyword by end user session id if created by end user role - and_(WorkflowRun.created_by_role == "end_user", EndUser.session_id.ilike(keyword_like_val)), + and_( + WorkflowRun.created_by_role == "end_user", + EndUser.session_id.ilike(keyword_like_val, escape="\\"), + ), ] # filter keyword by workflow run id diff --git a/api/services/workflow_draft_variable_service.py b/api/services/workflow_draft_variable_service.py index f299ce3baa..9407a2b3f0 100644 --- a/api/services/workflow_draft_variable_service.py +++ b/api/services/workflow_draft_variable_service.py @@ -679,6 +679,7 @@ def _batch_upsert_draft_variable( def _model_to_insertion_dict(model: WorkflowDraftVariable) -> dict[str, Any]: d: dict[str, Any] = { + "id": model.id, "app_id": model.app_id, "last_edited_at": None, "node_id": model.node_id, diff --git a/api/services/workspace_service.py b/api/services/workspace_service.py index 292ac6e008..3ee41c2e8d 100644 --- a/api/services/workspace_service.py +++ b/api/services/workspace_service.py @@ -31,7 +31,8 @@ class WorkspaceService: assert tenant_account_join is not None, "TenantAccountJoin not found" tenant_info["role"] = tenant_account_join.role - can_replace_logo = FeatureService.get_features(tenant.id).can_replace_logo + feature = FeatureService.get_features(tenant.id) + can_replace_logo = feature.can_replace_logo if can_replace_logo and TenantService.has_roles(tenant, [TenantAccountRole.OWNER, TenantAccountRole.ADMIN]): base_url = dify_config.FILES_URL @@ -46,5 +47,19 @@ class WorkspaceService: "remove_webapp_brand": remove_webapp_brand, "replace_webapp_logo": replace_webapp_logo, } + if dify_config.EDITION == "CLOUD": + tenant_info["next_credit_reset_date"] = feature.next_credit_reset_date + + from services.credit_pool_service import CreditPoolService + + paid_pool = CreditPoolService.get_pool(tenant_id=tenant.id, pool_type="paid") + if paid_pool: + tenant_info["trial_credits"] = paid_pool.quota_limit + tenant_info["trial_credits_used"] = paid_pool.quota_used + else: + trial_pool = CreditPoolService.get_pool(tenant_id=tenant.id, pool_type="trial") + if trial_pool: + tenant_info["trial_credits"] = trial_pool.quota_limit + tenant_info["trial_credits_used"] = trial_pool.quota_used return tenant_info diff --git a/api/tasks/annotation/batch_import_annotations_task.py b/api/tasks/annotation/batch_import_annotations_task.py index 775814318b..97c0b31490 100644 --- a/api/tasks/annotation/batch_import_annotations_task.py +++ b/api/tasks/annotation/batch_import_annotations_task.py @@ -31,7 +31,6 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: start_at = time.perf_counter() indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}" active_jobs_key = f"annotation_import_active:{tenant_id}" - # get app info app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first() @@ -100,6 +99,5 @@ def 
batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: except Exception as cleanup_error: # Log but don't fail if cleanup fails - the job will be auto-expired logger.warning("Failed to clean up active job tracking for %s: %s", job_id, cleanup_error) - # Close database session db.session.close() diff --git a/api/tasks/annotation/enable_annotation_reply_task.py b/api/tasks/annotation/enable_annotation_reply_task.py index cdc07c77a8..be1de3cdd2 100644 --- a/api/tasks/annotation/enable_annotation_reply_task.py +++ b/api/tasks/annotation/enable_annotation_reply_task.py @@ -98,7 +98,7 @@ def enable_annotation_reply_task( if annotations: for annotation in annotations: document = Document( - page_content=annotation.question, + page_content=annotation.question_text, metadata={"annotation_id": annotation.id, "app_id": app_id, "doc_id": annotation.id}, ) documents.append(document) diff --git a/api/tasks/document_indexing_sync_task.py b/api/tasks/document_indexing_sync_task.py index 4c1f38c3bb..5fc2597c92 100644 --- a/api/tasks/document_indexing_sync_task.py +++ b/api/tasks/document_indexing_sync_task.py @@ -2,7 +2,6 @@ import logging import time import click -import sqlalchemy as sa from celery import shared_task from sqlalchemy import select @@ -12,7 +11,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFacto from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from models.dataset import Dataset, Document, DocumentSegment -from models.source import DataSourceOauthBinding +from services.datasource_provider_service import DatasourceProviderService logger = logging.getLogger(__name__) @@ -48,27 +47,36 @@ def document_indexing_sync_task(dataset_id: str, document_id: str): page_id = data_source_info["notion_page_id"] page_type = data_source_info["type"] page_edited_time = data_source_info["last_edited_time"] + credential_id = data_source_info.get("credential_id") - data_source_binding = ( - db.session.query(DataSourceOauthBinding) - .where( - sa.and_( - DataSourceOauthBinding.tenant_id == document.tenant_id, - DataSourceOauthBinding.provider == "notion", - DataSourceOauthBinding.disabled == False, - DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"', - ) - ) - .first() + # Get credentials from datasource provider + datasource_provider_service = DatasourceProviderService() + credential = datasource_provider_service.get_datasource_credentials( + tenant_id=document.tenant_id, + credential_id=credential_id, + provider="notion_datasource", + plugin_id="langgenius/notion_datasource", ) - if not data_source_binding: - raise ValueError("Data source binding not found.") + + if not credential: + logger.error( + "Datasource credential not found for document %s, tenant_id: %s, credential_id: %s", + document_id, + document.tenant_id, + credential_id, + ) + document.indexing_status = "error" + document.error = "Datasource credential not found. Please reconnect your Notion workspace." 
+ document.stopped_at = naive_utc_now() + db.session.commit() + db.session.close() + return loader = NotionExtractor( notion_workspace_id=workspace_id, notion_obj_id=page_id, notion_page_type=page_type, - notion_access_token=data_source_binding.access_token, + notion_access_token=credential.get("integration_secret"), tenant_id=document.tenant_id, ) diff --git a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py index 1eef361a92..3c5e152520 100644 --- a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py @@ -178,7 +178,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], workflow_id=workflow_id, user=account, application_generate_entity=entity, - invoke_from=InvokeFrom.PUBLISHED, + invoke_from=InvokeFrom.PUBLISHED_PIPELINE, workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, streaming=streaming, diff --git a/api/tasks/rag_pipeline/rag_pipeline_run_task.py b/api/tasks/rag_pipeline/rag_pipeline_run_task.py index 275f5abe6e..093342d1a3 100644 --- a/api/tasks/rag_pipeline/rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/rag_pipeline_run_task.py @@ -178,7 +178,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], workflow_id=workflow_id, user=account, application_generate_entity=entity, - invoke_from=InvokeFrom.PUBLISHED, + invoke_from=InvokeFrom.PUBLISHED_PIPELINE, workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, streaming=streaming, diff --git a/api/tests/integration_tests/model_runtime/__mock/plugin_model.py b/api/tests/integration_tests/model_runtime/__mock/plugin_model.py index d59d5dc0fe..5012defdad 100644 --- a/api/tests/integration_tests/model_runtime/__mock/plugin_model.py +++ b/api/tests/integration_tests/model_runtime/__mock/plugin_model.py @@ -48,10 +48,6 @@ class MockModelClass(PluginModelClient): en_US="https://example.com/icon_small.png", zh_Hans="https://example.com/icon_small.png", ), - icon_large=I18nObject( - en_US="https://example.com/icon_large.png", - zh_Hans="https://example.com/icon_large.png", - ), supported_model_types=[ModelType.LLM], configurate_methods=[ConfigurateMethod.PREDEFINED_MODEL], models=[ diff --git a/api/tests/integration_tests/workflow/nodes/code_executor/test_code_jinja2.py b/api/tests/integration_tests/workflow/nodes/code_executor/test_code_jinja2.py index 94903cf796..c8eb9ec3e4 100644 --- a/api/tests/integration_tests/workflow/nodes/code_executor/test_code_jinja2.py +++ b/api/tests/integration_tests/workflow/nodes/code_executor/test_code_jinja2.py @@ -7,11 +7,14 @@ CODE_LANGUAGE = CodeLanguage.JINJA2 def test_jinja2(): + """Test basic Jinja2 template rendering.""" template = "Hello {{template}}" + # Template must be base64 encoded to match the new safe embedding approach + template_b64 = base64.b64encode(template.encode("utf-8")).decode("utf-8") inputs = base64.b64encode(b'{"template": "World"}').decode("utf-8") code = ( Jinja2TemplateTransformer.get_runner_script() - .replace(Jinja2TemplateTransformer._code_placeholder, template) + .replace(Jinja2TemplateTransformer._template_b64_placeholder, template_b64) .replace(Jinja2TemplateTransformer._inputs_placeholder, inputs) ) result = CodeExecutor.execute_code( @@ -21,6 +24,7 @@ def test_jinja2(): def test_jinja2_with_code_template(): + """Test 
template rendering via the high-level workflow API.""" result = CodeExecutor.execute_workflow_code_template( language=CODE_LANGUAGE, code="Hello {{template}}", inputs={"template": "World"} ) @@ -28,7 +32,64 @@ def test_jinja2_get_runner_script(): + """Test that runner script contains required placeholders.""" runner_script = Jinja2TemplateTransformer.get_runner_script() - assert runner_script.count(Jinja2TemplateTransformer._code_placeholder) == 1 + assert runner_script.count(Jinja2TemplateTransformer._template_b64_placeholder) == 1 assert runner_script.count(Jinja2TemplateTransformer._inputs_placeholder) == 1 assert runner_script.count(Jinja2TemplateTransformer._result_tag) == 2 + + +def test_jinja2_template_with_special_characters(): + """ + Test that templates with special characters (quotes, newlines) render correctly. + This is a regression test for issue #26818 where textarea pre-fill values + containing special characters would break template rendering. + """ + # Template with triple quotes, single quotes, double quotes, and newlines + template = """<form> + <input type="text" value="{{ task['Task ID'] }}"> + <textarea>{{ task['Issues'] }}</textarea> + Status: "{{ status }}" + <pre>'''code block'''</pre> + </form>""" + inputs = {"task": {"Task ID": "TASK-123", "Issues": "Line 1\nLine 2\nLine 3"}, "status": "completed"} + + result = CodeExecutor.execute_workflow_code_template(language=CODE_LANGUAGE, code=template, inputs=inputs) + + # Verify the template rendered correctly with all special characters + output = result["result"] + assert 'value="TASK-123"' in output + assert "<textarea>Line 1\nLine 2\nLine 3</textarea>" in output + assert 'Status: "completed"' in output + assert "'''code block'''" in output + + +def test_jinja2_template_with_html_textarea_prefill(): + """ + Specific test for HTML textarea with Jinja2 variable pre-fill. + Verifies fix for issue #26818. + """ + template = "<textarea>{{ notes }}</textarea>" + notes_content = "This is a multi-line note.\nWith special chars: 'single' and \"double\" quotes." + inputs = {"notes": notes_content} + + result = CodeExecutor.execute_workflow_code_template(language=CODE_LANGUAGE, code=template, inputs=inputs) + + expected_output = f"<textarea>{notes_content}</textarea>" + assert result["result"] == expected_output + + +def test_jinja2_assemble_runner_script_encodes_template(): + """Test that assemble_runner_script properly base64 encodes the template.""" + template = "Hello {{ name }}!" + inputs = {"name": "World"} + + script = Jinja2TemplateTransformer.assemble_runner_script(template, inputs) + + # The template should be base64 encoded in the script + template_b64 = base64.b64encode(template.encode("utf-8")).decode("utf-8") + assert template_b64 in script + # The raw template should NOT appear in the script (it's encoded) + assert "Hello {{ name }}!" not in script diff --git a/api/tests/integration_tests/workflow/nodes/test_code.py b/api/tests/integration_tests/workflow/nodes/test_code.py index e421e4ff36..9b0bd6275b 100644 --- a/api/tests/integration_tests/workflow/nodes/test_code.py +++ b/api/tests/integration_tests/workflow/nodes/test_code.py @@ -10,6 +10,7 @@ from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.node_events import NodeRunResult from core.workflow.nodes.code.code_node import CodeNode +from core.workflow.nodes.code.limits import CodeNodeLimits from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable @@ -67,6 +68,16 @@ def init_code_node(code_config: dict): config=code_config, graph_init_params=init_params, graph_runtime_state=graph_runtime_state, + code_limits=CodeNodeLimits( + max_string_length=dify_config.CODE_MAX_STRING_LENGTH, + max_number=dify_config.CODE_MAX_NUMBER, + min_number=dify_config.CODE_MIN_NUMBER, + max_precision=dify_config.CODE_MAX_PRECISION, + max_depth=dify_config.CODE_MAX_DEPTH, + max_number_array_length=dify_config.CODE_MAX_NUMBER_ARRAY_LENGTH, + max_string_array_length=dify_config.CODE_MAX_STRING_ARRAY_LENGTH, + max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH, + ), ) return node diff --git a/api/tests/integration_tests/workflow/nodes/test_http.py b/api/tests/integration_tests/workflow/nodes/test_http.py index e75258a2a2..d814da8ec7 100644 --- a/api/tests/integration_tests/workflow/nodes/test_http.py +++ b/api/tests/integration_tests/workflow/nodes/test_http.py @@ -6,6 +6,7 @@ import pytest from core.app.entities.app_invoke_entities import InvokeFrom from core.workflow.entities import GraphInitParams +from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.nodes.http_request.node import HttpRequestNode from core.workflow.nodes.node_factory import DifyNodeFactory
DifyNodeFactory @@ -169,13 +170,14 @@ def test_custom_authorization_header(setup_http_mock): @pytest.mark.parametrize("setup_http_mock", [["none"]], indirect=True) -def test_custom_auth_with_empty_api_key_does_not_set_header(setup_http_mock): - """Test: In custom authentication mode, when the api_key is empty, no header should be set.""" +def test_custom_auth_with_empty_api_key_raises_error(setup_http_mock): + """Test: In custom authentication mode, when the api_key is empty, AuthorizationConfigError should be raised.""" from core.workflow.nodes.http_request.entities import ( HttpRequestNodeAuthorization, HttpRequestNodeData, HttpRequestNodeTimeout, ) + from core.workflow.nodes.http_request.exc import AuthorizationConfigError from core.workflow.nodes.http_request.executor import Executor from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable @@ -208,16 +210,13 @@ def test_custom_auth_with_empty_api_key_does_not_set_header(setup_http_mock): ssl_verify=True, ) - # Create executor - executor = Executor( - node_data=node_data, timeout=HttpRequestNodeTimeout(connect=10, read=30, write=10), variable_pool=variable_pool - ) - - # Get assembled headers - headers = executor._assembling_headers() - - # When api_key is empty, the custom header should NOT be set - assert "X-Custom-Auth" not in headers + # Create executor should raise AuthorizationConfigError + with pytest.raises(AuthorizationConfigError, match="API key is required"): + Executor( + node_data=node_data, + timeout=HttpRequestNodeTimeout(connect=10, read=30, write=10), + variable_pool=variable_pool, + ) @pytest.mark.parametrize("setup_http_mock", [["none"]], indirect=True) @@ -305,9 +304,10 @@ def test_basic_authorization_with_custom_header_ignored(setup_http_mock): @pytest.mark.parametrize("setup_http_mock", [["none"]], indirect=True) def test_custom_authorization_with_empty_api_key(setup_http_mock): """ - Test that custom authorization doesn't set header when api_key is empty. - This test verifies the fix for issue #23554. + Test that custom authorization raises error when api_key is empty. + This test verifies the fix for issue #21830. 
""" + node = init_http_node( config={ "id": "1", @@ -333,11 +333,10 @@ def test_custom_authorization_with_empty_api_key(setup_http_mock): ) result = node._run() - assert result.process_data is not None - data = result.process_data.get("request", "") - - # Custom header should NOT be set when api_key is empty - assert "X-Custom-Auth:" not in data + # Should fail with AuthorizationConfigError + assert result.status == WorkflowNodeExecutionStatus.FAILED + assert "API key is required" in result.error + assert result.error_type == "AuthorizationConfigError" @pytest.mark.parametrize("setup_http_mock", [["none"]], indirect=True) diff --git a/api/tests/test_containers_integration_tests/core/app/layers/test_pause_state_persist_layer.py b/api/tests/test_containers_integration_tests/core/app/layers/test_pause_state_persist_layer.py index 72469ad646..dcf31aeca7 100644 --- a/api/tests/test_containers_integration_tests/core/app/layers/test_pause_state_persist_layer.py +++ b/api/tests/test_containers_integration_tests/core/app/layers/test_pause_state_persist_layer.py @@ -35,6 +35,7 @@ from core.model_runtime.entities.llm_entities import LLMUsage from core.workflow.entities.pause_reason import SchedulingPause from core.workflow.enums import WorkflowExecutionStatus from core.workflow.graph_engine.entities.commands import GraphEngineCommand +from core.workflow.graph_engine.layers.base import GraphEngineLayerNotInitializedError from core.workflow.graph_events.graph import GraphRunPausedEvent from core.workflow.runtime.graph_runtime_state import GraphRuntimeState from core.workflow.runtime.graph_runtime_state_protocol import ReadOnlyGraphRuntimeState @@ -569,10 +570,10 @@ class TestPauseStatePersistenceLayerTestContainers: """Test that layer requires proper initialization before handling events.""" # Arrange layer = self._create_pause_state_persistence_layer() - # Don't initialize - graph_runtime_state should not be set + # Don't initialize - graph_runtime_state should be uninitialized event = GraphRunPausedEvent(reasons=[SchedulingPause(message="test pause")]) - # Act & Assert - Should raise AttributeError - with pytest.raises(AttributeError): + # Act & Assert - Should raise GraphEngineLayerNotInitializedError + with pytest.raises(GraphEngineLayerNotInitializedError): layer.on_event(event) diff --git a/api/tests/test_containers_integration_tests/libs/broadcast_channel/redis/test_sharded_channel.py b/api/tests/test_containers_integration_tests/libs/broadcast_channel/redis/test_sharded_channel.py index af60adf1fb..d612e70910 100644 --- a/api/tests/test_containers_integration_tests/libs/broadcast_channel/redis/test_sharded_channel.py +++ b/api/tests/test_containers_integration_tests/libs/broadcast_channel/redis/test_sharded_channel.py @@ -113,16 +113,31 @@ class TestShardedRedisBroadcastChannelIntegration: topic = broadcast_channel.topic(topic_name) producer = topic.as_producer() subscriptions = [topic.subscribe() for _ in range(subscriber_count)] + ready_events = [threading.Event() for _ in range(subscriber_count)] def producer_thread(): - time.sleep(0.2) # Allow all subscribers to connect + deadline = time.time() + 5.0 + for ev in ready_events: + remaining = deadline - time.time() + if remaining <= 0: + break + if not ev.wait(timeout=max(0.0, remaining)): + pytest.fail("subscriber did not become ready before publish deadline") producer.publish(message) time.sleep(0.2) for sub in subscriptions: sub.close() - def consumer_thread(subscription: Subscription) -> list[bytes]: + def consumer_thread(subscription: 
Subscription, ready_event: threading.Event) -> list[bytes]: received_msgs = [] + # Prime subscription so the underlying Pub/Sub listener thread starts before publishing + try: + _ = subscription.receive(0.01) + except SubscriptionClosedError: + return received_msgs + finally: + ready_event.set() + while True: try: msg = subscription.receive(0.1) @@ -137,7 +152,10 @@ class TestShardedRedisBroadcastChannelIntegration: with ThreadPoolExecutor(max_workers=subscriber_count + 1) as executor: producer_future = executor.submit(producer_thread) - consumer_futures = [executor.submit(consumer_thread, subscription) for subscription in subscriptions] + consumer_futures = [ + executor.submit(consumer_thread, subscription, ready_events[idx]) + for idx, subscription in enumerate(subscriptions) + ] producer_future.result(timeout=10.0) msgs_by_consumers = [] diff --git a/api/tests/test_containers_integration_tests/services/test_agent_service.py b/api/tests/test_containers_integration_tests/services/test_agent_service.py index 3be2798085..a22d6f8fbf 100644 --- a/api/tests/test_containers_integration_tests/services/test_agent_service.py +++ b/api/tests/test_containers_integration_tests/services/test_agent_service.py @@ -230,7 +230,6 @@ class TestAgentService: # Create first agent thought thought1 = MessageAgentThought( - id=fake.uuid4(), message_id=message.id, position=1, thought="I need to analyze the user's request", @@ -257,7 +256,6 @@ class TestAgentService: # Create second agent thought thought2 = MessageAgentThought( - id=fake.uuid4(), message_id=message.id, position=2, thought="Based on the analysis, I can provide a response", @@ -545,7 +543,6 @@ class TestAgentService: # Create agent thought with tool error thought_with_error = MessageAgentThought( - id=fake.uuid4(), message_id=message.id, position=1, thought="I need to analyze the user's request", @@ -759,7 +756,6 @@ class TestAgentService: # Create agent thought with multiple tools complex_thought = MessageAgentThought( - id=fake.uuid4(), message_id=message.id, position=1, thought="I need to use multiple tools to complete this task", @@ -877,7 +873,6 @@ class TestAgentService: # Create agent thought with files thought_with_files = MessageAgentThought( - id=fake.uuid4(), message_id=message.id, position=1, thought="I need to process some files", @@ -957,7 +952,6 @@ class TestAgentService: # Create agent thought with empty tool data empty_thought = MessageAgentThought( - id=fake.uuid4(), message_id=message.id, position=1, thought="I need to analyze the user's request", @@ -999,7 +993,6 @@ class TestAgentService: # Create agent thought with malformed JSON malformed_thought = MessageAgentThought( - id=fake.uuid4(), message_id=message.id, position=1, thought="I need to analyze the user's request", diff --git a/api/tests/test_containers_integration_tests/services/test_annotation_service.py b/api/tests/test_containers_integration_tests/services/test_annotation_service.py index da73122cd7..5555400ca6 100644 --- a/api/tests/test_containers_integration_tests/services/test_annotation_service.py +++ b/api/tests/test_containers_integration_tests/services/test_annotation_service.py @@ -444,6 +444,78 @@ class TestAnnotationService: assert total == 1 assert unique_keyword in annotation_list[0].question or unique_keyword in annotation_list[0].content + def test_get_annotation_list_by_app_id_with_special_characters_in_keyword( + self, db_session_with_containers, mock_external_service_dependencies + ): + r""" + Test retrieval of annotation list with special characters 
in keyword to verify SQL injection prevention. + + This test verifies: + - Special characters (%, _, \) in keyword are properly escaped + - Search treats special characters as literal characters, not wildcards + - SQL injection via LIKE wildcards is prevented + """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create annotations with special characters in content + annotation_with_percent = { + "question": "Question with 50% discount", + "answer": "Answer about 50% discount offer", + } + AppAnnotationService.insert_app_annotation_directly(annotation_with_percent, app.id) + + annotation_with_underscore = { + "question": "Question with test_data", + "answer": "Answer about test_data value", + } + AppAnnotationService.insert_app_annotation_directly(annotation_with_underscore, app.id) + + annotation_with_backslash = { + "question": "Question with path\\to\\file", + "answer": "Answer about path\\to\\file location", + } + AppAnnotationService.insert_app_annotation_directly(annotation_with_backslash, app.id) + + # Create annotation that should NOT match (contains % but as part of different text) + annotation_no_match = { + "question": "Question with 100% different", + "answer": "Answer about 100% different content", + } + AppAnnotationService.insert_app_annotation_directly(annotation_no_match, app.id) + + # Test 1: Search with % character - should find exact match only + annotation_list, total = AppAnnotationService.get_annotation_list_by_app_id( + app.id, page=1, limit=10, keyword="50%" + ) + assert total == 1 + assert len(annotation_list) == 1 + assert "50%" in annotation_list[0].question or "50%" in annotation_list[0].content + + # Test 2: Search with _ character - should find exact match only + annotation_list, total = AppAnnotationService.get_annotation_list_by_app_id( + app.id, page=1, limit=10, keyword="test_data" + ) + assert total == 1 + assert len(annotation_list) == 1 + assert "test_data" in annotation_list[0].question or "test_data" in annotation_list[0].content + + # Test 3: Search with \ character - should find exact match only + annotation_list, total = AppAnnotationService.get_annotation_list_by_app_id( + app.id, page=1, limit=10, keyword="path\\to\\file" + ) + assert total == 1 + assert len(annotation_list) == 1 + assert "path\\to\\file" in annotation_list[0].question or "path\\to\\file" in annotation_list[0].content + + # Test 4: Search with % should NOT match 100% (verifies escaping works) + annotation_list, total = AppAnnotationService.get_annotation_list_by_app_id( + app.id, page=1, limit=10, keyword="50%" + ) + # Should only find the 50% annotation, not the 100% one + assert total == 1 + assert all("50%" in (item.question or "") or "50%" in (item.content or "") for item in annotation_list) + def test_get_annotation_list_by_app_id_app_not_found( self, db_session_with_containers, mock_external_service_dependencies ): diff --git a/api/tests/test_containers_integration_tests/services/test_app_service.py b/api/tests/test_containers_integration_tests/services/test_app_service.py index e53392bcef..745d6c97b0 100644 --- a/api/tests/test_containers_integration_tests/services/test_app_service.py +++ b/api/tests/test_containers_integration_tests/services/test_app_service.py @@ -7,7 +7,9 @@ from constants.model_template import default_app_templates from models import Account from models.model import App, Site from services.account_service import AccountService, TenantService -from 
services.app_service import AppService + +# Delay import of AppService to avoid circular dependency +# from services.app_service import AppService class TestAppService: @@ -71,6 +73,9 @@ class TestAppService: } # Create app + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -109,6 +114,9 @@ class TestAppService: TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) tenant = account.current_tenant + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() # Test different app modes @@ -159,6 +167,9 @@ class TestAppService: "icon_background": "#45B7D1", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() created_app = app_service.create_app(tenant.id, app_args, account) @@ -194,6 +205,9 @@ class TestAppService: TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) tenant = account.current_tenant + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() # Create multiple apps @@ -245,6 +259,9 @@ class TestAppService: TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) tenant = account.current_tenant + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() # Create apps with different modes @@ -315,6 +332,9 @@ class TestAppService: TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) tenant = account.current_tenant + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() # Create an app @@ -392,6 +412,9 @@ class TestAppService: "icon_background": "#45B7D1", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -458,6 +481,9 @@ class TestAppService: "icon_background": "#45B7D1", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -508,6 +534,9 @@ class TestAppService: "icon_background": "#45B7D1", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -562,6 +591,9 @@ class TestAppService: "icon_background": "#74B9FF", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -617,6 +649,9 @@ class TestAppService: "icon_background": "#A29BFE", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -672,6 +707,9 @@ class TestAppService: "icon_background": "#FD79A8", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -720,6 +758,9 @@ class TestAppService: "icon_background": "#E17055", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = 
AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -768,6 +809,9 @@ class TestAppService: "icon_background": "#00B894", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -826,6 +870,9 @@ class TestAppService: "icon_background": "#6C5CE7", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -862,6 +909,9 @@ class TestAppService: "icon_background": "#FDCB6E", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -899,6 +949,9 @@ class TestAppService: "icon_background": "#E84393", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -947,8 +1000,132 @@ class TestAppService: "icon_background": "#D63031", } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() # Attempt to create app with invalid mode with pytest.raises(ValueError, match="invalid mode value"): app_service.create_app(tenant.id, app_args, account) + + def test_get_apps_with_special_characters_in_name( + self, db_session_with_containers, mock_external_service_dependencies + ): + r""" + Test app retrieval with special characters in name search to verify SQL injection prevention. + + This test verifies: + - Special characters (%, _, \) in name search are properly escaped + - Search treats special characters as literal characters, not wildcards + - SQL injection via LIKE wildcards is prevented + """ + fake = Faker() + + # Create account and tenant first + account = AccountService.create_account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + password=fake.password(length=12), + ) + TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) + tenant = account.current_tenant + + # Import here to avoid circular dependency + from services.app_service import AppService + + app_service = AppService() + + # Create apps with special characters in names + app_with_percent = app_service.create_app( + tenant.id, + { + "name": "App with 50% discount", + "description": fake.text(max_nb_chars=100), + "mode": "chat", + "icon_type": "emoji", + "icon": "🤖", + "icon_background": "#FF6B6B", + "api_rph": 100, + "api_rpm": 10, + }, + account, + ) + + app_with_underscore = app_service.create_app( + tenant.id, + { + "name": "test_data_app", + "description": fake.text(max_nb_chars=100), + "mode": "chat", + "icon_type": "emoji", + "icon": "🤖", + "icon_background": "#FF6B6B", + "api_rph": 100, + "api_rpm": 10, + }, + account, + ) + + app_with_backslash = app_service.create_app( + tenant.id, + { + "name": "path\\to\\app", + "description": fake.text(max_nb_chars=100), + "mode": "chat", + "icon_type": "emoji", + "icon": "🤖", + "icon_background": "#FF6B6B", + "api_rph": 100, + "api_rpm": 10, + }, + account, + ) + + # Create app that should NOT match + app_no_match = app_service.create_app( + tenant.id, + { + "name": "100% different", + "description": fake.text(max_nb_chars=100), + "mode": "chat", + "icon_type": "emoji", + "icon": "🤖", + "icon_background": "#FF6B6B", + "api_rph": 100, + "api_rpm": 10, + }, + 
account, + ) + + # Test 1: Search with % character + args = {"name": "50%", "mode": "chat", "page": 1, "limit": 10} + paginated_apps = app_service.get_paginate_apps(account.id, tenant.id, args) + assert paginated_apps is not None + assert paginated_apps.total == 1 + assert len(paginated_apps.items) == 1 + assert paginated_apps.items[0].name == "App with 50% discount" + + # Test 2: Search with _ character + args = {"name": "test_data", "mode": "chat", "page": 1, "limit": 10} + paginated_apps = app_service.get_paginate_apps(account.id, tenant.id, args) + assert paginated_apps is not None + assert paginated_apps.total == 1 + assert len(paginated_apps.items) == 1 + assert paginated_apps.items[0].name == "test_data_app" + + # Test 3: Search with \ character + args = {"name": "path\\to\\app", "mode": "chat", "page": 1, "limit": 10} + paginated_apps = app_service.get_paginate_apps(account.id, tenant.id, args) + assert paginated_apps is not None + assert paginated_apps.total == 1 + assert len(paginated_apps.items) == 1 + assert paginated_apps.items[0].name == "path\\to\\app" + + # Test 4: Search with % should NOT match 100% (verifies escaping works) + args = {"name": "50%", "mode": "chat", "page": 1, "limit": 10} + paginated_apps = app_service.get_paginate_apps(account.id, tenant.id, args) + assert paginated_apps is not None + assert paginated_apps.total == 1 + assert all("50%" in app.name for app in paginated_apps.items) diff --git a/api/tests/test_containers_integration_tests/services/test_billing_service.py b/api/tests/test_containers_integration_tests/services/test_billing_service.py new file mode 100644 index 0000000000..76708b36b1 --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_billing_service.py @@ -0,0 +1,365 @@ +import json +from unittest.mock import patch + +import pytest + +from extensions.ext_redis import redis_client +from services.billing_service import BillingService + + +class TestBillingServiceGetPlanBulkWithCache: + """ + Comprehensive integration tests for get_plan_bulk_with_cache using testcontainers. 
+ + This test class covers all major scenarios: + - Cache hit/miss scenarios + - Redis operation failures and fallback behavior + - Invalid cache data handling + - TTL expiration handling + - Error recovery and logging + """ + + @pytest.fixture(autouse=True) + def setup_redis_cleanup(self, flask_app_with_containers): + """Clean up Redis cache before and after each test.""" + with flask_app_with_containers.app_context(): + # Clean up before test + yield + # Clean up after test + # Delete all test cache keys + pattern = f"{BillingService._PLAN_CACHE_KEY_PREFIX}*" + keys = redis_client.keys(pattern) + if keys: + redis_client.delete(*keys) + + def _create_test_plan_data(self, plan: str = "sandbox", expiration_date: int = 1735689600): + """Helper to create test SubscriptionPlan data.""" + return {"plan": plan, "expiration_date": expiration_date} + + def _set_cache(self, tenant_id: str, plan_data: dict, ttl: int = 600): + """Helper to set cache data in Redis.""" + cache_key = BillingService._make_plan_cache_key(tenant_id) + json_str = json.dumps(plan_data) + redis_client.setex(cache_key, ttl, json_str) + + def _get_cache(self, tenant_id: str): + """Helper to get cache data from Redis.""" + cache_key = BillingService._make_plan_cache_key(tenant_id) + value = redis_client.get(cache_key) + if value: + if isinstance(value, bytes): + return value.decode("utf-8") + return value + return None + + def test_get_plan_bulk_with_cache_all_cache_hit(self, flask_app_with_containers): + """Test bulk plan retrieval when all tenants are in cache.""" + with flask_app_with_containers.app_context(): + # Arrange + tenant_ids = ["tenant-1", "tenant-2", "tenant-3"] + expected_plans = { + "tenant-1": self._create_test_plan_data("sandbox", 1735689600), + "tenant-2": self._create_test_plan_data("professional", 1767225600), + "tenant-3": self._create_test_plan_data("team", 1798761600), + } + + # Pre-populate cache + for tenant_id, plan_data in expected_plans.items(): + self._set_cache(tenant_id, plan_data) + + # Act + with patch.object(BillingService, "get_plan_bulk") as mock_get_plan_bulk: + result = BillingService.get_plan_bulk_with_cache(tenant_ids) + + # Assert + assert len(result) == 3 + assert result["tenant-1"]["plan"] == "sandbox" + assert result["tenant-1"]["expiration_date"] == 1735689600 + assert result["tenant-2"]["plan"] == "professional" + assert result["tenant-2"]["expiration_date"] == 1767225600 + assert result["tenant-3"]["plan"] == "team" + assert result["tenant-3"]["expiration_date"] == 1798761600 + + # Verify API was not called + mock_get_plan_bulk.assert_not_called() + + def test_get_plan_bulk_with_cache_all_cache_miss(self, flask_app_with_containers): + """Test bulk plan retrieval when all tenants are not in cache.""" + with flask_app_with_containers.app_context(): + # Arrange + tenant_ids = ["tenant-1", "tenant-2"] + expected_plans = { + "tenant-1": self._create_test_plan_data("sandbox", 1735689600), + "tenant-2": self._create_test_plan_data("professional", 1767225600), + } + + # Act + with patch.object(BillingService, "get_plan_bulk", return_value=expected_plans) as mock_get_plan_bulk: + result = BillingService.get_plan_bulk_with_cache(tenant_ids) + + # Assert + assert len(result) == 2 + assert result["tenant-1"]["plan"] == "sandbox" + assert result["tenant-2"]["plan"] == "professional" + + # Verify API was called with correct tenant_ids + mock_get_plan_bulk.assert_called_once_with(tenant_ids) + + # Verify data was written to cache + cached_1 = self._get_cache("tenant-1") + cached_2 = 
self._get_cache("tenant-2") + assert cached_1 is not None + assert cached_2 is not None + + # Verify cache content + cached_data_1 = json.loads(cached_1) + cached_data_2 = json.loads(cached_2) + assert cached_data_1 == expected_plans["tenant-1"] + assert cached_data_2 == expected_plans["tenant-2"] + + # Verify TTL is set + cache_key_1 = BillingService._make_plan_cache_key("tenant-1") + ttl_1 = redis_client.ttl(cache_key_1) + assert ttl_1 > 0 + assert ttl_1 <= 600 # Should be <= 600 seconds + + def test_get_plan_bulk_with_cache_partial_cache_hit(self, flask_app_with_containers): + """Test bulk plan retrieval when some tenants are in cache, some are not.""" + with flask_app_with_containers.app_context(): + # Arrange + tenant_ids = ["tenant-1", "tenant-2", "tenant-3"] + # Pre-populate cache for tenant-1 and tenant-2 + self._set_cache("tenant-1", self._create_test_plan_data("sandbox", 1735689600)) + self._set_cache("tenant-2", self._create_test_plan_data("professional", 1767225600)) + + # tenant-3 is not in cache + missing_plan = {"tenant-3": self._create_test_plan_data("team", 1798761600)} + + # Act + with patch.object(BillingService, "get_plan_bulk", return_value=missing_plan) as mock_get_plan_bulk: + result = BillingService.get_plan_bulk_with_cache(tenant_ids) + + # Assert + assert len(result) == 3 + assert result["tenant-1"]["plan"] == "sandbox" + assert result["tenant-2"]["plan"] == "professional" + assert result["tenant-3"]["plan"] == "team" + + # Verify API was called only for missing tenant + mock_get_plan_bulk.assert_called_once_with(["tenant-3"]) + + # Verify tenant-3 data was written to cache + cached_3 = self._get_cache("tenant-3") + assert cached_3 is not None + cached_data_3 = json.loads(cached_3) + assert cached_data_3 == missing_plan["tenant-3"] + + def test_get_plan_bulk_with_cache_redis_mget_failure(self, flask_app_with_containers): + """Test fallback to API when Redis mget fails.""" + with flask_app_with_containers.app_context(): + # Arrange + tenant_ids = ["tenant-1", "tenant-2"] + expected_plans = { + "tenant-1": self._create_test_plan_data("sandbox", 1735689600), + "tenant-2": self._create_test_plan_data("professional", 1767225600), + } + + # Act + with ( + patch.object(redis_client, "mget", side_effect=Exception("Redis connection error")), + patch.object(BillingService, "get_plan_bulk", return_value=expected_plans) as mock_get_plan_bulk, + ): + result = BillingService.get_plan_bulk_with_cache(tenant_ids) + + # Assert + assert len(result) == 2 + assert result["tenant-1"]["plan"] == "sandbox" + assert result["tenant-2"]["plan"] == "professional" + + # Verify API was called for all tenants (fallback) + mock_get_plan_bulk.assert_called_once_with(tenant_ids) + + # Verify data was written to cache after fallback + cached_1 = self._get_cache("tenant-1") + cached_2 = self._get_cache("tenant-2") + assert cached_1 is not None + assert cached_2 is not None + + def test_get_plan_bulk_with_cache_invalid_json_in_cache(self, flask_app_with_containers): + """Test fallback to API when cache contains invalid JSON.""" + with flask_app_with_containers.app_context(): + # Arrange + tenant_ids = ["tenant-1", "tenant-2", "tenant-3"] + + # Set valid cache for tenant-1 + self._set_cache("tenant-1", self._create_test_plan_data("sandbox", 1735689600)) + + # Set invalid JSON for tenant-2 + cache_key_2 = BillingService._make_plan_cache_key("tenant-2") + redis_client.setex(cache_key_2, 600, "invalid json {") + + # tenant-3 is not in cache + expected_plans = { + "tenant-2": 
self._create_test_plan_data("professional", 1767225600), + "tenant-3": self._create_test_plan_data("team", 1798761600), + } + + # Act + with patch.object(BillingService, "get_plan_bulk", return_value=expected_plans) as mock_get_plan_bulk: + result = BillingService.get_plan_bulk_with_cache(tenant_ids) + + # Assert + assert len(result) == 3 + assert result["tenant-1"]["plan"] == "sandbox" # From cache + assert result["tenant-2"]["plan"] == "professional" # From API (fallback) + assert result["tenant-3"]["plan"] == "team" # From API + + # Verify API was called for tenant-2 and tenant-3 + mock_get_plan_bulk.assert_called_once_with(["tenant-2", "tenant-3"]) + + # Verify tenant-2's invalid JSON was replaced with correct data in cache + cached_2 = self._get_cache("tenant-2") + assert cached_2 is not None + cached_data_2 = json.loads(cached_2) + assert cached_data_2 == expected_plans["tenant-2"] + assert cached_data_2["plan"] == "professional" + assert cached_data_2["expiration_date"] == 1767225600 + + # Verify tenant-2 cache has correct TTL + cache_key_2_new = BillingService._make_plan_cache_key("tenant-2") + ttl_2 = redis_client.ttl(cache_key_2_new) + assert ttl_2 > 0 + assert ttl_2 <= 600 + + # Verify tenant-3 data was also written to cache + cached_3 = self._get_cache("tenant-3") + assert cached_3 is not None + cached_data_3 = json.loads(cached_3) + assert cached_data_3 == expected_plans["tenant-3"] + + def test_get_plan_bulk_with_cache_invalid_plan_data_in_cache(self, flask_app_with_containers): + """Test fallback to API when cache data doesn't match SubscriptionPlan schema.""" + with flask_app_with_containers.app_context(): + # Arrange + tenant_ids = ["tenant-1", "tenant-2", "tenant-3"] + + # Set valid cache for tenant-1 + self._set_cache("tenant-1", self._create_test_plan_data("sandbox", 1735689600)) + + # Set invalid plan data for tenant-2 (missing expiration_date) + cache_key_2 = BillingService._make_plan_cache_key("tenant-2") + invalid_data = json.dumps({"plan": "professional"}) # Missing expiration_date + redis_client.setex(cache_key_2, 600, invalid_data) + + # tenant-3 is not in cache + expected_plans = { + "tenant-2": self._create_test_plan_data("professional", 1767225600), + "tenant-3": self._create_test_plan_data("team", 1798761600), + } + + # Act + with patch.object(BillingService, "get_plan_bulk", return_value=expected_plans) as mock_get_plan_bulk: + result = BillingService.get_plan_bulk_with_cache(tenant_ids) + + # Assert + assert len(result) == 3 + assert result["tenant-1"]["plan"] == "sandbox" # From cache + assert result["tenant-2"]["plan"] == "professional" # From API (fallback) + assert result["tenant-3"]["plan"] == "team" # From API + + # Verify API was called for tenant-2 and tenant-3 + mock_get_plan_bulk.assert_called_once_with(["tenant-2", "tenant-3"]) + + def test_get_plan_bulk_with_cache_redis_pipeline_failure(self, flask_app_with_containers): + """Test that pipeline failure doesn't affect return value.""" + with flask_app_with_containers.app_context(): + # Arrange + tenant_ids = ["tenant-1", "tenant-2"] + expected_plans = { + "tenant-1": self._create_test_plan_data("sandbox", 1735689600), + "tenant-2": self._create_test_plan_data("professional", 1767225600), + } + + # Act + with ( + patch.object(BillingService, "get_plan_bulk", return_value=expected_plans), + patch.object(redis_client, "pipeline") as mock_pipeline, + ): + # Create a mock pipeline that fails on execute + mock_pipe = mock_pipeline.return_value + mock_pipe.execute.side_effect = Exception("Pipeline 
execution failed") + + result = BillingService.get_plan_bulk_with_cache(tenant_ids) + + # Assert - Function should still return correct result despite pipeline failure + assert len(result) == 2 + assert result["tenant-1"]["plan"] == "sandbox" + assert result["tenant-2"]["plan"] == "professional" + + # Verify pipeline was attempted + mock_pipeline.assert_called_once() + + def test_get_plan_bulk_with_cache_empty_tenant_ids(self, flask_app_with_containers): + """Test with empty tenant_ids list.""" + with flask_app_with_containers.app_context(): + # Act + with patch.object(BillingService, "get_plan_bulk") as mock_get_plan_bulk: + result = BillingService.get_plan_bulk_with_cache([]) + + # Assert + assert result == {} + assert len(result) == 0 + + # Verify no API calls + mock_get_plan_bulk.assert_not_called() + + # Verify no Redis operations (mget with empty list would return empty list) + # But we should check that mget was not called at all + # Since we can't easily verify this without more mocking, we just verify the result + + def test_get_plan_bulk_with_cache_ttl_expired(self, flask_app_with_containers): + """Test that expired cache keys are treated as cache misses.""" + with flask_app_with_containers.app_context(): + # Arrange + tenant_ids = ["tenant-1", "tenant-2"] + + # Set cache for tenant-1 with very short TTL (1 second) to simulate expiration + self._set_cache("tenant-1", self._create_test_plan_data("sandbox", 1735689600), ttl=1) + + # Wait for TTL to expire (key will be deleted by Redis) + import time + + time.sleep(2) + + # Verify cache is expired (key doesn't exist) + cache_key_1 = BillingService._make_plan_cache_key("tenant-1") + exists = redis_client.exists(cache_key_1) + assert exists == 0 # Key doesn't exist (expired) + + # tenant-2 is not in cache + expected_plans = { + "tenant-1": self._create_test_plan_data("sandbox", 1735689600), + "tenant-2": self._create_test_plan_data("professional", 1767225600), + } + + # Act + with patch.object(BillingService, "get_plan_bulk", return_value=expected_plans) as mock_get_plan_bulk: + result = BillingService.get_plan_bulk_with_cache(tenant_ids) + + # Assert + assert len(result) == 2 + assert result["tenant-1"]["plan"] == "sandbox" + assert result["tenant-2"]["plan"] == "professional" + + # Verify API was called for both tenants (tenant-1 expired, tenant-2 missing) + mock_get_plan_bulk.assert_called_once_with(tenant_ids) + + # Verify both were written to cache with correct TTL + cache_key_1_new = BillingService._make_plan_cache_key("tenant-1") + cache_key_2 = BillingService._make_plan_cache_key("tenant-2") + ttl_1_new = redis_client.ttl(cache_key_1_new) + ttl_2 = redis_client.ttl(cache_key_2) + assert ttl_1_new > 0 + assert ttl_1_new <= 600 + assert ttl_2 > 0 + assert ttl_2 <= 600 diff --git a/api/tests/test_containers_integration_tests/services/test_model_provider_service.py b/api/tests/test_containers_integration_tests/services/test_model_provider_service.py index 612210ef86..d57ab7428b 100644 --- a/api/tests/test_containers_integration_tests/services/test_model_provider_service.py +++ b/api/tests/test_containers_integration_tests/services/test_model_provider_service.py @@ -228,7 +228,6 @@ class TestModelProviderService: mock_provider_entity.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"} mock_provider_entity.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"} mock_provider_entity.icon_small_dark = None - mock_provider_entity.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"} 
mock_provider_entity.background = "#FF6B6B" mock_provider_entity.help = None mock_provider_entity.supported_model_types = [ModelType.LLM, ModelType.TEXT_EMBEDDING] @@ -302,7 +301,6 @@ class TestModelProviderService: mock_provider_entity_llm.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"} mock_provider_entity_llm.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"} mock_provider_entity_llm.icon_small_dark = None - mock_provider_entity_llm.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"} mock_provider_entity_llm.background = "#FF6B6B" mock_provider_entity_llm.help = None mock_provider_entity_llm.supported_model_types = [ModelType.LLM] @@ -316,7 +314,6 @@ class TestModelProviderService: mock_provider_entity_embedding.description = {"en_US": "Cohere provider", "zh_Hans": "Cohere 提供商"} mock_provider_entity_embedding.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"} mock_provider_entity_embedding.icon_small_dark = None - mock_provider_entity_embedding.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"} mock_provider_entity_embedding.background = "#4ECDC4" mock_provider_entity_embedding.help = None mock_provider_entity_embedding.supported_model_types = [ModelType.TEXT_EMBEDDING] @@ -419,7 +416,6 @@ class TestModelProviderService: provider="openai", label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"), icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"), - icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"), supported_model_types=[ModelType.LLM], configurate_methods=[], models=[], @@ -431,7 +427,6 @@ class TestModelProviderService: provider="openai", label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"), icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"), - icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"), supported_model_types=[ModelType.LLM], configurate_methods=[], models=[], @@ -655,7 +650,6 @@ class TestModelProviderService: provider="openai", label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"), icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"), - icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"), supported_model_types=[ModelType.LLM], ), ) @@ -1027,7 +1021,6 @@ class TestModelProviderService: label={"en_US": "OpenAI", "zh_Hans": "OpenAI"}, icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}, icon_small_dark=None, - icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}, ), model="gpt-3.5-turbo", model_type=ModelType.LLM, @@ -1045,7 +1038,6 @@ class TestModelProviderService: label={"en_US": "OpenAI", "zh_Hans": "OpenAI"}, icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}, icon_small_dark=None, - icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}, ), model="gpt-4", model_type=ModelType.LLM, diff --git a/api/tests/test_containers_integration_tests/services/test_tag_service.py b/api/tests/test_containers_integration_tests/services/test_tag_service.py index 6732b8d558..e8c7f17e0b 100644 --- a/api/tests/test_containers_integration_tests/services/test_tag_service.py +++ b/api/tests/test_containers_integration_tests/services/test_tag_service.py @@ -1,3 +1,4 @@ +import uuid from unittest.mock import create_autospec, patch import pytest @@ -312,6 +313,85 @@ class TestTagService: result_no_match = TagService.get_tags("app", tenant.id, keyword="nonexistent") assert len(result_no_match) == 0 + def 
test_get_tags_with_special_characters_in_keyword( + self, db_session_with_containers, mock_external_service_dependencies + ): + r""" + Test tag retrieval with special characters in keyword to verify SQL injection prevention. + + This test verifies: + - Special characters (%, _, \) in keyword are properly escaped + - Search treats special characters as literal characters, not wildcards + - SQL injection via LIKE wildcards is prevented + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + from extensions.ext_database import db + + # Create tags with special characters in names + tag_with_percent = Tag( + name="50% discount", + type="app", + tenant_id=tenant.id, + created_by=account.id, + ) + tag_with_percent.id = str(uuid.uuid4()) + db.session.add(tag_with_percent) + + tag_with_underscore = Tag( + name="test_data_tag", + type="app", + tenant_id=tenant.id, + created_by=account.id, + ) + tag_with_underscore.id = str(uuid.uuid4()) + db.session.add(tag_with_underscore) + + tag_with_backslash = Tag( + name="path\\to\\tag", + type="app", + tenant_id=tenant.id, + created_by=account.id, + ) + tag_with_backslash.id = str(uuid.uuid4()) + db.session.add(tag_with_backslash) + + # Create tag that should NOT match + tag_no_match = Tag( + name="100% different", + type="app", + tenant_id=tenant.id, + created_by=account.id, + ) + tag_no_match.id = str(uuid.uuid4()) + db.session.add(tag_no_match) + + db.session.commit() + + # Act & Assert: Test 1 - Search with % character + result = TagService.get_tags("app", tenant.id, keyword="50%") + assert len(result) == 1 + assert result[0].name == "50% discount" + + # Test 2 - Search with _ character + result = TagService.get_tags("app", tenant.id, keyword="test_data") + assert len(result) == 1 + assert result[0].name == "test_data_tag" + + # Test 3 - Search with \ character + result = TagService.get_tags("app", tenant.id, keyword="path\\to\\tag") + assert len(result) == 1 + assert result[0].name == "path\\to\\tag" + + # Test 4 - Search with % should NOT match 100% (verifies escaping works) + result = TagService.get_tags("app", tenant.id, keyword="50%") + assert len(result) == 1 + assert all("50%" in item.name for item in result) + def test_get_tags_empty_result(self, db_session_with_containers, mock_external_service_dependencies): """ Test tag retrieval when no tags exist. 
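The annotation, app, and tag service tests above (and the workflow-app-log test later in this diff) all assert the same contract: `%`, `_`, and `\` in a user-supplied keyword must match literally instead of acting as LIKE wildcards. A minimal sketch of the kind of escaping those tests imply, assuming SQLAlchemy 2.x; `escape_like` and `search_tags` are illustrative names, not Dify's actual helpers, and the `Tag` import path is taken from this diff's test files:

```python
from sqlalchemy import select

from extensions.ext_database import db
from models.model import Tag  # import path as used by the tag tests; illustrative here


def escape_like(keyword: str, escape_char: str = "\\") -> str:
    """Escape LIKE/ILIKE wildcards so user input is matched literally."""
    # Escape the escape character first, then the wildcards, so a literal
    # backslash in the keyword is not misread as an escape sequence.
    return (
        keyword.replace(escape_char, escape_char * 2)
        .replace("%", escape_char + "%")
        .replace("_", escape_char + "_")
    )


def search_tags(tenant_id: str, keyword: str) -> list[Tag]:
    # escape="\\" tells the database that backslash introduces a literal % or _.
    pattern = f"%{escape_like(keyword)}%"
    stmt = select(Tag).where(
        Tag.tenant_id == tenant_id,
        Tag.name.ilike(pattern, escape="\\"),
    )
    return list(db.session.scalars(stmt))
```

Under this scheme a search for `50%` becomes the pattern `%50\%%`, which matches "50% discount" but not "100% different" — exactly the distinction Test 4 in each of these test methods checks.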
diff --git a/api/tests/test_containers_integration_tests/services/test_trigger_provider_service.py b/api/tests/test_containers_integration_tests/services/test_trigger_provider_service.py new file mode 100644 index 0000000000..5315960d73 --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_trigger_provider_service.py @@ -0,0 +1,560 @@ +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from constants import HIDDEN_VALUE, UNKNOWN_VALUE +from core.plugin.entities.plugin_daemon import CredentialType +from core.trigger.entities.entities import Subscription as TriggerSubscriptionEntity +from extensions.ext_database import db +from models.provider_ids import TriggerProviderID +from models.trigger import TriggerSubscription +from services.trigger.trigger_provider_service import TriggerProviderService + + +class TestTriggerProviderService: + """Integration tests for TriggerProviderService using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("services.trigger.trigger_provider_service.TriggerManager") as mock_trigger_manager, + patch("services.trigger.trigger_provider_service.redis_client") as mock_redis_client, + patch("services.trigger.trigger_provider_service.delete_cache_for_subscription") as mock_delete_cache, + patch("services.account_service.FeatureService") as mock_account_feature_service, + ): + # Setup default mock returns + mock_provider_controller = MagicMock() + mock_provider_controller.get_credential_schema_config.return_value = MagicMock() + mock_provider_controller.get_properties_schema.return_value = MagicMock() + mock_trigger_manager.get_trigger_provider.return_value = mock_provider_controller + + # Mock redis lock + mock_lock = MagicMock() + mock_lock.__enter__ = MagicMock(return_value=None) + mock_lock.__exit__ = MagicMock(return_value=None) + mock_redis_client.lock.return_value = mock_lock + + # Setup account feature service mock + mock_account_feature_service.get_system_features.return_value.is_allow_register = True + + yield { + "trigger_manager": mock_trigger_manager, + "redis_client": mock_redis_client, + "delete_cache": mock_delete_cache, + "provider_controller": mock_provider_controller, + "account_feature_service": mock_account_feature_service, + } + + def _create_test_account_and_tenant(self, db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test account and tenant for testing. 
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (account, tenant) - Created account and tenant instances + """ + fake = Faker() + + from services.account_service import AccountService, TenantService + + # Setup mocks for account creation + mock_external_service_dependencies[ + "account_feature_service" + ].get_system_features.return_value.is_allow_register = True + mock_external_service_dependencies[ + "trigger_manager" + ].get_trigger_provider.return_value = mock_external_service_dependencies["provider_controller"] + + # Create account and tenant + account = AccountService.create_account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + password=fake.password(length=12), + ) + TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) + tenant = account.current_tenant + + return account, tenant + + def _create_test_subscription( + self, + db_session_with_containers, + tenant_id, + user_id, + provider_id, + credential_type, + credentials, + mock_external_service_dependencies, + ): + """ + Helper method to create a test trigger subscription. + + Args: + db_session_with_containers: Database session + tenant_id: Tenant ID + user_id: User ID + provider_id: Provider ID + credential_type: Credential type + credentials: Credentials dict + mock_external_service_dependencies: Mock dependencies + + Returns: + TriggerSubscription: Created subscription instance + """ + fake = Faker() + from core.helper.provider_cache import NoOpProviderCredentialCache + from core.helper.provider_encryption import create_provider_encrypter + + # Use mock provider controller to encrypt credentials + provider_controller = mock_external_service_dependencies["provider_controller"] + + # Create encrypter for credentials + credential_encrypter, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=provider_controller.get_credential_schema_config(credential_type), + cache=NoOpProviderCredentialCache(), + ) + + subscription = TriggerSubscription( + name=fake.word(), + tenant_id=tenant_id, + user_id=user_id, + provider_id=str(provider_id), + endpoint_id=fake.uuid4(), + parameters={"param1": "value1"}, + properties={"prop1": "value1"}, + credentials=dict(credential_encrypter.encrypt(credentials)), + credential_type=credential_type.value, + credential_expires_at=-1, + expires_at=-1, + ) + + db.session.add(subscription) + db.session.commit() + db.session.refresh(subscription) + + return subscription + + def test_rebuild_trigger_subscription_success_with_merged_credentials( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful rebuild with credential merging (HIDDEN_VALUE handling). 
+ + This test verifies: + - Credentials are properly merged (HIDDEN_VALUE replaced with existing values) + - Single transaction wraps all operations + - Merged credentials are used for subscribe and update + - Database state is correctly updated + """ + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + provider_id = TriggerProviderID("test_org/test_plugin/test_provider") + credential_type = CredentialType.API_KEY + + # Create initial subscription with credentials + original_credentials = {"api_key": "original-secret-key", "api_secret": "original-secret"} + subscription = self._create_test_subscription( + db_session_with_containers, + tenant.id, + account.id, + provider_id, + credential_type, + original_credentials, + mock_external_service_dependencies, + ) + + # Prepare new credentials with HIDDEN_VALUE for api_key (should keep original) + # and new value for api_secret (should update) + new_credentials = { + "api_key": HIDDEN_VALUE, # Should be replaced with original + "api_secret": "new-secret-value", # Should be updated + } + + # Mock subscribe_trigger to return a new subscription entity + new_subscription_entity = TriggerSubscriptionEntity( + endpoint=subscription.endpoint_id, + parameters={"param1": "value1"}, + properties={"prop1": "new_prop_value"}, + expires_at=1234567890, + ) + mock_external_service_dependencies["trigger_manager"].subscribe_trigger.return_value = new_subscription_entity + + # Mock unsubscribe_trigger + mock_external_service_dependencies["trigger_manager"].unsubscribe_trigger.return_value = MagicMock() + + # Execute rebuild + TriggerProviderService.rebuild_trigger_subscription( + tenant_id=tenant.id, + provider_id=provider_id, + subscription_id=subscription.id, + credentials=new_credentials, + parameters={"param1": "updated_value"}, + name="updated_name", + ) + + # Verify unsubscribe was called with decrypted original credentials + mock_external_service_dependencies["trigger_manager"].unsubscribe_trigger.assert_called_once() + unsubscribe_call_args = mock_external_service_dependencies["trigger_manager"].unsubscribe_trigger.call_args + assert unsubscribe_call_args.kwargs["tenant_id"] == tenant.id + assert unsubscribe_call_args.kwargs["provider_id"] == provider_id + assert unsubscribe_call_args.kwargs["credential_type"] == credential_type + + # Verify subscribe was called with merged credentials (api_key from original, api_secret new) + mock_external_service_dependencies["trigger_manager"].subscribe_trigger.assert_called_once() + subscribe_call_args = mock_external_service_dependencies["trigger_manager"].subscribe_trigger.call_args + subscribe_credentials = subscribe_call_args.kwargs["credentials"] + assert subscribe_credentials["api_key"] == original_credentials["api_key"] # Merged from original + assert subscribe_credentials["api_secret"] == "new-secret-value" # New value + + # Verify database state was updated + db.session.refresh(subscription) + assert subscription.name == "updated_name" + assert subscription.parameters == {"param1": "updated_value"} + + # Verify credentials in DB were updated with merged values (decrypt to check) + from core.helper.provider_cache import NoOpProviderCredentialCache + from core.helper.provider_encryption import create_provider_encrypter + + # Use mock provider controller to decrypt credentials + provider_controller = mock_external_service_dependencies["provider_controller"] + credential_encrypter, _ = create_provider_encrypter( + 
tenant_id=tenant.id, + config=provider_controller.get_credential_schema_config(credential_type), + cache=NoOpProviderCredentialCache(), + ) + decrypted_db_credentials = dict(credential_encrypter.decrypt(subscription.credentials)) + assert decrypted_db_credentials["api_key"] == original_credentials["api_key"] + assert decrypted_db_credentials["api_secret"] == "new-secret-value" + + # Verify cache was cleared + mock_external_service_dependencies["delete_cache"].assert_called_once_with( + tenant_id=tenant.id, + provider_id=subscription.provider_id, + subscription_id=subscription.id, + ) + + def test_rebuild_trigger_subscription_with_all_new_credentials( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test rebuild when all credentials are new (no HIDDEN_VALUE). + + This test verifies: + - All new credentials are used when no HIDDEN_VALUE is present + - Merged credentials contain only new values + """ + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + provider_id = TriggerProviderID("test_org/test_plugin/test_provider") + credential_type = CredentialType.API_KEY + + # Create initial subscription + original_credentials = {"api_key": "original-key", "api_secret": "original-secret"} + subscription = self._create_test_subscription( + db_session_with_containers, + tenant.id, + account.id, + provider_id, + credential_type, + original_credentials, + mock_external_service_dependencies, + ) + + # All new credentials (no HIDDEN_VALUE) + new_credentials = { + "api_key": "completely-new-key", + "api_secret": "completely-new-secret", + } + + new_subscription_entity = TriggerSubscriptionEntity( + endpoint=subscription.endpoint_id, + parameters={}, + properties={}, + expires_at=-1, + ) + mock_external_service_dependencies["trigger_manager"].subscribe_trigger.return_value = new_subscription_entity + mock_external_service_dependencies["trigger_manager"].unsubscribe_trigger.return_value = MagicMock() + + # Execute rebuild + TriggerProviderService.rebuild_trigger_subscription( + tenant_id=tenant.id, + provider_id=provider_id, + subscription_id=subscription.id, + credentials=new_credentials, + parameters={}, + ) + + # Verify subscribe was called with all new credentials + subscribe_call_args = mock_external_service_dependencies["trigger_manager"].subscribe_trigger.call_args + subscribe_credentials = subscribe_call_args.kwargs["credentials"] + assert subscribe_credentials["api_key"] == "completely-new-key" + assert subscribe_credentials["api_secret"] == "completely-new-secret" + + def test_rebuild_trigger_subscription_with_all_hidden_values( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test rebuild when all credentials are HIDDEN_VALUE (preserve all existing). 
+ + This test verifies: + - All HIDDEN_VALUE credentials are replaced with existing values + - Original credentials are preserved + """ + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + provider_id = TriggerProviderID("test_org/test_plugin/test_provider") + credential_type = CredentialType.API_KEY + + original_credentials = {"api_key": "original-key", "api_secret": "original-secret"} + subscription = self._create_test_subscription( + db_session_with_containers, + tenant.id, + account.id, + provider_id, + credential_type, + original_credentials, + mock_external_service_dependencies, + ) + + # All HIDDEN_VALUE (should preserve all original) + new_credentials = { + "api_key": HIDDEN_VALUE, + "api_secret": HIDDEN_VALUE, + } + + new_subscription_entity = TriggerSubscriptionEntity( + endpoint=subscription.endpoint_id, + parameters={}, + properties={}, + expires_at=-1, + ) + mock_external_service_dependencies["trigger_manager"].subscribe_trigger.return_value = new_subscription_entity + mock_external_service_dependencies["trigger_manager"].unsubscribe_trigger.return_value = MagicMock() + + # Execute rebuild + TriggerProviderService.rebuild_trigger_subscription( + tenant_id=tenant.id, + provider_id=provider_id, + subscription_id=subscription.id, + credentials=new_credentials, + parameters={}, + ) + + # Verify subscribe was called with all original credentials + subscribe_call_args = mock_external_service_dependencies["trigger_manager"].subscribe_trigger.call_args + subscribe_credentials = subscribe_call_args.kwargs["credentials"] + assert subscribe_credentials["api_key"] == original_credentials["api_key"] + assert subscribe_credentials["api_secret"] == original_credentials["api_secret"] + + def test_rebuild_trigger_subscription_with_missing_key_uses_unknown_value( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test rebuild when HIDDEN_VALUE is used for a key that doesn't exist in original. 
+ + This test verifies: + - UNKNOWN_VALUE is used when HIDDEN_VALUE key doesn't exist in original credentials + """ + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + provider_id = TriggerProviderID("test_org/test_plugin/test_provider") + credential_type = CredentialType.API_KEY + + # Original has only api_key + original_credentials = {"api_key": "original-key"} + subscription = self._create_test_subscription( + db_session_with_containers, + tenant.id, + account.id, + provider_id, + credential_type, + original_credentials, + mock_external_service_dependencies, + ) + + # HIDDEN_VALUE for non-existent key should use UNKNOWN_VALUE + new_credentials = { + "api_key": HIDDEN_VALUE, + "non_existent_key": HIDDEN_VALUE, # This key doesn't exist in original + } + + new_subscription_entity = TriggerSubscriptionEntity( + endpoint=subscription.endpoint_id, + parameters={}, + properties={}, + expires_at=-1, + ) + mock_external_service_dependencies["trigger_manager"].subscribe_trigger.return_value = new_subscription_entity + mock_external_service_dependencies["trigger_manager"].unsubscribe_trigger.return_value = MagicMock() + + # Execute rebuild + TriggerProviderService.rebuild_trigger_subscription( + tenant_id=tenant.id, + provider_id=provider_id, + subscription_id=subscription.id, + credentials=new_credentials, + parameters={}, + ) + + # Verify subscribe was called with original api_key and UNKNOWN_VALUE for missing key + subscribe_call_args = mock_external_service_dependencies["trigger_manager"].subscribe_trigger.call_args + subscribe_credentials = subscribe_call_args.kwargs["credentials"] + assert subscribe_credentials["api_key"] == original_credentials["api_key"] + assert subscribe_credentials["non_existent_key"] == UNKNOWN_VALUE + + def test_rebuild_trigger_subscription_rollback_on_error( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test that transaction is rolled back on error. 
+ + This test verifies: + - Database transaction is rolled back when an error occurs + - Original subscription state is preserved + """ + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + provider_id = TriggerProviderID("test_org/test_plugin/test_provider") + credential_type = CredentialType.API_KEY + + original_credentials = {"api_key": "original-key"} + subscription = self._create_test_subscription( + db_session_with_containers, + tenant.id, + account.id, + provider_id, + credential_type, + original_credentials, + mock_external_service_dependencies, + ) + + original_name = subscription.name + original_parameters = subscription.parameters.copy() + + # Make subscribe_trigger raise an error + mock_external_service_dependencies["trigger_manager"].subscribe_trigger.side_effect = ValueError( + "Subscribe failed" + ) + mock_external_service_dependencies["trigger_manager"].unsubscribe_trigger.return_value = MagicMock() + + # Execute rebuild and expect error + with pytest.raises(ValueError, match="Subscribe failed"): + TriggerProviderService.rebuild_trigger_subscription( + tenant_id=tenant.id, + provider_id=provider_id, + subscription_id=subscription.id, + credentials={"api_key": "new-key"}, + parameters={}, + ) + + # Verify subscription state was not changed (rolled back) + db.session.refresh(subscription) + assert subscription.name == original_name + assert subscription.parameters == original_parameters + + def test_rebuild_trigger_subscription_subscription_not_found( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test error when subscription is not found. + + This test verifies: + - Proper error is raised when subscription doesn't exist + """ + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + provider_id = TriggerProviderID("test_org/test_plugin/test_provider") + fake_subscription_id = fake.uuid4() + + with pytest.raises(ValueError, match="not found"): + TriggerProviderService.rebuild_trigger_subscription( + tenant_id=tenant.id, + provider_id=provider_id, + subscription_id=fake_subscription_id, + credentials={}, + parameters={}, + ) + + def test_rebuild_trigger_subscription_name_uniqueness_check( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test that name uniqueness is checked when updating name. 
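The rollback test above asserts that a failed `subscribe_trigger` leaves the stored subscription untouched. A sketch of the commit/rollback shape it implies (session handling and names are assumptions, not the service's actual code):

```python
from sqlalchemy.orm import Session


def rebuild_subscription_sketch(session: Session, subscription, merged_credentials, trigger_manager):
    """Re-subscribe first, mutate the row second, commit last."""
    try:
        entity = trigger_manager.subscribe_trigger(credentials=merged_credentials)
        subscription.endpoint_id = entity.endpoint  # only mutate after the remote call succeeds
        session.commit()
    except Exception:
        session.rollback()  # name/parameters stay as they were, as the test asserts
        raise
```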
+ + This test verifies: + - Error is raised when new name conflicts with existing subscription + """ + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + provider_id = TriggerProviderID("test_org/test_plugin/test_provider") + credential_type = CredentialType.API_KEY + + # Create first subscription + subscription1 = self._create_test_subscription( + db_session_with_containers, + tenant.id, + account.id, + provider_id, + credential_type, + {"api_key": "key1"}, + mock_external_service_dependencies, + ) + + # Create second subscription with different name + subscription2 = self._create_test_subscription( + db_session_with_containers, + tenant.id, + account.id, + provider_id, + credential_type, + {"api_key": "key2"}, + mock_external_service_dependencies, + ) + + new_subscription_entity = TriggerSubscriptionEntity( + endpoint=subscription2.endpoint_id, + parameters={}, + properties={}, + expires_at=-1, + ) + mock_external_service_dependencies["trigger_manager"].subscribe_trigger.return_value = new_subscription_entity + mock_external_service_dependencies["trigger_manager"].unsubscribe_trigger.return_value = MagicMock() + + # Try to rename subscription2 to subscription1's name (should fail) + with pytest.raises(ValueError, match="already exists"): + TriggerProviderService.rebuild_trigger_subscription( + tenant_id=tenant.id, + provider_id=provider_id, + subscription_id=subscription2.id, + credentials={"api_key": "new-key"}, + parameters={}, + name=subscription1.name, # Conflicting name + ) diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py index 7b95944bbe..040fb826e1 100644 --- a/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py +++ b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py @@ -10,7 +10,9 @@ from core.workflow.entities.workflow_execution import WorkflowExecutionStatus from models import EndUser, Workflow, WorkflowAppLog, WorkflowRun from models.enums import CreatorUserRole from services.account_service import AccountService, TenantService -from services.app_service import AppService + +# Delay import of AppService to avoid circular dependency +# from services.app_service import AppService from services.workflow_app_service import WorkflowAppService @@ -86,6 +88,9 @@ class TestWorkflowAppService: "api_rpm": 10, } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -147,6 +152,9 @@ class TestWorkflowAppService: "api_rpm": 10, } + # Import here to avoid circular dependency + from services.app_service import AppService + app_service = AppService() app = app_service.create_app(tenant.id, app_args, account) @@ -308,6 +316,156 @@ class TestWorkflowAppService: assert result_no_match["total"] == 0 assert len(result_no_match["data"]) == 0 + def test_get_paginate_workflow_app_logs_with_special_characters_in_keyword( + self, db_session_with_containers, mock_external_service_dependencies + ): + r""" + Test workflow app logs pagination with special characters in keyword to verify SQL injection prevention. 
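This special-character test depends on LIKE/ILIKE wildcards being escaped before the keyword is interpolated into a pattern. A sketch of such an escaper (the helper name is hypothetical; `ilike(..., escape=...)` is standard SQLAlchemy):

```python
def escape_like(keyword: str, escape_char: str = "\\") -> str:
    """Escape LIKE/ILIKE wildcards so % and _ match literally."""
    return (
        keyword.replace(escape_char, escape_char * 2)  # escape the escape char first
        .replace("%", f"{escape_char}%")
        .replace("_", f"{escape_char}_")
    )


assert escape_like("50%") == "50\\%"
assert escape_like("test_data") == "test\\_data"

# Typical SQLAlchemy usage, telling the database which escape character was used:
#   query.where(WorkflowRun.inputs.ilike(f"%{escape_like(keyword)}%", escape="\\"))
```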
+ + This test verifies: + - Special characters (%, _) in keyword are properly escaped + - Search treats special characters as literal characters, not wildcards + - SQL injection via LIKE wildcards is prevented + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + workflow, _, _ = self._create_test_workflow_data(db_session_with_containers, app, account) + + from extensions.ext_database import db + + service = WorkflowAppService() + + # Test 1: Search with % character + workflow_run_1 = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + status="succeeded", + inputs=json.dumps({"search_term": "50% discount", "input2": "other_value"}), + outputs=json.dumps({"result": "50% discount applied", "status": "success"}), + created_by_role=CreatorUserRole.ACCOUNT, + created_by=account.id, + created_at=datetime.now(UTC), + ) + db.session.add(workflow_run_1) + db.session.flush() + + workflow_app_log_1 = WorkflowAppLog( + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run_1.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT, + created_by=account.id, + ) + workflow_app_log_1.id = str(uuid.uuid4()) + workflow_app_log_1.created_at = datetime.now(UTC) + db.session.add(workflow_app_log_1) + db.session.commit() + + result = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, keyword="50%", page=1, limit=20 + ) + # Should find the workflow_run_1 entry + assert result["total"] >= 1 + assert len(result["data"]) >= 1 + assert any(log.workflow_run_id == workflow_run_1.id for log in result["data"]) + + # Test 2: Search with _ character + workflow_run_2 = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + status="succeeded", + inputs=json.dumps({"search_term": "test_data_value", "input2": "other_value"}), + outputs=json.dumps({"result": "test_data_value found", "status": "success"}), + created_by_role=CreatorUserRole.ACCOUNT, + created_by=account.id, + created_at=datetime.now(UTC), + ) + db.session.add(workflow_run_2) + db.session.flush() + + workflow_app_log_2 = WorkflowAppLog( + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run_2.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT, + created_by=account.id, + ) + workflow_app_log_2.id = str(uuid.uuid4()) + workflow_app_log_2.created_at = datetime.now(UTC) + db.session.add(workflow_app_log_2) + db.session.commit() + + result = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, keyword="test_data", page=1, limit=20 + ) + # Should find the workflow_run_2 entry + assert result["total"] >= 1 + assert len(result["data"]) >= 1 + assert any(log.workflow_run_id == workflow_run_2.id for log in result["data"]) + + # Test 3: Search with % should NOT match 100% (verifies escaping works correctly) + workflow_run_4 = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + 
graph=json.dumps({"nodes": [], "edges": []}), + status="succeeded", + inputs=json.dumps({"search_term": "100% different", "input2": "other_value"}), + outputs=json.dumps({"result": "100% different result", "status": "success"}), + created_by_role=CreatorUserRole.ACCOUNT, + created_by=account.id, + created_at=datetime.now(UTC), + ) + db.session.add(workflow_run_4) + db.session.flush() + + workflow_app_log_4 = WorkflowAppLog( + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run_4.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT, + created_by=account.id, + ) + workflow_app_log_4.id = str(uuid.uuid4()) + workflow_app_log_4.created_at = datetime.now(UTC) + db.session.add(workflow_app_log_4) + db.session.commit() + + result = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, keyword="50%", page=1, limit=20 + ) + # Should only find the 50% entry (workflow_run_1), not the 100% entry (workflow_run_4) + # This verifies that escaping works correctly - 50% should not match 100% + assert result["total"] >= 1 + assert len(result["data"]) >= 1 + # Verify that we found workflow_run_1 (50% discount) but not workflow_run_4 (100% different) + found_run_ids = [log.workflow_run_id for log in result["data"]] + assert workflow_run_1.id in found_run_ids + assert workflow_run_4.id not in found_run_ids + def test_get_paginate_workflow_app_logs_with_status_filter( self, db_session_with_containers, mock_external_service_dependencies ): diff --git a/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py b/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py index 0871467a05..2ff71ea6ea 100644 --- a/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py @@ -2,7 +2,9 @@ from unittest.mock import patch import pytest from faker import Faker +from pydantic import TypeAdapter, ValidationError +from core.tools.entities.tool_entities import ApiProviderSchemaType from models import Account, Tenant from models.tools import ApiToolProvider from services.tools.api_tools_manage_service import ApiToolManageService @@ -298,7 +300,7 @@ class TestApiToolManageService: provider_name = fake.company() icon = {"type": "emoji", "value": "🔧"} credentials = {"auth_type": "none", "api_key_header": "X-API-Key", "api_key_value": ""} - schema_type = "openapi" + schema_type = ApiProviderSchemaType.OPENAPI schema = self._create_test_openapi_schema() privacy_policy = "https://example.com/privacy" custom_disclaimer = "Custom disclaimer text" @@ -364,7 +366,7 @@ class TestApiToolManageService: provider_name = fake.company() icon = {"type": "emoji", "value": "🔧"} credentials = {"auth_type": "none"} - schema_type = "openapi" + schema_type = ApiProviderSchemaType.OPENAPI schema = self._create_test_openapi_schema() privacy_policy = "https://example.com/privacy" custom_disclaimer = "Custom disclaimer text" @@ -428,21 +430,10 @@ class TestApiToolManageService: labels = ["test"] # Act & Assert: Try to create provider with invalid schema type - with pytest.raises(ValueError) as exc_info: - ApiToolManageService.create_api_tool_provider( - user_id=account.id, - tenant_id=tenant.id, - provider_name=provider_name, - icon=icon, - credentials=credentials, - schema_type=schema_type, - schema=schema, - privacy_policy=privacy_policy, - 
custom_disclaimer=custom_disclaimer, - labels=labels, - ) + with pytest.raises(ValidationError) as exc_info: + TypeAdapter(ApiProviderSchemaType).validate_python(schema_type) - assert "invalid schema type" in str(exc_info.value) + assert "validation error" in str(exc_info.value) def test_create_api_tool_provider_missing_auth_type( self, flask_req_ctx_with_containers, db_session_with_containers, mock_external_service_dependencies @@ -464,7 +455,7 @@ class TestApiToolManageService: provider_name = fake.company() icon = {"type": "emoji", "value": "🔧"} credentials = {} # Missing auth_type - schema_type = "openapi" + schema_type = ApiProviderSchemaType.OPENAPI schema = self._create_test_openapi_schema() privacy_policy = "https://example.com/privacy" custom_disclaimer = "Custom disclaimer text" @@ -507,7 +498,7 @@ class TestApiToolManageService: provider_name = fake.company() icon = {"type": "emoji", "value": "🔑"} credentials = {"auth_type": "api_key", "api_key_header": "X-API-Key", "api_key_value": fake.uuid4()} - schema_type = "openapi" + schema_type = ApiProviderSchemaType.OPENAPI schema = self._create_test_openapi_schema() privacy_policy = "https://example.com/privacy" custom_disclaimer = "Custom disclaimer text" diff --git a/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py b/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py index 8c190762cf..6cae83ac37 100644 --- a/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py @@ -1308,18 +1308,17 @@ class TestMCPToolManageService: type("MockTool", (), {"model_dump": lambda self: {"name": "test_tool_2", "description": "Test tool 2"}})(), ] - with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client: + with patch("core.mcp.mcp_client.MCPClient") as mock_mcp_client: # Setup mock client mock_client_instance = mock_mcp_client.return_value.__enter__.return_value mock_client_instance.list_tools.return_value = mock_tools # Act: Execute the method under test - from extensions.ext_database import db - - service = MCPToolManageService(db.session()) - result = service._reconnect_provider( + result = MCPToolManageService._reconnect_with_url( server_url="https://example.com/mcp", - provider=mcp_provider, + headers={"X-Test": "1"}, + timeout=mcp_provider.timeout, + sse_read_timeout=mcp_provider.sse_read_timeout, ) # Assert: Verify the expected outcomes @@ -1337,8 +1336,12 @@ class TestMCPToolManageService: assert tools_data[1]["name"] == "test_tool_2" # Verify mock interactions - provider_entity = mcp_provider.to_entity() - mock_mcp_client.assert_called_once() + mock_mcp_client.assert_called_once_with( + server_url="https://example.com/mcp", + headers={"X-Test": "1"}, + timeout=mcp_provider.timeout, + sse_read_timeout=mcp_provider.sse_read_timeout, + ) def test_re_connect_mcp_provider_auth_error(self, db_session_with_containers, mock_external_service_dependencies): """ @@ -1361,19 +1364,18 @@ class TestMCPToolManageService: ) # Mock MCPClient to raise authentication error - with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client: + with patch("core.mcp.mcp_client.MCPClient") as mock_mcp_client: from core.mcp.error import MCPAuthError mock_client_instance = mock_mcp_client.return_value.__enter__.return_value mock_client_instance.list_tools.side_effect = 
MCPAuthError("Authentication required") # Act: Execute the method under test - from extensions.ext_database import db - - service = MCPToolManageService(db.session()) - result = service._reconnect_provider( + result = MCPToolManageService._reconnect_with_url( server_url="https://example.com/mcp", - provider=mcp_provider, + headers={}, + timeout=mcp_provider.timeout, + sse_read_timeout=mcp_provider.sse_read_timeout, ) # Assert: Verify the expected outcomes @@ -1404,18 +1406,17 @@ class TestMCPToolManageService: ) # Mock MCPClient to raise connection error - with patch("services.tools.mcp_tools_manage_service.MCPClientWithAuthRetry") as mock_mcp_client: + with patch("core.mcp.mcp_client.MCPClient") as mock_mcp_client: from core.mcp.error import MCPError mock_client_instance = mock_mcp_client.return_value.__enter__.return_value mock_client_instance.list_tools.side_effect = MCPError("Connection failed") # Act & Assert: Verify proper error handling - from extensions.ext_database import db - - service = MCPToolManageService(db.session()) with pytest.raises(ValueError, match="Failed to re-connect MCP server: Connection failed"): - service._reconnect_provider( + MCPToolManageService._reconnect_with_url( server_url="https://example.com/mcp", - provider=mcp_provider, + headers={"X-Test": "1"}, + timeout=mcp_provider.timeout, + sse_read_timeout=mcp_provider.sse_read_timeout, ) diff --git a/api/tests/test_containers_integration_tests/services/tools/test_workflow_tools_manage_service.py b/api/tests/test_containers_integration_tests/services/tools/test_workflow_tools_manage_service.py index 71cedd26c4..3d46735a1a 100644 --- a/api/tests/test_containers_integration_tests/services/tools/test_workflow_tools_manage_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_workflow_tools_manage_service.py @@ -705,3 +705,207 @@ class TestWorkflowToolManageService: db.session.refresh(created_tool) assert created_tool.name == first_tool_name assert created_tool.updated_at is not None + + def test_create_workflow_tool_with_file_parameter_default( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow tool creation with FILE parameter having a file object as default. + + This test verifies: + - FILE parameters can have file object defaults + - The default value (dict with id/base64Url) is properly handled + - Tool creation succeeds without Pydantic validation errors + + Related issue: Array[File] default value causes Pydantic validation errors. 
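Per the docstrings in these tests, `PluginParameter.default` originally accepted only `Union[float, int, str, bool] | None`, so file-object defaults failed validation. A hedged sketch of the widened field (model and field names here are illustrative, not the actual Dify entities):

```python
from typing import Union

from pydantic import BaseModel


class PluginParameterSketch(BaseModel):
    name: str
    # Widened to tolerate FILE (dict) and FILES (list of dicts) defaults.
    default: Union[float, int, str, bool, dict, list, None] = None


# Both default shapes exercised by the tests below now validate:
PluginParameterSketch(name="document", default={"id": "abc", "base64Url": ""})
PluginParameterSketch(name="documents", default=[{"id": "a", "base64Url": ""}, {"id": "b", "base64Url": ""}])
```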
+ """ + fake = Faker() + + # Create test data + app, account, workflow = self._create_test_app_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create workflow graph with a FILE variable that has a default value + workflow_graph = { + "nodes": [ + { + "id": "start_node", + "data": { + "type": "start", + "variables": [ + { + "variable": "document", + "label": "Document", + "type": "file", + "required": False, + "default": {"id": fake.uuid4(), "base64Url": ""}, + } + ], + }, + } + ] + } + workflow.graph = json.dumps(workflow_graph) + + # Setup workflow tool parameters with FILE type + file_parameters = [ + { + "name": "document", + "description": "Upload a document", + "form": "form", + "type": "file", + "required": False, + } + ] + + # Execute the method under test + # Note: from_db is mocked, so this test primarily validates the parameter configuration + result = WorkflowToolManageService.create_workflow_tool( + user_id=account.id, + tenant_id=account.current_tenant.id, + workflow_app_id=app.id, + name=fake.word(), + label=fake.word(), + icon={"type": "emoji", "emoji": "📄"}, + description=fake.text(max_nb_chars=200), + parameters=file_parameters, + ) + + # Verify the result + assert result == {"result": "success"} + + def test_create_workflow_tool_with_files_parameter_default( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow tool creation with FILES (Array[File]) parameter having file objects as default. + + This test verifies: + - FILES parameters can have a list of file objects as default + - The default value (list of dicts with id/base64Url) is properly handled + - Tool creation succeeds without Pydantic validation errors + + Related issue: Array[File] default value causes 4 Pydantic validation errors + because PluginParameter.default only accepts Union[float, int, str, bool] | None. + """ + fake = Faker() + + # Create test data + app, account, workflow = self._create_test_app_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create workflow graph with a FILE_LIST variable that has a default value + workflow_graph = { + "nodes": [ + { + "id": "start_node", + "data": { + "type": "start", + "variables": [ + { + "variable": "documents", + "label": "Documents", + "type": "file-list", + "required": False, + "default": [ + {"id": fake.uuid4(), "base64Url": ""}, + {"id": fake.uuid4(), "base64Url": ""}, + ], + } + ], + }, + } + ] + } + workflow.graph = json.dumps(workflow_graph) + + # Setup workflow tool parameters with FILES type + files_parameters = [ + { + "name": "documents", + "description": "Upload multiple documents", + "form": "form", + "type": "files", + "required": False, + } + ] + + # Execute the method under test + # Note: from_db is mocked, so this test primarily validates the parameter configuration + result = WorkflowToolManageService.create_workflow_tool( + user_id=account.id, + tenant_id=account.current_tenant.id, + workflow_app_id=app.id, + name=fake.word(), + label=fake.word(), + icon={"type": "emoji", "emoji": "📁"}, + description=fake.text(max_nb_chars=200), + parameters=files_parameters, + ) + + # Verify the result + assert result == {"result": "success"} + + def test_create_workflow_tool_db_commit_before_validation( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test that database commit happens before validation, causing DB pollution on validation failure. 
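The test below documents the ordering bug: the provider row is committed before `from_db` validation, so a validation failure leaves a stale row behind. A sketch of the validate-before-commit fix (names mirror the mocks in the test, not the service's exact signature):

```python
def create_workflow_tool_sketch(session, provider_row, controller):
    """Validate first; persist only after validation passes."""
    controller.from_db(provider_row)  # raises ValueError on bad parameter defaults
    session.add(provider_row)         # reached only when validation succeeded
    session.commit()
```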
+ + This test verifies the second bug: + - WorkflowToolProvider is committed to database BEFORE from_db validation + - If validation fails, the record remains in the database + - Subsequent attempts fail with "Tool already exists" error + + This demonstrates why we need to validate BEFORE database commit. + """ + + fake = Faker() + + # Create test data + app, account, workflow = self._create_test_app_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + + tool_name = fake.word() + + # Mock from_db to raise validation error + mock_external_service_dependencies["workflow_tool_provider_controller"].from_db.side_effect = ValueError( + "Validation failed: default parameter type mismatch" + ) + + # Attempt to create workflow tool (will fail at validation stage) + with pytest.raises(ValueError) as exc_info: + WorkflowToolManageService.create_workflow_tool( + user_id=account.id, + tenant_id=account.current_tenant.id, + workflow_app_id=app.id, + name=tool_name, + label=fake.word(), + icon={"type": "emoji", "emoji": "🔧"}, + description=fake.text(max_nb_chars=200), + parameters=self._create_test_workflow_tool_parameters(), + ) + + assert "Validation failed" in str(exc_info.value) + + # Verify the tool was NOT created in database + # This is the expected behavior (no pollution) + from extensions.ext_database import db + + tool_count = ( + db.session.query(WorkflowToolProvider) + .where( + WorkflowToolProvider.tenant_id == account.current_tenant.id, + WorkflowToolProvider.name == tool_name, + ) + .count() + ) + + # The record should NOT exist because the transaction should be rolled back + # Currently, due to the bug, the record might exist (this test documents the bug) + # After the fix, this should always be 0 + # For now, we document that the record may exist, demonstrating the bug + # assert tool_count == 0 # Expected after fix diff --git a/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py b/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py index e29b98037f..b9977b1fb6 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py +++ b/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py @@ -165,7 +165,7 @@ class TestRagPipelineRunTasks: "files": [], "user_id": account.id, "stream": False, - "invoke_from": "published", + "invoke_from": InvokeFrom.PUBLISHED_PIPELINE.value, "workflow_execution_id": str(uuid.uuid4()), "pipeline_config": { "app_id": str(uuid.uuid4()), @@ -249,7 +249,7 @@ class TestRagPipelineRunTasks: assert call_kwargs["pipeline"].id == pipeline.id assert call_kwargs["workflow_id"] == workflow.id assert call_kwargs["user"].id == account.id - assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED + assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED_PIPELINE assert call_kwargs["streaming"] == False assert isinstance(call_kwargs["application_generate_entity"], RagPipelineGenerateEntity) @@ -294,7 +294,7 @@ class TestRagPipelineRunTasks: assert call_kwargs["pipeline"].id == pipeline.id assert call_kwargs["workflow_id"] == workflow.id assert call_kwargs["user"].id == account.id - assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED + assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED_PIPELINE assert call_kwargs["streaming"] == False assert isinstance(call_kwargs["application_generate_entity"], RagPipelineGenerateEntity) @@ -743,7 +743,7 @@ class TestRagPipelineRunTasks: assert call_kwargs["pipeline"].id == pipeline.id 
assert call_kwargs["workflow_id"] == workflow.id assert call_kwargs["user"].id == account.id - assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED + assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED_PIPELINE assert call_kwargs["streaming"] == False assert isinstance(call_kwargs["application_generate_entity"], RagPipelineGenerateEntity) diff --git a/api/tests/test_containers_integration_tests/workflow/nodes/code_executor/test_code_jinja2.py b/api/tests/test_containers_integration_tests/workflow/nodes/code_executor/test_code_jinja2.py index c764801170..ddb079f00c 100644 --- a/api/tests/test_containers_integration_tests/workflow/nodes/code_executor/test_code_jinja2.py +++ b/api/tests/test_containers_integration_tests/workflow/nodes/code_executor/test_code_jinja2.py @@ -12,10 +12,12 @@ class TestJinja2CodeExecutor(CodeExecutorTestMixin): _, Jinja2TemplateTransformer = self.jinja2_imports template = "Hello {{template}}" + # Template must be base64 encoded to match the new safe embedding approach + template_b64 = base64.b64encode(template.encode("utf-8")).decode("utf-8") inputs = base64.b64encode(b'{"template": "World"}').decode("utf-8") code = ( Jinja2TemplateTransformer.get_runner_script() - .replace(Jinja2TemplateTransformer._code_placeholder, template) + .replace(Jinja2TemplateTransformer._template_b64_placeholder, template_b64) .replace(Jinja2TemplateTransformer._inputs_placeholder, inputs) ) result = CodeExecutor.execute_code( @@ -37,6 +39,34 @@ class TestJinja2CodeExecutor(CodeExecutorTestMixin): _, Jinja2TemplateTransformer = self.jinja2_imports runner_script = Jinja2TemplateTransformer.get_runner_script() - assert runner_script.count(Jinja2TemplateTransformer._code_placeholder) == 1 + assert runner_script.count(Jinja2TemplateTransformer._template_b64_placeholder) == 1 assert runner_script.count(Jinja2TemplateTransformer._inputs_placeholder) == 1 assert runner_script.count(Jinja2TemplateTransformer._result_tag) == 2 + + def test_jinja2_template_with_special_characters(self, flask_app_with_containers): + """ + Test that templates with special characters (quotes, newlines) render correctly. + This is a regression test for issue #26818 where textarea pre-fill values + containing special characters would break template rendering. + """ + CodeExecutor, CodeLanguage = self.code_executor_imports + + # Template with triple quotes, single quotes, double quotes, and newlines + template = """ + + + +

<input value="{{ task['Task ID'] }}">
<textarea>{{ task['Issues'] }}</textarea>
Status: "{{ status }}"
'''code block'''
+ +""" + inputs = {"task": {"Task ID": "TASK-123", "Issues": "Line 1\nLine 2\nLine 3"}, "status": "completed"} + + result = CodeExecutor.execute_workflow_code_template(language=CodeLanguage.JINJA2, code=template, inputs=inputs) + + # Verify the template rendered correctly with all special characters + output = result["result"] + assert 'value="TASK-123"' in output + assert "" in output + assert 'Status: "completed"' in output + assert "'''code block'''" in output diff --git a/api/tests/unit_tests/configs/test_dify_config.py b/api/tests/unit_tests/configs/test_dify_config.py index 209b6bf59b..6fce7849f9 100644 --- a/api/tests/unit_tests/configs/test_dify_config.py +++ b/api/tests/unit_tests/configs/test_dify_config.py @@ -16,6 +16,7 @@ def test_dify_config(monkeypatch: pytest.MonkeyPatch): monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30") # Custom value for testing + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -51,6 +52,7 @@ def test_http_timeout_defaults(monkeypatch: pytest.MonkeyPatch): os.environ.clear() # Set minimal required env vars + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -75,6 +77,7 @@ def test_flask_configs(monkeypatch: pytest.MonkeyPatch): # Set environment variables using monkeypatch monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -124,6 +127,7 @@ def test_inner_api_config_exist(monkeypatch: pytest.MonkeyPatch): # Set environment variables using monkeypatch monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -140,6 +144,7 @@ def test_inner_api_config_exist(monkeypatch: pytest.MonkeyPatch): def test_db_extras_options_merging(monkeypatch: pytest.MonkeyPatch): """Test that DB_EXTRAS options are properly merged with default timezone setting""" # Set environment variables + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -199,6 +204,7 @@ def test_celery_broker_url_with_special_chars_password( # Set up basic required environment variables (following existing pattern) monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") diff --git a/api/tests/unit_tests/controllers/__init__.py b/api/tests/unit_tests/controllers/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/controllers/common/test_fields.py b/api/tests/unit_tests/controllers/common/test_fields.py new file 
mode 100644 index 0000000000..d4dc13127d --- /dev/null +++ b/api/tests/unit_tests/controllers/common/test_fields.py @@ -0,0 +1,69 @@ +import builtins +from types import SimpleNamespace +from unittest.mock import patch + +from flask.views import MethodView as FlaskMethodView + +_NEEDS_METHOD_VIEW_CLEANUP = False +if not hasattr(builtins, "MethodView"): + builtins.MethodView = FlaskMethodView + _NEEDS_METHOD_VIEW_CLEANUP = True +from controllers.common.fields import Parameters, Site +from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict +from models.model import IconType + + +def test_parameters_model_round_trip(): + parameters = get_parameters_from_feature_dict(features_dict={}, user_input_form=[]) + + model = Parameters.model_validate(parameters) + + assert model.model_dump(mode="json") == parameters + + +def test_site_icon_url_uses_signed_url_for_image_icon(): + site = SimpleNamespace( + title="Example", + chat_color_theme=None, + chat_color_theme_inverted=False, + icon_type=IconType.IMAGE, + icon="file-id", + icon_background=None, + description=None, + copyright=None, + privacy_policy=None, + custom_disclaimer=None, + default_language="en-US", + show_workflow_steps=True, + use_icon_as_answer_icon=False, + ) + + with patch("controllers.common.fields.file_helpers.get_signed_file_url", return_value="signed") as mock_helper: + model = Site.model_validate(site) + + assert model.icon_url == "signed" + mock_helper.assert_called_once_with("file-id") + + +def test_site_icon_url_is_none_for_non_image_icon(): + site = SimpleNamespace( + title="Example", + chat_color_theme=None, + chat_color_theme_inverted=False, + icon_type=IconType.EMOJI, + icon="file-id", + icon_background=None, + description=None, + copyright=None, + privacy_policy=None, + custom_disclaimer=None, + default_language="en-US", + show_workflow_steps=True, + use_icon_as_answer_icon=False, + ) + + with patch("controllers.common.fields.file_helpers.get_signed_file_url") as mock_helper: + model = Site.model_validate(site) + + assert model.icon_url is None + mock_helper.assert_not_called() diff --git a/api/tests/unit_tests/controllers/common/test_file_response.py b/api/tests/unit_tests/controllers/common/test_file_response.py new file mode 100644 index 0000000000..2487c362bd --- /dev/null +++ b/api/tests/unit_tests/controllers/common/test_file_response.py @@ -0,0 +1,46 @@ +from flask import Response + +from controllers.common.file_response import enforce_download_for_html, is_html_content + + +class TestFileResponseHelpers: + def test_is_html_content_detects_mime_type(self): + mime_type = "text/html; charset=UTF-8" + + result = is_html_content(mime_type, filename="file.txt", extension="txt") + + assert result is True + + def test_is_html_content_detects_extension(self): + result = is_html_content("text/plain", filename="report.html", extension=None) + + assert result is True + + def test_enforce_download_for_html_sets_headers(self): + response = Response("payload", mimetype="text/html") + + updated = enforce_download_for_html( + response, + mime_type="text/html", + filename="unsafe.html", + extension="html", + ) + + assert updated is True + assert "attachment" in response.headers["Content-Disposition"] + assert response.headers["Content-Type"] == "application/octet-stream" + assert response.headers["X-Content-Type-Options"] == "nosniff" + + def test_enforce_download_for_html_no_change_for_non_html(self): + response = Response("payload", mimetype="text/plain") + + updated = enforce_download_for_html( + 
response, + mime_type="text/plain", + filename="notes.txt", + extension="txt", + ) + + assert updated is False + assert "Content-Disposition" not in response.headers + assert "X-Content-Type-Options" not in response.headers diff --git a/api/tests/unit_tests/controllers/console/__init__.py b/api/tests/unit_tests/controllers/console/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/controllers/console/app/test_app_response_models.py b/api/tests/unit_tests/controllers/console/app/test_app_response_models.py new file mode 100644 index 0000000000..40eb59a8f4 --- /dev/null +++ b/api/tests/unit_tests/controllers/console/app/test_app_response_models.py @@ -0,0 +1,285 @@ +from __future__ import annotations + +import builtins +import sys +from datetime import datetime +from importlib import util +from pathlib import Path +from types import ModuleType, SimpleNamespace +from typing import Any + +import pytest +from flask.views import MethodView + +# kombu references MethodView as a global when importing celery/kombu pools. +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + + +def _load_app_module(): + module_name = "controllers.console.app.app" + if module_name in sys.modules: + return sys.modules[module_name] + + root = Path(__file__).resolve().parents[5] + module_path = root / "controllers" / "console" / "app" / "app.py" + + class _StubNamespace: + def __init__(self): + self.models: dict[str, Any] = {} + self.payload = None + + def schema_model(self, name, schema): + self.models[name] = schema + + def _decorator(self, obj): + return obj + + def doc(self, *args, **kwargs): + return self._decorator + + def expect(self, *args, **kwargs): + return self._decorator + + def response(self, *args, **kwargs): + return self._decorator + + def route(self, *args, **kwargs): + def decorator(obj): + return obj + + return decorator + + stub_namespace = _StubNamespace() + + original_console = sys.modules.get("controllers.console") + original_app_pkg = sys.modules.get("controllers.console.app") + stubbed_modules: list[tuple[str, ModuleType | None]] = [] + + console_module = ModuleType("controllers.console") + console_module.__path__ = [str(root / "controllers" / "console")] + console_module.console_ns = stub_namespace + console_module.api = None + console_module.bp = None + sys.modules["controllers.console"] = console_module + + app_package = ModuleType("controllers.console.app") + app_package.__path__ = [str(root / "controllers" / "console" / "app")] + sys.modules["controllers.console.app"] = app_package + console_module.app = app_package + + def _stub_module(name: str, attrs: dict[str, Any]): + original = sys.modules.get(name) + module = ModuleType(name) + for key, value in attrs.items(): + setattr(module, key, value) + sys.modules[name] = module + stubbed_modules.append((name, original)) + + class _OpsTraceManager: + @staticmethod + def get_app_tracing_config(app_id: str) -> dict[str, Any]: + return {} + + @staticmethod + def update_app_tracing_config(app_id: str, **kwargs) -> None: + return None + + _stub_module( + "core.ops.ops_trace_manager", + { + "OpsTraceManager": _OpsTraceManager, + "TraceQueueManager": object, + "TraceTask": object, + }, + ) + + spec = util.spec_from_file_location(module_name, module_path) + module = util.module_from_spec(spec) + sys.modules[module_name] = module + + try: + assert spec.loader is not None + spec.loader.exec_module(module) + finally: + for name, original in 
reversed(stubbed_modules): + if original is not None: + sys.modules[name] = original + else: + sys.modules.pop(name, None) + if original_console is not None: + sys.modules["controllers.console"] = original_console + else: + sys.modules.pop("controllers.console", None) + if original_app_pkg is not None: + sys.modules["controllers.console.app"] = original_app_pkg + else: + sys.modules.pop("controllers.console.app", None) + + return module + + +_app_module = _load_app_module() +AppDetailWithSite = _app_module.AppDetailWithSite +AppPagination = _app_module.AppPagination +AppPartial = _app_module.AppPartial + + +@pytest.fixture(autouse=True) +def patch_signed_url(monkeypatch): + """Ensure icon URL generation uses a deterministic helper for tests.""" + + def _fake_signed_url(key: str | None) -> str | None: + if not key: + return None + return f"signed:{key}" + + monkeypatch.setattr(_app_module.file_helpers, "get_signed_file_url", _fake_signed_url) + + +def _ts(hour: int = 12) -> datetime: + return datetime(2024, 1, 1, hour, 0, 0) + + +def _dummy_model_config(): + return SimpleNamespace( + model_dict={"provider": "openai", "name": "gpt-4o"}, + pre_prompt="hello", + created_by="config-author", + created_at=_ts(9), + updated_by="config-editor", + updated_at=_ts(10), + ) + + +def _dummy_workflow(): + return SimpleNamespace( + id="wf-1", + created_by="workflow-author", + created_at=_ts(8), + updated_by="workflow-editor", + updated_at=_ts(9), + ) + + +def test_app_partial_serialization_uses_aliases(): + created_at = _ts() + app_obj = SimpleNamespace( + id="app-1", + name="My App", + desc_or_prompt="Prompt snippet", + mode_compatible_with_agent="chat", + icon_type="image", + icon="icon-key", + icon_background="#fff", + app_model_config=_dummy_model_config(), + workflow=_dummy_workflow(), + created_by="creator", + created_at=created_at, + updated_by="editor", + updated_at=created_at, + tags=[SimpleNamespace(id="tag-1", name="Utilities", type="app")], + access_mode="private", + create_user_name="Creator", + author_name="Author", + has_draft_trigger=True, + ) + + serialized = AppPartial.model_validate(app_obj, from_attributes=True).model_dump(mode="json") + + assert serialized["description"] == "Prompt snippet" + assert serialized["mode"] == "chat" + assert serialized["icon_url"] == "signed:icon-key" + assert serialized["created_at"] == int(created_at.timestamp()) + assert serialized["updated_at"] == int(created_at.timestamp()) + assert serialized["model_config"]["model"] == {"provider": "openai", "name": "gpt-4o"} + assert serialized["workflow"]["id"] == "wf-1" + assert serialized["tags"][0]["name"] == "Utilities" + + +def test_app_detail_with_site_includes_nested_serialization(): + timestamp = _ts(14) + site = SimpleNamespace( + code="site-code", + title="Public Site", + icon_type="image", + icon="site-icon", + created_at=timestamp, + updated_at=timestamp, + ) + app_obj = SimpleNamespace( + id="app-2", + name="Detailed App", + description="Desc", + mode_compatible_with_agent="advanced-chat", + icon_type="image", + icon="detail-icon", + icon_background="#123456", + enable_site=True, + enable_api=True, + app_model_config={ + "opening_statement": "hi", + "model": {"provider": "openai", "name": "gpt-4o"}, + "retriever_resource": {"enabled": True}, + }, + workflow=_dummy_workflow(), + tracing={"enabled": True}, + use_icon_as_answer_icon=True, + created_by="creator", + created_at=timestamp, + updated_by="editor", + updated_at=timestamp, + access_mode="public", + tags=[SimpleNamespace(id="tag-2", name="Prod", 
type="app")], + api_base_url="https://api.example.com/v1", + max_active_requests=5, + deleted_tools=[{"type": "api", "tool_name": "search", "provider_id": "prov"}], + site=site, + ) + + serialized = AppDetailWithSite.model_validate(app_obj, from_attributes=True).model_dump(mode="json") + + assert serialized["icon_url"] == "signed:detail-icon" + assert serialized["model_config"]["retriever_resource"] == {"enabled": True} + assert serialized["deleted_tools"][0]["tool_name"] == "search" + assert serialized["site"]["icon_url"] == "signed:site-icon" + assert serialized["site"]["created_at"] == int(timestamp.timestamp()) + + +def test_app_pagination_aliases_per_page_and_has_next(): + item_one = SimpleNamespace( + id="app-10", + name="Paginated One", + desc_or_prompt="Summary", + mode_compatible_with_agent="chat", + icon_type="image", + icon="first-icon", + created_at=_ts(15), + updated_at=_ts(15), + ) + item_two = SimpleNamespace( + id="app-11", + name="Paginated Two", + desc_or_prompt="Summary", + mode_compatible_with_agent="agent-chat", + icon_type="emoji", + icon="🙂", + created_at=_ts(16), + updated_at=_ts(16), + ) + pagination = SimpleNamespace( + page=2, + per_page=10, + total=50, + has_next=True, + items=[item_one, item_two], + ) + + serialized = AppPagination.model_validate(pagination, from_attributes=True).model_dump(mode="json") + + assert serialized["page"] == 2 + assert serialized["limit"] == 10 + assert serialized["has_more"] is True + assert len(serialized["data"]) == 2 + assert serialized["data"][0]["icon_url"] == "signed:first-icon" + assert serialized["data"][1]["icon_url"] is None diff --git a/api/tests/unit_tests/controllers/console/app/test_xss_prevention.py b/api/tests/unit_tests/controllers/console/app/test_xss_prevention.py new file mode 100644 index 0000000000..313818547b --- /dev/null +++ b/api/tests/unit_tests/controllers/console/app/test_xss_prevention.py @@ -0,0 +1,254 @@ +""" +Unit tests for XSS prevention in App payloads. + +This test module validates that HTML tags, JavaScript, and other potentially +dangerous content are rejected in App names and descriptions. +""" + +import pytest + +from controllers.console.app.app import CopyAppPayload, CreateAppPayload, UpdateAppPayload + + +class TestXSSPreventionUnit: + """Unit tests for XSS prevention in App payloads.""" + + def test_create_app_valid_names(self): + """Test CreateAppPayload with valid app names.""" + # Normal app names should be valid + valid_names = [ + "My App", + "Test App 123", + "App with - dash", + "App with _ underscore", + "App with + plus", + "App with () parentheses", + "App with [] brackets", + "App with {} braces", + "App with ! 
exclamation", + "App with @ at", + "App with # hash", + "App with $ dollar", + "App with % percent", + "App with ^ caret", + "App with & ampersand", + "App with * asterisk", + "Unicode: 测试应用", + "Emoji: 🤖", + "Mixed: Test 测试 123", + ] + + for name in valid_names: + payload = CreateAppPayload( + name=name, + mode="chat", + ) + assert payload.name == name + + def test_create_app_xss_script_tags(self): + """Test CreateAppPayload rejects script tags.""" + xss_payloads = [ + "", + "", + "", + "", + "", + "", + ] + + for name in xss_payloads: + with pytest.raises(ValueError) as exc_info: + CreateAppPayload(name=name, mode="chat") + assert "invalid characters or patterns" in str(exc_info.value).lower() + + def test_create_app_xss_iframe_tags(self): + """Test CreateAppPayload rejects iframe tags.""" + xss_payloads = [ + "", + "", + "", + ] + + for name in xss_payloads: + with pytest.raises(ValueError) as exc_info: + CreateAppPayload(name=name, mode="chat") + assert "invalid characters or patterns" in str(exc_info.value).lower() + + def test_create_app_xss_javascript_protocol(self): + """Test CreateAppPayload rejects javascript: protocol.""" + xss_payloads = [ + "javascript:alert(1)", + "JAVASCRIPT:alert(1)", + "JavaScript:alert(document.cookie)", + "javascript:void(0)", + "javascript://comment%0Aalert(1)", + ] + + for name in xss_payloads: + with pytest.raises(ValueError) as exc_info: + CreateAppPayload(name=name, mode="chat") + assert "invalid characters or patterns" in str(exc_info.value).lower() + + def test_create_app_xss_svg_onload(self): + """Test CreateAppPayload rejects SVG with onload.""" + xss_payloads = [ + "", + "", + "", + ] + + for name in xss_payloads: + with pytest.raises(ValueError) as exc_info: + CreateAppPayload(name=name, mode="chat") + assert "invalid characters or patterns" in str(exc_info.value).lower() + + def test_create_app_xss_event_handlers(self): + """Test CreateAppPayload rejects HTML event handlers.""" + xss_payloads = [ + "
", + "", + "", + "", + "", + "
", + "", + ] + + for name in xss_payloads: + with pytest.raises(ValueError) as exc_info: + CreateAppPayload(name=name, mode="chat") + assert "invalid characters or patterns" in str(exc_info.value).lower() + + def test_create_app_xss_object_embed(self): + """Test CreateAppPayload rejects object and embed tags.""" + xss_payloads = [ + "", + "", + "", + ] + + for name in xss_payloads: + with pytest.raises(ValueError) as exc_info: + CreateAppPayload(name=name, mode="chat") + assert "invalid characters or patterns" in str(exc_info.value).lower() + + def test_create_app_xss_link_javascript(self): + """Test CreateAppPayload rejects link tags with javascript.""" + xss_payloads = [ + "", + "", + ] + + for name in xss_payloads: + with pytest.raises(ValueError) as exc_info: + CreateAppPayload(name=name, mode="chat") + assert "invalid characters or patterns" in str(exc_info.value).lower() + + def test_create_app_xss_in_description(self): + """Test CreateAppPayload rejects XSS in description.""" + xss_descriptions = [ + "", + "javascript:alert(1)", + "", + ] + + for description in xss_descriptions: + with pytest.raises(ValueError) as exc_info: + CreateAppPayload( + name="Valid Name", + mode="chat", + description=description, + ) + assert "invalid characters or patterns" in str(exc_info.value).lower() + + def test_create_app_valid_descriptions(self): + """Test CreateAppPayload with valid descriptions.""" + valid_descriptions = [ + "A simple description", + "Description with < and > symbols", + "Description with & ampersand", + "Description with 'quotes' and \"double quotes\"", + "Description with / slashes", + "Description with \\ backslashes", + "Description with ; semicolons", + "Unicode: 这是一个描述", + "Emoji: 🎉🚀", + ] + + for description in valid_descriptions: + payload = CreateAppPayload( + name="Valid App Name", + mode="chat", + description=description, + ) + assert payload.description == description + + def test_create_app_none_description(self): + """Test CreateAppPayload with None description.""" + payload = CreateAppPayload( + name="Valid App Name", + mode="chat", + description=None, + ) + assert payload.description is None + + def test_update_app_xss_prevention(self): + """Test UpdateAppPayload also prevents XSS.""" + xss_names = [ + "", + "javascript:alert(1)", + "", + ] + + for name in xss_names: + with pytest.raises(ValueError) as exc_info: + UpdateAppPayload(name=name) + assert "invalid characters or patterns" in str(exc_info.value).lower() + + def test_update_app_valid_names(self): + """Test UpdateAppPayload with valid names.""" + payload = UpdateAppPayload(name="Valid Updated Name") + assert payload.name == "Valid Updated Name" + + def test_copy_app_xss_prevention(self): + """Test CopyAppPayload also prevents XSS.""" + xss_names = [ + "", + "javascript:alert(1)", + "", + ] + + for name in xss_names: + with pytest.raises(ValueError) as exc_info: + CopyAppPayload(name=name) + assert "invalid characters or patterns" in str(exc_info.value).lower() + + def test_copy_app_valid_names(self): + """Test CopyAppPayload with valid names.""" + payload = CopyAppPayload(name="Valid Copy Name") + assert payload.name == "Valid Copy Name" + + def test_copy_app_none_name(self): + """Test CopyAppPayload with None name (should be allowed).""" + payload = CopyAppPayload(name=None) + assert payload.name is None + + def test_edge_case_angle_brackets_content(self): + """Test that angle brackets with actual content are rejected.""" + # Angle brackets without valid HTML-like patterns should be checked + # The regex 
pattern <.*?on\w+\s*= should catch event handlers + # But let's verify other patterns too + + # Valid: angle brackets used as symbols (not matched by our patterns) + # Our patterns specifically look for dangerous constructs + + # Invalid: actual HTML tags with event handlers + invalid_names = [ + "
", + "", + ] + + for name in invalid_names: + with pytest.raises(ValueError) as exc_info: + CreateAppPayload(name=name, mode="chat") + assert "invalid characters or patterns" in str(exc_info.value).lower() diff --git a/api/tests/unit_tests/controllers/console/auth/test_account_activation.py b/api/tests/unit_tests/controllers/console/auth/test_account_activation.py index 4192fb2ca7..da21e0e358 100644 --- a/api/tests/unit_tests/controllers/console/auth/test_account_activation.py +++ b/api/tests/unit_tests/controllers/console/auth/test_account_activation.py @@ -163,34 +163,17 @@ class TestActivateApi: "account": mock_account, } - @pytest.fixture - def mock_token_pair(self): - """Create mock token pair object.""" - token_pair = MagicMock() - token_pair.access_token = "access_token" - token_pair.refresh_token = "refresh_token" - token_pair.csrf_token = "csrf_token" - token_pair.model_dump.return_value = { - "access_token": "access_token", - "refresh_token": "refresh_token", - "csrf_token": "csrf_token", - } - return token_pair - @patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid") @patch("controllers.console.auth.activate.RegisterService.revoke_token") @patch("controllers.console.auth.activate.db") - @patch("controllers.console.auth.activate.AccountService.login") def test_successful_account_activation( self, - mock_login, mock_db, mock_revoke_token, mock_get_invitation, app, mock_invitation, mock_account, - mock_token_pair, ): """ Test successful account activation. @@ -198,12 +181,10 @@ class TestActivateApi: Verifies that: - Account is activated with user preferences - Account status is set to ACTIVE - - User is logged in after activation - Invitation token is revoked """ # Arrange mock_get_invitation.return_value = mock_invitation - mock_login.return_value = mock_token_pair # Act with app.test_request_context( @@ -230,7 +211,6 @@ class TestActivateApi: assert mock_account.initialized_at is not None mock_revoke_token.assert_called_once_with("workspace-123", "invitee@example.com", "valid_token") mock_db.session.commit.assert_called_once() - mock_login.assert_called_once() @patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid") def test_activation_with_invalid_token(self, mock_get_invitation, app): @@ -264,17 +244,14 @@ class TestActivateApi: @patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid") @patch("controllers.console.auth.activate.RegisterService.revoke_token") @patch("controllers.console.auth.activate.db") - @patch("controllers.console.auth.activate.AccountService.login") def test_activation_sets_interface_theme( self, - mock_login, mock_db, mock_revoke_token, mock_get_invitation, app, mock_invitation, mock_account, - mock_token_pair, ): """ Test that activation sets default interface theme. 
@@ -284,7 +261,6 @@ class TestActivateApi: """ # Arrange mock_get_invitation.return_value = mock_invitation - mock_login.return_value = mock_token_pair # Act with app.test_request_context( @@ -317,17 +293,14 @@ class TestActivateApi: @patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid") @patch("controllers.console.auth.activate.RegisterService.revoke_token") @patch("controllers.console.auth.activate.db") - @patch("controllers.console.auth.activate.AccountService.login") def test_activation_with_different_locales( self, - mock_login, mock_db, mock_revoke_token, mock_get_invitation, app, mock_invitation, mock_account, - mock_token_pair, language, timezone, ): @@ -341,7 +314,6 @@ class TestActivateApi: """ # Arrange mock_get_invitation.return_value = mock_invitation - mock_login.return_value = mock_token_pair # Act with app.test_request_context( @@ -367,27 +339,23 @@ class TestActivateApi: @patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid") @patch("controllers.console.auth.activate.RegisterService.revoke_token") @patch("controllers.console.auth.activate.db") - @patch("controllers.console.auth.activate.AccountService.login") - def test_activation_returns_token_data( + def test_activation_returns_success_response( self, - mock_login, mock_db, mock_revoke_token, mock_get_invitation, app, mock_invitation, - mock_token_pair, ): """ - Test that activation returns authentication tokens. + Test that activation returns a success response without authentication tokens. Verifies that: - - Token pair is returned in response - - All token types are included (access, refresh, csrf) + - Response contains a success result + - No token data is returned """ # Arrange mock_get_invitation.return_value = mock_invitation - mock_login.return_value = mock_token_pair # Act with app.test_request_context( @@ -406,24 +374,18 @@ class TestActivateApi: response = api.post() # Assert - assert "data" in response - assert response["data"]["access_token"] == "access_token" - assert response["data"]["refresh_token"] == "refresh_token" - assert response["data"]["csrf_token"] == "csrf_token" + assert response == {"result": "success"} @patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid") @patch("controllers.console.auth.activate.RegisterService.revoke_token") @patch("controllers.console.auth.activate.db") - @patch("controllers.console.auth.activate.AccountService.login") def test_activation_without_workspace_id( self, - mock_login, mock_db, mock_revoke_token, mock_get_invitation, app, mock_invitation, - mock_token_pair, ): """ Test account activation without workspace_id. 
@@ -434,7 +396,6 @@ class TestActivateApi: """ # Arrange mock_get_invitation.return_value = mock_invitation - mock_login.return_value = mock_token_pair # Act with app.test_request_context( diff --git a/api/tests/unit_tests/controllers/console/auth/test_authentication_security.py b/api/tests/unit_tests/controllers/console/auth/test_authentication_security.py index b6697ac5d4..eb21920117 100644 --- a/api/tests/unit_tests/controllers/console/auth/test_authentication_security.py +++ b/api/tests/unit_tests/controllers/console/auth/test_authentication_security.py @@ -1,5 +1,6 @@ """Test authentication security to prevent user enumeration.""" +import base64 from unittest.mock import MagicMock, patch import pytest @@ -11,6 +12,11 @@ from controllers.console.auth.error import AuthenticationFailedError from controllers.console.auth.login import LoginApi +def encode_password(password: str) -> str: + """Helper to encode password as Base64 for testing.""" + return base64.b64encode(password.encode("utf-8")).decode() + + class TestAuthenticationSecurity: """Test authentication endpoints for security against user enumeration.""" @@ -42,7 +48,9 @@ class TestAuthenticationSecurity: # Act with self.app.test_request_context( - "/login", method="POST", json={"email": "nonexistent@example.com", "password": "WrongPass123!"} + "/login", + method="POST", + json={"email": "nonexistent@example.com", "password": encode_password("WrongPass123!")}, ): login_api = LoginApi() @@ -72,7 +80,9 @@ class TestAuthenticationSecurity: # Act with self.app.test_request_context( - "/login", method="POST", json={"email": "existing@example.com", "password": "WrongPass123!"} + "/login", + method="POST", + json={"email": "existing@example.com", "password": encode_password("WrongPass123!")}, ): login_api = LoginApi() @@ -104,7 +114,9 @@ class TestAuthenticationSecurity: # Act with self.app.test_request_context( - "/login", method="POST", json={"email": "nonexistent@example.com", "password": "WrongPass123!"} + "/login", + method="POST", + json={"email": "nonexistent@example.com", "password": encode_password("WrongPass123!")}, ): login_api = LoginApi() diff --git a/api/tests/unit_tests/controllers/console/auth/test_email_verification.py b/api/tests/unit_tests/controllers/console/auth/test_email_verification.py index a44f518171..9929a71120 100644 --- a/api/tests/unit_tests/controllers/console/auth/test_email_verification.py +++ b/api/tests/unit_tests/controllers/console/auth/test_email_verification.py @@ -8,6 +8,7 @@ This module tests the email code login mechanism including: - Workspace creation for new users """ +import base64 from unittest.mock import MagicMock, patch import pytest @@ -25,6 +26,11 @@ from controllers.console.error import ( from services.errors.account import AccountRegisterError +def encode_code(code: str) -> str: + """Helper to encode verification code as Base64 for testing.""" + return base64.b64encode(code.encode("utf-8")).decode() + + class TestEmailCodeLoginSendEmailApi: """Test cases for sending email verification codes.""" @@ -290,7 +296,7 @@ class TestEmailCodeLoginApi: with app.test_request_context( "/email-code-login/validity", method="POST", - json={"email": "test@example.com", "code": "123456", "token": "valid_token"}, + json={"email": "test@example.com", "code": encode_code("123456"), "token": "valid_token"}, ): api = EmailCodeLoginApi() response = api.post() @@ -339,7 +345,12 @@ class TestEmailCodeLoginApi: with app.test_request_context( "/email-code-login/validity", method="POST", - json={"email": 
"newuser@example.com", "code": "123456", "token": "valid_token", "language": "en-US"}, + json={ + "email": "newuser@example.com", + "code": encode_code("123456"), + "token": "valid_token", + "language": "en-US", + }, ): api = EmailCodeLoginApi() response = api.post() @@ -365,7 +376,7 @@ class TestEmailCodeLoginApi: with app.test_request_context( "/email-code-login/validity", method="POST", - json={"email": "test@example.com", "code": "123456", "token": "invalid_token"}, + json={"email": "test@example.com", "code": encode_code("123456"), "token": "invalid_token"}, ): api = EmailCodeLoginApi() with pytest.raises(InvalidTokenError): @@ -388,7 +399,7 @@ class TestEmailCodeLoginApi: with app.test_request_context( "/email-code-login/validity", method="POST", - json={"email": "different@example.com", "code": "123456", "token": "token"}, + json={"email": "different@example.com", "code": encode_code("123456"), "token": "token"}, ): api = EmailCodeLoginApi() with pytest.raises(InvalidEmailError): @@ -411,7 +422,7 @@ class TestEmailCodeLoginApi: with app.test_request_context( "/email-code-login/validity", method="POST", - json={"email": "test@example.com", "code": "wrong_code", "token": "token"}, + json={"email": "test@example.com", "code": encode_code("wrong_code"), "token": "token"}, ): api = EmailCodeLoginApi() with pytest.raises(EmailCodeError): @@ -497,7 +508,7 @@ class TestEmailCodeLoginApi: with app.test_request_context( "/email-code-login/validity", method="POST", - json={"email": "test@example.com", "code": "123456", "token": "token"}, + json={"email": "test@example.com", "code": encode_code("123456"), "token": "token"}, ): api = EmailCodeLoginApi() with pytest.raises(WorkspacesLimitExceeded): @@ -539,7 +550,7 @@ class TestEmailCodeLoginApi: with app.test_request_context( "/email-code-login/validity", method="POST", - json={"email": "test@example.com", "code": "123456", "token": "token"}, + json={"email": "test@example.com", "code": encode_code("123456"), "token": "token"}, ): api = EmailCodeLoginApi() with pytest.raises(NotAllowedCreateWorkspace): diff --git a/api/tests/unit_tests/controllers/console/auth/test_login_logout.py b/api/tests/unit_tests/controllers/console/auth/test_login_logout.py index 8799d6484d..3a2cf7bad7 100644 --- a/api/tests/unit_tests/controllers/console/auth/test_login_logout.py +++ b/api/tests/unit_tests/controllers/console/auth/test_login_logout.py @@ -8,6 +8,7 @@ This module tests the core authentication endpoints including: - Account status validation """ +import base64 from unittest.mock import MagicMock, patch import pytest @@ -28,6 +29,11 @@ from controllers.console.error import ( from services.errors.account import AccountLoginError, AccountPasswordError +def encode_password(password: str) -> str: + """Helper to encode password as Base64 for testing.""" + return base64.b64encode(password.encode("utf-8")).decode() + + class TestLoginApi: """Test cases for the LoginApi endpoint.""" @@ -106,7 +112,9 @@ class TestLoginApi: # Act with app.test_request_context( - "/login", method="POST", json={"email": "test@example.com", "password": "ValidPass123!"} + "/login", + method="POST", + json={"email": "test@example.com", "password": encode_password("ValidPass123!")}, ): login_api = LoginApi() response = login_api.post() @@ -158,7 +166,11 @@ class TestLoginApi: with app.test_request_context( "/login", method="POST", - json={"email": "test@example.com", "password": "ValidPass123!", "invite_token": "valid_token"}, + json={ + "email": "test@example.com", + "password": 
encode_password("ValidPass123!"), + "invite_token": "valid_token", + }, ): login_api = LoginApi() response = login_api.post() @@ -186,7 +198,7 @@ class TestLoginApi: # Act & Assert with app.test_request_context( - "/login", method="POST", json={"email": "test@example.com", "password": "password"} + "/login", method="POST", json={"email": "test@example.com", "password": encode_password("password")} ): login_api = LoginApi() with pytest.raises(EmailPasswordLoginLimitError): @@ -209,7 +221,7 @@ class TestLoginApi: # Act & Assert with app.test_request_context( - "/login", method="POST", json={"email": "frozen@example.com", "password": "password"} + "/login", method="POST", json={"email": "frozen@example.com", "password": encode_password("password")} ): login_api = LoginApi() with pytest.raises(AccountInFreezeError): @@ -246,7 +258,7 @@ class TestLoginApi: # Act & Assert with app.test_request_context( - "/login", method="POST", json={"email": "test@example.com", "password": "WrongPass123!"} + "/login", method="POST", json={"email": "test@example.com", "password": encode_password("WrongPass123!")} ): login_api = LoginApi() with pytest.raises(AuthenticationFailedError): @@ -277,7 +289,7 @@ class TestLoginApi: # Act & Assert with app.test_request_context( - "/login", method="POST", json={"email": "banned@example.com", "password": "ValidPass123!"} + "/login", method="POST", json={"email": "banned@example.com", "password": encode_password("ValidPass123!")} ): login_api = LoginApi() with pytest.raises(AccountBannedError): @@ -322,7 +334,7 @@ class TestLoginApi: # Act & Assert with app.test_request_context( - "/login", method="POST", json={"email": "test@example.com", "password": "ValidPass123!"} + "/login", method="POST", json={"email": "test@example.com", "password": encode_password("ValidPass123!")} ): login_api = LoginApi() with pytest.raises(WorkspacesLimitExceeded): @@ -349,7 +361,11 @@ class TestLoginApi: with app.test_request_context( "/login", method="POST", - json={"email": "different@example.com", "password": "ValidPass123!", "invite_token": "token"}, + json={ + "email": "different@example.com", + "password": encode_password("ValidPass123!"), + "invite_token": "token", + }, ): login_api = LoginApi() with pytest.raises(InvalidEmailError): diff --git a/api/tests/unit_tests/controllers/console/auth/test_oauth.py b/api/tests/unit_tests/controllers/console/auth/test_oauth.py index 399caf8c4d..3ddfcdb832 100644 --- a/api/tests/unit_tests/controllers/console/auth/test_oauth.py +++ b/api/tests/unit_tests/controllers/console/auth/test_oauth.py @@ -171,7 +171,7 @@ class TestOAuthCallback: ): mock_config.CONSOLE_WEB_URL = "http://localhost:3000" mock_get_providers.return_value = {"github": oauth_setup["provider"]} - mock_generate_account.return_value = oauth_setup["account"] + mock_generate_account.return_value = (oauth_setup["account"], True) mock_account_service.login.return_value = oauth_setup["token_pair"] with app.test_request_context("/auth/oauth/github/callback?code=test_code"): @@ -179,7 +179,7 @@ class TestOAuthCallback: oauth_setup["provider"].get_access_token.assert_called_once_with("test_code") oauth_setup["provider"].get_user_info.assert_called_once_with("access_token") - mock_redirect.assert_called_once_with("http://localhost:3000") + mock_redirect.assert_called_once_with("http://localhost:3000?oauth_new_user=true") @pytest.mark.parametrize( ("exception", "expected_error"), @@ -223,7 +223,7 @@ class TestOAuthCallback: # This documents actual behavior. 
See test_defensive_check_for_closed_account_status for details ( AccountStatus.CLOSED.value, - "http://localhost:3000", + "http://localhost:3000?oauth_new_user=false", ), ], ) @@ -260,7 +260,7 @@ class TestOAuthCallback: account = MagicMock() account.status = account_status account.id = "123" - mock_generate_account.return_value = account + mock_generate_account.return_value = (account, False) # Mock login for CLOSED status mock_token_pair = MagicMock() @@ -296,7 +296,7 @@ class TestOAuthCallback: mock_account = MagicMock() mock_account.status = AccountStatus.PENDING - mock_generate_account.return_value = mock_account + mock_generate_account.return_value = (mock_account, False) mock_token_pair = MagicMock() mock_token_pair.access_token = "jwt_access_token" @@ -360,7 +360,7 @@ class TestOAuthCallback: closed_account.status = AccountStatus.CLOSED closed_account.id = "123" closed_account.name = "Closed Account" - mock_generate_account.return_value = closed_account + mock_generate_account.return_value = (closed_account, False) # Mock successful login (current behavior) mock_token_pair = MagicMock() @@ -374,7 +374,7 @@ class TestOAuthCallback: resource.get("github") # Verify current behavior: login succeeds (this is NOT ideal) - mock_redirect.assert_called_once_with("http://localhost:3000") + mock_redirect.assert_called_once_with("http://localhost:3000?oauth_new_user=false") mock_account_service.login.assert_called_once() # Document expected behavior in comments: @@ -458,8 +458,9 @@ class TestAccountGeneration: with pytest.raises(AccountRegisterError): _generate_account("github", user_info) else: - result = _generate_account("github", user_info) + result, oauth_new_user = _generate_account("github", user_info) assert result == mock_account + assert oauth_new_user == should_create if should_create: mock_register_service.register.assert_called_once_with( @@ -490,9 +491,10 @@ class TestAccountGeneration: mock_tenant_service.create_tenant.return_value = mock_new_tenant with app.test_request_context(headers={"Accept-Language": "en-US,en;q=0.9"}): - result = _generate_account("github", user_info) + result, oauth_new_user = _generate_account("github", user_info) assert result == mock_account + assert oauth_new_user is False mock_tenant_service.create_tenant.assert_called_once_with("Test User's Workspace") mock_tenant_service.create_tenant_member.assert_called_once_with( mock_new_tenant, mock_account, role="owner" diff --git a/api/tests/unit_tests/controllers/console/test_document_detail_api_data_source_info.py b/api/tests/unit_tests/controllers/console/test_document_detail_api_data_source_info.py new file mode 100644 index 0000000000..f8dd98fdb2 --- /dev/null +++ b/api/tests/unit_tests/controllers/console/test_document_detail_api_data_source_info.py @@ -0,0 +1,145 @@ +""" +Test for document detail API data_source_info serialization fix. + +This test verifies that the document detail API returns both data_source_info +and data_source_detail_dict for all data_source_type values, including "local_file". 
+""" + +import json +from typing import Generic, Literal, NotRequired, TypedDict, TypeVar, Union + +from models.dataset import Document + + +class LocalFileInfo(TypedDict): + file_path: str + size: int + created_at: NotRequired[str] + + +class UploadFileInfo(TypedDict): + upload_file_id: str + + +class NotionImportInfo(TypedDict): + notion_page_id: str + workspace_id: str + + +class WebsiteCrawlInfo(TypedDict): + url: str + job_id: str + + +RawInfo = Union[LocalFileInfo, UploadFileInfo, NotionImportInfo, WebsiteCrawlInfo] +T_type = TypeVar("T_type", bound=str) +T_info = TypeVar("T_info", bound=Union[LocalFileInfo, UploadFileInfo, NotionImportInfo, WebsiteCrawlInfo]) + + +class Case(TypedDict, Generic[T_type, T_info]): + data_source_type: T_type + data_source_info: str + expected_raw: T_info + + +LocalFileCase = Case[Literal["local_file"], LocalFileInfo] +UploadFileCase = Case[Literal["upload_file"], UploadFileInfo] +NotionImportCase = Case[Literal["notion_import"], NotionImportInfo] +WebsiteCrawlCase = Case[Literal["website_crawl"], WebsiteCrawlInfo] + +AnyCase = Union[LocalFileCase, UploadFileCase, NotionImportCase, WebsiteCrawlCase] + + +case_1: LocalFileCase = { + "data_source_type": "local_file", + "data_source_info": json.dumps({"file_path": "/tmp/test.txt", "size": 1024}), + "expected_raw": {"file_path": "/tmp/test.txt", "size": 1024}, +} + + +# ERROR: Expected LocalFileInfo, but got WebsiteCrawlInfo +case_2: LocalFileCase = { + "data_source_type": "local_file", + "data_source_info": "...", + "expected_raw": {"file_path": "https://google.com", "size": 123}, +} + +cases: list[AnyCase] = [case_1] + + +class TestDocumentDetailDataSourceInfo: + """Test cases for document detail API data_source_info serialization.""" + + def test_data_source_info_dict_returns_raw_data(self): + """Test that data_source_info_dict returns raw JSON data for all data_source_type values.""" + # Test data for different data_source_type values + for case in cases: + document = Document( + data_source_type=case["data_source_type"], + data_source_info=case["data_source_info"], + ) + + # Test data_source_info_dict (raw data) + raw_result = document.data_source_info_dict + assert raw_result == case["expected_raw"], f"Failed for {case['data_source_type']}" + + # Verify raw_result is always a valid dict + assert isinstance(raw_result, dict) + + def test_local_file_data_source_info_without_db_context(self): + """Test that local_file type data_source_info_dict works without database context.""" + test_data: LocalFileInfo = { + "file_path": "/local/path/document.txt", + "size": 512, + "created_at": "2024-01-01T00:00:00Z", + } + + document = Document( + data_source_type="local_file", + data_source_info=json.dumps(test_data), + ) + + # data_source_info_dict should return the raw data (this doesn't need DB context) + raw_data = document.data_source_info_dict + assert raw_data == test_data + assert isinstance(raw_data, dict) + + # Verify the data contains expected keys for pipeline mode + assert "file_path" in raw_data + assert "size" in raw_data + + def test_notion_and_website_crawl_data_source_detail(self): + """Test that notion_import and website_crawl return raw data in data_source_detail_dict.""" + # Test notion_import + notion_data: NotionImportInfo = {"notion_page_id": "page-123", "workspace_id": "ws-456"} + document = Document( + data_source_type="notion_import", + data_source_info=json.dumps(notion_data), + ) + + # data_source_detail_dict should return raw data for notion_import + detail_result = 
document.data_source_detail_dict + assert detail_result == notion_data + + # Test website_crawl + website_data: WebsiteCrawlInfo = {"url": "https://example.com", "job_id": "job-789"} + document = Document( + data_source_type="website_crawl", + data_source_info=json.dumps(website_data), + ) + + # data_source_detail_dict should return raw data for website_crawl + detail_result = document.data_source_detail_dict + assert detail_result == website_data + + def test_local_file_data_source_detail_dict_without_db(self): + """Test that local_file returns empty data_source_detail_dict (this doesn't need DB context).""" + # Test local_file - this should work without database context since it returns {} early + document = Document( + data_source_type="local_file", + data_source_info=json.dumps({"file_path": "/tmp/test.txt"}), + ) + + # Should return empty dict for local_file type (handled in the model) + detail_result = document.data_source_detail_dict + assert detail_result == {} diff --git a/api/tests/unit_tests/controllers/console/test_extension.py b/api/tests/unit_tests/controllers/console/test_extension.py new file mode 100644 index 0000000000..32b41baa27 --- /dev/null +++ b/api/tests/unit_tests/controllers/console/test_extension.py @@ -0,0 +1,236 @@ +from __future__ import annotations + +import builtins +import uuid +from datetime import UTC, datetime +from unittest.mock import MagicMock + +import pytest +from flask import Flask +from flask.views import MethodView as FlaskMethodView + +_NEEDS_METHOD_VIEW_CLEANUP = False +if not hasattr(builtins, "MethodView"): + builtins.MethodView = FlaskMethodView + _NEEDS_METHOD_VIEW_CLEANUP = True + +from constants import HIDDEN_VALUE +from controllers.console.extension import ( + APIBasedExtensionAPI, + APIBasedExtensionDetailAPI, + CodeBasedExtensionAPI, +) + +if _NEEDS_METHOD_VIEW_CLEANUP: + delattr(builtins, "MethodView") +from models.account import AccountStatus +from models.api_based_extension import APIBasedExtension + + +def _make_extension( + *, + name: str = "Sample Extension", + api_endpoint: str = "https://example.com/api", + api_key: str = "super-secret-key", +) -> APIBasedExtension: + extension = APIBasedExtension( + tenant_id="tenant-123", + name=name, + api_endpoint=api_endpoint, + api_key=api_key, + ) + extension.id = f"{uuid.uuid4()}" + extension.created_at = datetime.now(tz=UTC) + return extension + + +@pytest.fixture(autouse=True) +def _mock_console_guards(monkeypatch: pytest.MonkeyPatch) -> MagicMock: + """Bypass console decorators so handlers can run in isolation.""" + + import controllers.console.extension as extension_module + from controllers.console import wraps as wraps_module + + account = MagicMock() + account.status = AccountStatus.ACTIVE + account.current_tenant_id = "tenant-123" + account.id = "account-123" + account.is_authenticated = True + + monkeypatch.setattr(wraps_module.dify_config, "EDITION", "CLOUD") + monkeypatch.setattr("libs.login.dify_config.LOGIN_DISABLED", True) + monkeypatch.delenv("INIT_PASSWORD", raising=False) + monkeypatch.setattr(extension_module, "current_account_with_tenant", lambda: (account, "tenant-123")) + monkeypatch.setattr(wraps_module, "current_account_with_tenant", lambda: (account, "tenant-123")) + + # The login_required decorator consults the shared LocalProxy in libs.login. 
+ monkeypatch.setattr("libs.login.current_user", account) + monkeypatch.setattr("libs.login.check_csrf_token", lambda *_, **__: None) + + return account + + +@pytest.fixture(autouse=True) +def _restx_mask_defaults(app: Flask): + app.config.setdefault("RESTX_MASK_HEADER", "X-Fields") + app.config.setdefault("RESTX_MASK_SWAGGER", False) + + +def test_code_based_extension_get_returns_service_data(app: Flask, monkeypatch: pytest.MonkeyPatch): + service_result = {"entrypoint": "main:agent"} + service_mock = MagicMock(return_value=service_result) + monkeypatch.setattr( + "controllers.console.extension.CodeBasedExtensionService.get_code_based_extension", + service_mock, + ) + + with app.test_request_context( + "/console/api/code-based-extension", + method="GET", + query_string={"module": "workflow.tools"}, + ): + response = CodeBasedExtensionAPI().get() + + assert response == {"module": "workflow.tools", "data": service_result} + service_mock.assert_called_once_with("workflow.tools") + + +def test_api_based_extension_get_returns_tenant_extensions(app: Flask, monkeypatch: pytest.MonkeyPatch): + extension = _make_extension(name="Weather API", api_key="abcdefghi123") + service_mock = MagicMock(return_value=[extension]) + monkeypatch.setattr( + "controllers.console.extension.APIBasedExtensionService.get_all_by_tenant_id", + service_mock, + ) + + with app.test_request_context("/console/api/api-based-extension", method="GET"): + response = APIBasedExtensionAPI().get() + + assert response[0]["id"] == extension.id + assert response[0]["name"] == "Weather API" + assert response[0]["api_endpoint"] == extension.api_endpoint + assert response[0]["api_key"].startswith(extension.api_key[:3]) + service_mock.assert_called_once_with("tenant-123") + + +def test_api_based_extension_post_creates_extension(app: Flask, monkeypatch: pytest.MonkeyPatch): + saved_extension = _make_extension(name="Docs API", api_key="saved-secret") + save_mock = MagicMock(return_value=saved_extension) + monkeypatch.setattr("controllers.console.extension.APIBasedExtensionService.save", save_mock) + + payload = { + "name": "Docs API", + "api_endpoint": "https://docs.example.com/hook", + "api_key": "plain-secret", + } + + with app.test_request_context("/console/api/api-based-extension", method="POST", json=payload): + response = APIBasedExtensionAPI().post() + + args, _ = save_mock.call_args + created_extension: APIBasedExtension = args[0] + assert created_extension.tenant_id == "tenant-123" + assert created_extension.name == payload["name"] + assert created_extension.api_endpoint == payload["api_endpoint"] + assert created_extension.api_key == payload["api_key"] + assert response["name"] == saved_extension.name + save_mock.assert_called_once() + + +def test_api_based_extension_detail_get_fetches_extension(app: Flask, monkeypatch: pytest.MonkeyPatch): + extension = _make_extension(name="Docs API", api_key="abcdefg12345") + service_mock = MagicMock(return_value=extension) + monkeypatch.setattr( + "controllers.console.extension.APIBasedExtensionService.get_with_tenant_id", + service_mock, + ) + + extension_id = uuid.uuid4() + with app.test_request_context(f"/console/api/api-based-extension/{extension_id}", method="GET"): + response = APIBasedExtensionDetailAPI().get(extension_id) + + assert response["id"] == extension.id + assert response["name"] == extension.name + service_mock.assert_called_once_with("tenant-123", str(extension_id)) + + +def test_api_based_extension_detail_post_keeps_hidden_api_key(app: Flask, monkeypatch: 
pytest.MonkeyPatch): + existing_extension = _make_extension(name="Docs API", api_key="keep-me") + get_mock = MagicMock(return_value=existing_extension) + save_mock = MagicMock(return_value=existing_extension) + monkeypatch.setattr( + "controllers.console.extension.APIBasedExtensionService.get_with_tenant_id", + get_mock, + ) + monkeypatch.setattr("controllers.console.extension.APIBasedExtensionService.save", save_mock) + + payload = { + "name": "Docs API Updated", + "api_endpoint": "https://docs.example.com/v2", + "api_key": HIDDEN_VALUE, + } + + extension_id = uuid.uuid4() + with app.test_request_context( + f"/console/api/api-based-extension/{extension_id}", + method="POST", + json=payload, + ): + response = APIBasedExtensionDetailAPI().post(extension_id) + + assert existing_extension.name == payload["name"] + assert existing_extension.api_endpoint == payload["api_endpoint"] + assert existing_extension.api_key == "keep-me" + save_mock.assert_called_once_with(existing_extension) + assert response["name"] == payload["name"] + + +def test_api_based_extension_detail_post_updates_api_key_when_provided(app: Flask, monkeypatch: pytest.MonkeyPatch): + existing_extension = _make_extension(name="Docs API", api_key="old-secret") + get_mock = MagicMock(return_value=existing_extension) + save_mock = MagicMock(return_value=existing_extension) + monkeypatch.setattr( + "controllers.console.extension.APIBasedExtensionService.get_with_tenant_id", + get_mock, + ) + monkeypatch.setattr("controllers.console.extension.APIBasedExtensionService.save", save_mock) + + payload = { + "name": "Docs API Updated", + "api_endpoint": "https://docs.example.com/v2", + "api_key": "new-secret", + } + + extension_id = uuid.uuid4() + with app.test_request_context( + f"/console/api/api-based-extension/{extension_id}", + method="POST", + json=payload, + ): + response = APIBasedExtensionDetailAPI().post(extension_id) + + assert existing_extension.api_key == "new-secret" + save_mock.assert_called_once_with(existing_extension) + assert response["name"] == payload["name"] + + +def test_api_based_extension_detail_delete_removes_extension(app: Flask, monkeypatch: pytest.MonkeyPatch): + existing_extension = _make_extension() + get_mock = MagicMock(return_value=existing_extension) + delete_mock = MagicMock() + monkeypatch.setattr( + "controllers.console.extension.APIBasedExtensionService.get_with_tenant_id", + get_mock, + ) + monkeypatch.setattr("controllers.console.extension.APIBasedExtensionService.delete", delete_mock) + + extension_id = uuid.uuid4() + with app.test_request_context( + f"/console/api/api-based-extension/{extension_id}", + method="DELETE", + ): + response, status = APIBasedExtensionDetailAPI().delete(extension_id) + + delete_mock.assert_called_once_with(existing_extension) + assert response == {"result": "success"} + assert status == 204 diff --git a/api/tests/unit_tests/controllers/console/test_files_security.py b/api/tests/unit_tests/controllers/console/test_files_security.py index 2630fbcfd0..370bf63fdb 100644 --- a/api/tests/unit_tests/controllers/console/test_files_security.py +++ b/api/tests/unit_tests/controllers/console/test_files_security.py @@ -1,7 +1,9 @@ +import builtins import io from unittest.mock import patch import pytest +from flask.views import MethodView from werkzeug.exceptions import Forbidden from controllers.common.errors import ( @@ -14,6 +16,9 @@ from controllers.common.errors import ( from services.errors.file import FileTooLargeError as ServiceFileTooLargeError from services.errors.file 
import UnsupportedFileTypeError as ServiceUnsupportedFileTypeError +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + class TestFileUploadSecurity: """Test file upload security logic without complex framework setup""" @@ -128,7 +133,7 @@ class TestFileUploadSecurity: # Test passes if no exception is raised # Test 4: Service error handling - @patch("services.file_service.FileService.upload_file") + @patch("controllers.console.files.FileService.upload_file") def test_should_handle_file_too_large_error(self, mock_upload): """Test that service FileTooLargeError is properly converted""" mock_upload.side_effect = ServiceFileTooLargeError("File too large") @@ -140,7 +145,7 @@ class TestFileUploadSecurity: with pytest.raises(FileTooLargeError): raise FileTooLargeError(e.description) - @patch("services.file_service.FileService.upload_file") + @patch("controllers.console.files.FileService.upload_file") def test_should_handle_unsupported_file_type_error(self, mock_upload): """Test that service UnsupportedFileTypeError is properly converted""" mock_upload.side_effect = ServiceUnsupportedFileTypeError() diff --git a/api/tests/unit_tests/controllers/console/workspace/__init__.py b/api/tests/unit_tests/controllers/console/workspace/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/controllers/console/workspace/test_load_balancing_config.py b/api/tests/unit_tests/controllers/console/workspace/test_load_balancing_config.py new file mode 100644 index 0000000000..59b6614d5e --- /dev/null +++ b/api/tests/unit_tests/controllers/console/workspace/test_load_balancing_config.py @@ -0,0 +1,145 @@ +"""Unit tests for load balancing credential validation APIs.""" + +from __future__ import annotations + +import builtins +import importlib +import sys +from types import SimpleNamespace +from unittest.mock import MagicMock + +import pytest +from flask import Flask +from flask.views import MethodView +from werkzeug.exceptions import Forbidden + +from core.model_runtime.entities.model_entities import ModelType +from core.model_runtime.errors.validate import CredentialsValidateFailedError + +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + +from models.account import TenantAccountRole + + +@pytest.fixture +def app() -> Flask: + app = Flask(__name__) + app.config["TESTING"] = True + return app + + +@pytest.fixture +def load_balancing_module(monkeypatch: pytest.MonkeyPatch): + """Reload controller module with lightweight decorators for testing.""" + + from controllers.console import console_ns, wraps + from libs import login + + def _noop(func): + return func + + monkeypatch.setattr(login, "login_required", _noop) + monkeypatch.setattr(wraps, "setup_required", _noop) + monkeypatch.setattr(wraps, "account_initialization_required", _noop) + + def _noop_route(*args, **kwargs): # type: ignore[override] + def _decorator(cls): + return cls + + return _decorator + + monkeypatch.setattr(console_ns, "route", _noop_route) + + module_name = "controllers.console.workspace.load_balancing_config" + sys.modules.pop(module_name, None) + module = importlib.import_module(module_name) + return module + + +def _mock_user(role: TenantAccountRole) -> SimpleNamespace: + return SimpleNamespace(current_role=role) + + +def _prepare_context(module, monkeypatch: pytest.MonkeyPatch, role=TenantAccountRole.OWNER): + user = _mock_user(role) + monkeypatch.setattr(module, "current_account_with_tenant", 
lambda: (user, "tenant-123")) + mock_service = MagicMock() + monkeypatch.setattr(module, "ModelLoadBalancingService", lambda: mock_service) + return mock_service + + +def _request_payload(): + return {"model": "gpt-4o", "model_type": ModelType.LLM, "credentials": {"api_key": "sk-***"}} + + +def test_validate_credentials_success(app: Flask, load_balancing_module, monkeypatch: pytest.MonkeyPatch): + service = _prepare_context(load_balancing_module, monkeypatch) + + with app.test_request_context( + "/workspaces/current/model-providers/openai/models/load-balancing-configs/credentials-validate", + method="POST", + json=_request_payload(), + ): + response = load_balancing_module.LoadBalancingCredentialsValidateApi().post(provider="openai") + + assert response == {"result": "success"} + service.validate_load_balancing_credentials.assert_called_once_with( + tenant_id="tenant-123", + provider="openai", + model="gpt-4o", + model_type=ModelType.LLM, + credentials={"api_key": "sk-***"}, + ) + + +def test_validate_credentials_returns_error_message(app: Flask, load_balancing_module, monkeypatch: pytest.MonkeyPatch): + service = _prepare_context(load_balancing_module, monkeypatch) + service.validate_load_balancing_credentials.side_effect = CredentialsValidateFailedError("invalid credentials") + + with app.test_request_context( + "/workspaces/current/model-providers/openai/models/load-balancing-configs/credentials-validate", + method="POST", + json=_request_payload(), + ): + response = load_balancing_module.LoadBalancingCredentialsValidateApi().post(provider="openai") + + assert response == {"result": "error", "error": "invalid credentials"} + + +def test_validate_credentials_requires_privileged_role( + app: Flask, load_balancing_module, monkeypatch: pytest.MonkeyPatch +): + _prepare_context(load_balancing_module, monkeypatch, role=TenantAccountRole.NORMAL) + + with app.test_request_context( + "/workspaces/current/model-providers/openai/models/load-balancing-configs/credentials-validate", + method="POST", + json=_request_payload(), + ): + api = load_balancing_module.LoadBalancingCredentialsValidateApi() + with pytest.raises(Forbidden): + api.post(provider="openai") + + +def test_validate_credentials_with_config_id(app: Flask, load_balancing_module, monkeypatch: pytest.MonkeyPatch): + service = _prepare_context(load_balancing_module, monkeypatch) + + with app.test_request_context( + "/workspaces/current/model-providers/openai/models/load-balancing-configs/cfg-1/credentials-validate", + method="POST", + json=_request_payload(), + ): + response = load_balancing_module.LoadBalancingConfigCredentialsValidateApi().post( + provider="openai", config_id="cfg-1" + ) + + assert response == {"result": "success"} + service.validate_load_balancing_credentials.assert_called_once_with( + tenant_id="tenant-123", + provider="openai", + model="gpt-4o", + model_type=ModelType.LLM, + credentials={"api_key": "sk-***"}, + config_id="cfg-1", + ) diff --git a/api/tests/unit_tests/controllers/console/workspace/test_tool_provider.py b/api/tests/unit_tests/controllers/console/workspace/test_tool_provider.py new file mode 100644 index 0000000000..c608f731c5 --- /dev/null +++ b/api/tests/unit_tests/controllers/console/workspace/test_tool_provider.py @@ -0,0 +1,100 @@ +import json +from unittest.mock import MagicMock, patch + +import pytest +from flask import Flask +from flask_restx import Api + +from controllers.console.workspace.tool_providers import ToolProviderMCPApi +from core.db.session_factory import configure_session_factory 
+from extensions.ext_database import db
+from services.tools.mcp_tools_manage_service import ReconnectResult
+
+
+# Backward-compat fixtures referenced by @pytest.mark.usefixtures in this file.
+# They are intentionally no-ops because the test already patches the required
+# behaviors explicitly via @patch and context managers below.
+@pytest.fixture
+def _mock_cache():
+    return
+
+
+@pytest.fixture
+def _mock_user_tenant():
+    return
+
+
+@pytest.fixture
+def client():
+    app = Flask(__name__)
+    app.config["TESTING"] = True
+    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
+    api = Api(app)
+    api.add_resource(ToolProviderMCPApi, "/console/api/workspaces/current/tool-provider/mcp")
+    db.init_app(app)
+    # Configure session factory used by controller code
+    with app.app_context():
+        configure_session_factory(db.engine)
+    return app.test_client()
+
+
+@patch(
+    "controllers.console.workspace.tool_providers.current_account_with_tenant", return_value=(MagicMock(id="u1"), "t1")
+)
+@patch("controllers.console.workspace.tool_providers.Session")
+@patch("controllers.console.workspace.tool_providers.MCPToolManageService._reconnect_with_url")
+@pytest.mark.usefixtures("_mock_cache", "_mock_user_tenant")
+def test_create_mcp_provider_populates_tools(mock_reconnect, mock_session, mock_current_account_with_tenant, client):
+    # Arrange: reconnect returns tools immediately
+    mock_reconnect.return_value = ReconnectResult(
+        authed=True,
+        tools=json.dumps(
+            [{"name": "ping", "description": "ok", "inputSchema": {"type": "object"}, "outputSchema": {}}]
+        ),
+        encrypted_credentials="{}",
+    )
+
+    # Fake service.create_provider -> returns object with id for reload
+    svc = MagicMock()
+    create_result = MagicMock()
+    create_result.id = "provider-1"
+    svc.create_provider.return_value = create_result
+    svc.get_provider.return_value = MagicMock(id="provider-1", tenant_id="t1")  # used by reload path
+    mock_session.return_value.__enter__.return_value = MagicMock()
+    # Patch MCPToolManageService constructed inside controller
+    with patch("controllers.console.workspace.tool_providers.MCPToolManageService", return_value=svc):
+        payload = {
+            "server_url": "http://example.com/mcp",
+            "name": "demo",
+            "icon": "😀",
+            "icon_type": "emoji",
+            "icon_background": "#000",
+            "server_identifier": "demo-sid",
+            "configuration": {"timeout": 5, "sse_read_timeout": 30},
+            "headers": {},
+            "authentication": {},
+        }
+        # Act
+        with (
+            patch("controllers.console.wraps.dify_config.EDITION", "CLOUD"),  # bypass setup_required DB check
+            patch("controllers.console.wraps.current_account_with_tenant", return_value=(MagicMock(id="u1"), "t1")),
+            patch("libs.login.check_csrf_token", return_value=None),  # bypass CSRF in login_required
+            patch("libs.login._get_user", return_value=MagicMock(id="u1", is_authenticated=True)),  # login
+            patch(
+                "services.tools.tools_transform_service.ToolTransformService.mcp_provider_to_user_provider",
+                return_value={"id": "provider-1", "tools": [{"name": "ping"}]},
+            ),
+        ):
+            resp = client.post(
+                "/console/api/workspaces/current/tool-provider/mcp",
+                data=json.dumps(payload),
+                content_type="application/json",
+            )
+
+    # Assert
+    assert resp.status_code == 200
+    body = resp.get_json()
+    assert body.get("id") == "provider-1"
+    # If the transformed result contains a "tools" field, ensure it is non-empty
+    assert isinstance(body.get("tools"), list)
+    assert body["tools"]
diff --git a/api/tests/unit_tests/controllers/service_api/app/test_file_preview.py index
acff191c79..1bdcd0f1a3 100644 --- a/api/tests/unit_tests/controllers/service_api/app/test_file_preview.py +++ b/api/tests/unit_tests/controllers/service_api/app/test_file_preview.py @@ -41,6 +41,7 @@ class TestFilePreviewApi: upload_file = Mock(spec=UploadFile) upload_file.id = str(uuid.uuid4()) upload_file.name = "test_file.jpg" + upload_file.extension = "jpg" upload_file.mime_type = "image/jpeg" upload_file.size = 1024 upload_file.key = "storage/key/test_file.jpg" @@ -210,6 +211,19 @@ class TestFilePreviewApi: assert mock_upload_file.name in response.headers["Content-Disposition"] assert response.headers["Content-Type"] == "application/octet-stream" + def test_build_file_response_html_forces_attachment(self, file_preview_api, mock_upload_file): + """Test HTML files are forced to download""" + mock_generator = Mock() + mock_upload_file.mime_type = "text/html" + mock_upload_file.name = "unsafe.html" + mock_upload_file.extension = "html" + + response = file_preview_api._build_file_response(mock_generator, mock_upload_file, False) + + assert "attachment" in response.headers["Content-Disposition"] + assert response.headers["Content-Type"] == "application/octet-stream" + assert response.headers["X-Content-Type-Options"] == "nosniff" + def test_build_file_response_audio_video(self, file_preview_api, mock_upload_file): """Test file response building for audio/video files""" mock_generator = Mock() diff --git a/api/tests/unit_tests/controllers/web/test_forgot_password.py b/api/tests/unit_tests/controllers/web/test_forgot_password.py new file mode 100644 index 0000000000..d7c0d24f14 --- /dev/null +++ b/api/tests/unit_tests/controllers/web/test_forgot_password.py @@ -0,0 +1,195 @@ +"""Unit tests for controllers.web.forgot_password endpoints.""" + +from __future__ import annotations + +import base64 +import builtins +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + +import pytest +from flask import Flask +from flask.views import MethodView + +# Ensure flask_restx.api finds MethodView during import. 
+if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + + +def _load_controller_module(): + """Import controllers.web.forgot_password using a stub package.""" + + import importlib + import importlib.util + import sys + from types import ModuleType + + parent_module_name = "controllers.web" + module_name = f"{parent_module_name}.forgot_password" + + if parent_module_name not in sys.modules: + from flask_restx import Namespace + + stub = ModuleType(parent_module_name) + stub.__file__ = "controllers/web/__init__.py" + stub.__path__ = ["controllers/web"] + stub.__package__ = "controllers" + stub.__spec__ = importlib.util.spec_from_loader(parent_module_name, loader=None, is_package=True) + stub.web_ns = Namespace("web", description="Web API", path="/") + sys.modules[parent_module_name] = stub + + return importlib.import_module(module_name) + + +forgot_password_module = _load_controller_module() +ForgotPasswordCheckApi = forgot_password_module.ForgotPasswordCheckApi +ForgotPasswordResetApi = forgot_password_module.ForgotPasswordResetApi +ForgotPasswordSendEmailApi = forgot_password_module.ForgotPasswordSendEmailApi + + +@pytest.fixture +def app() -> Flask: + """Configure a minimal Flask app for request contexts.""" + + app = Flask(__name__) + app.config["TESTING"] = True + return app + + +@pytest.fixture(autouse=True) +def _enable_web_endpoint_guards(): + """Stub enterprise and feature toggles used by route decorators.""" + + features = SimpleNamespace(enable_email_password_login=True) + with ( + patch("controllers.console.wraps.dify_config.ENTERPRISE_ENABLED", True), + patch("controllers.console.wraps.dify_config.EDITION", "CLOUD"), + patch("controllers.console.wraps.FeatureService.get_system_features", return_value=features), + ): + yield + + +@pytest.fixture(autouse=True) +def _mock_controller_db(): + """Replace controller-level db reference with a simple stub.""" + + fake_db = SimpleNamespace(engine=MagicMock(name="engine")) + fake_wraps_db = SimpleNamespace( + session=MagicMock(query=MagicMock(return_value=MagicMock(first=MagicMock(return_value=True)))) + ) + with ( + patch("controllers.web.forgot_password.db", fake_db), + patch("controllers.console.wraps.db", fake_wraps_db), + ): + yield fake_db + + +@patch("controllers.web.forgot_password.AccountService.send_reset_password_email", return_value="reset-token") +@patch("controllers.web.forgot_password.Session") +@patch("controllers.web.forgot_password.AccountService.is_email_send_ip_limit", return_value=False) +@patch("controllers.web.forgot_password.extract_remote_ip", return_value="203.0.113.10") +def test_send_reset_email_success( + mock_extract_ip: MagicMock, + mock_is_ip_limit: MagicMock, + mock_session: MagicMock, + mock_send_email: MagicMock, + app: Flask, +): + """POST /forgot-password returns token when email exists and limits allow.""" + + mock_account = MagicMock() + session_ctx = MagicMock() + mock_session.return_value.__enter__.return_value = session_ctx + session_ctx.execute.return_value.scalar_one_or_none.return_value = mock_account + + with app.test_request_context( + "/forgot-password", + method="POST", + json={"email": "user@example.com"}, + ): + response = ForgotPasswordSendEmailApi().post() + + assert response == {"result": "success", "data": "reset-token"} + mock_extract_ip.assert_called_once() + mock_is_ip_limit.assert_called_once_with("203.0.113.10") + mock_send_email.assert_called_once_with(account=mock_account, email="user@example.com", language="en-US") + + 
+@patch("controllers.web.forgot_password.AccountService.reset_forgot_password_error_rate_limit") +@patch("controllers.web.forgot_password.AccountService.generate_reset_password_token", return_value=({}, "new-token")) +@patch("controllers.web.forgot_password.AccountService.revoke_reset_password_token") +@patch("controllers.web.forgot_password.AccountService.get_reset_password_data") +@patch("controllers.web.forgot_password.AccountService.is_forgot_password_error_rate_limit", return_value=False) +def test_check_token_success( + mock_is_rate_limited: MagicMock, + mock_get_data: MagicMock, + mock_revoke: MagicMock, + mock_generate: MagicMock, + mock_reset_limit: MagicMock, + app: Flask, +): + """POST /forgot-password/validity validates the code and refreshes token.""" + + mock_get_data.return_value = {"email": "user@example.com", "code": "123456"} + + with app.test_request_context( + "/forgot-password/validity", + method="POST", + json={"email": "user@example.com", "code": "123456", "token": "old-token"}, + ): + response = ForgotPasswordCheckApi().post() + + assert response == {"is_valid": True, "email": "user@example.com", "token": "new-token"} + mock_is_rate_limited.assert_called_once_with("user@example.com") + mock_get_data.assert_called_once_with("old-token") + mock_revoke.assert_called_once_with("old-token") + mock_generate.assert_called_once_with( + "user@example.com", + code="123456", + additional_data={"phase": "reset"}, + ) + mock_reset_limit.assert_called_once_with("user@example.com") + + +@patch("controllers.web.forgot_password.hash_password", return_value=b"hashed-value") +@patch("controllers.web.forgot_password.secrets.token_bytes", return_value=b"0123456789abcdef") +@patch("controllers.web.forgot_password.Session") +@patch("controllers.web.forgot_password.AccountService.revoke_reset_password_token") +@patch("controllers.web.forgot_password.AccountService.get_reset_password_data") +def test_reset_password_success( + mock_get_data: MagicMock, + mock_revoke_token: MagicMock, + mock_session: MagicMock, + mock_token_bytes: MagicMock, + mock_hash_password: MagicMock, + app: Flask, +): + """POST /forgot-password/resets updates the stored password when token is valid.""" + + mock_get_data.return_value = {"email": "user@example.com", "phase": "reset"} + account = MagicMock() + session_ctx = MagicMock() + mock_session.return_value.__enter__.return_value = session_ctx + session_ctx.execute.return_value.scalar_one_or_none.return_value = account + + with app.test_request_context( + "/forgot-password/resets", + method="POST", + json={ + "token": "reset-token", + "new_password": "StrongPass123!", + "password_confirm": "StrongPass123!", + }, + ): + response = ForgotPasswordResetApi().post() + + assert response == {"result": "success"} + mock_get_data.assert_called_once_with("reset-token") + mock_revoke_token.assert_called_once_with("reset-token") + mock_token_bytes.assert_called_once_with(16) + mock_hash_password.assert_called_once_with("StrongPass123!", b"0123456789abcdef") + expected_password = base64.b64encode(b"hashed-value").decode() + assert account.password == expected_password + expected_salt = base64.b64encode(b"0123456789abcdef").decode() + assert account.password_salt == expected_salt + session_ctx.commit.assert_called_once() diff --git a/api/tests/unit_tests/controllers/web/test_message_list.py b/api/tests/unit_tests/controllers/web/test_message_list.py new file mode 100644 index 0000000000..2835f7ffbf --- /dev/null +++ b/api/tests/unit_tests/controllers/web/test_message_list.py @@ 
-0,0 +1,174 @@ +"""Unit tests for controllers.web.message message list mapping.""" + +from __future__ import annotations + +import builtins +from datetime import datetime +from types import ModuleType, SimpleNamespace +from unittest.mock import patch +from uuid import uuid4 + +import pytest +from flask import Flask +from flask.views import MethodView + +# Ensure flask_restx.api finds MethodView during import. +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + + +def _load_controller_module(): + """Import controllers.web.message using a stub package.""" + + import importlib + import importlib.util + import sys + + parent_module_name = "controllers.web" + module_name = f"{parent_module_name}.message" + + if parent_module_name not in sys.modules: + from flask_restx import Namespace + + stub = ModuleType(parent_module_name) + stub.__file__ = "controllers/web/__init__.py" + stub.__path__ = ["controllers/web"] + stub.__package__ = "controllers" + stub.__spec__ = importlib.util.spec_from_loader(parent_module_name, loader=None, is_package=True) + stub.web_ns = Namespace("web", description="Web API", path="/") + sys.modules[parent_module_name] = stub + + wraps_module_name = f"{parent_module_name}.wraps" + if wraps_module_name not in sys.modules: + wraps_stub = ModuleType(wraps_module_name) + + class WebApiResource: + pass + + wraps_stub.WebApiResource = WebApiResource + sys.modules[wraps_module_name] = wraps_stub + + return importlib.import_module(module_name) + + +message_module = _load_controller_module() +MessageListApi = message_module.MessageListApi + + +@pytest.fixture +def app() -> Flask: + app = Flask(__name__) + app.config["TESTING"] = True + return app + + +def test_message_list_mapping(app: Flask) -> None: + conversation_id = str(uuid4()) + message_id = str(uuid4()) + + created_at = datetime(2024, 1, 1, 12, 0, 0) + resource_created_at = datetime(2024, 1, 1, 13, 0, 0) + thought_created_at = datetime(2024, 1, 1, 14, 0, 0) + + retriever_resource_obj = SimpleNamespace( + id="res-obj", + message_id=message_id, + position=2, + dataset_id="ds-1", + dataset_name="dataset", + document_id="doc-1", + document_name="document", + data_source_type="file", + segment_id="seg-1", + score=0.9, + hit_count=1, + word_count=10, + segment_position=0, + index_node_hash="hash", + content="content", + created_at=resource_created_at, + ) + + agent_thought = SimpleNamespace( + id="thought-1", + chain_id=None, + message_chain_id="chain-1", + message_id=message_id, + position=1, + thought="thinking", + tool="tool", + tool_labels={"label": "value"}, + tool_input="{}", + created_at=thought_created_at, + observation="observed", + files=["file-a"], + ) + + message_file_obj = SimpleNamespace( + id="file-obj", + filename="b.txt", + type="file", + url=None, + mime_type=None, + size=None, + transfer_method="local", + belongs_to=None, + upload_file_id=None, + ) + + message = SimpleNamespace( + id=message_id, + conversation_id=conversation_id, + parent_message_id=None, + inputs={"foo": "bar"}, + query="hello", + re_sign_file_url_answer="answer", + user_feedback=SimpleNamespace(rating="like"), + retriever_resources=[ + {"id": "res-dict", "message_id": message_id, "position": 1}, + retriever_resource_obj, + ], + created_at=created_at, + agent_thoughts=[agent_thought], + message_files=[ + {"id": "file-dict", "filename": "a.txt", "type": "file", "transfer_method": "local"}, + message_file_obj, + ], + status="success", + error=None, + message_metadata_dict={"meta": "value"}, 
+ ) + + pagination = SimpleNamespace(limit=20, has_more=False, data=[message]) + app_model = SimpleNamespace(mode="chat") + end_user = SimpleNamespace() + + with ( + patch.object(message_module.MessageService, "pagination_by_first_id", return_value=pagination) as mock_page, + app.test_request_context(f"/messages?conversation_id={conversation_id}&limit=20"), + ): + response = MessageListApi().get(app_model, end_user) + + mock_page.assert_called_once_with(app_model, end_user, conversation_id, None, 20) + assert response["limit"] == 20 + assert response["has_more"] is False + assert len(response["data"]) == 1 + + item = response["data"][0] + assert item["id"] == message_id + assert item["conversation_id"] == conversation_id + assert item["inputs"] == {"foo": "bar"} + assert item["answer"] == "answer" + assert item["feedback"]["rating"] == "like" + assert item["metadata"] == {"meta": "value"} + assert item["created_at"] == int(created_at.timestamp()) + + assert item["retriever_resources"][0]["id"] == "res-dict" + assert item["retriever_resources"][1]["id"] == "res-obj" + assert item["retriever_resources"][1]["created_at"] == int(resource_created_at.timestamp()) + + assert item["agent_thoughts"][0]["chain_id"] == "chain-1" + assert item["agent_thoughts"][0]["created_at"] == int(thought_created_at.timestamp()) + + assert item["message_files"][0]["id"] == "file-dict" + assert item["message_files"][1]["id"] == "file-obj" diff --git a/api/tests/unit_tests/core/agent/__init__.py b/api/tests/unit_tests/core/agent/__init__.py new file mode 100644 index 0000000000..a9ccd45f4b --- /dev/null +++ b/api/tests/unit_tests/core/agent/__init__.py @@ -0,0 +1,3 @@ +""" +Mark agent test modules as a package to avoid import name collisions. +""" diff --git a/api/tests/unit_tests/core/agent/patterns/__init__.py b/api/tests/unit_tests/core/agent/patterns/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/core/agent/patterns/test_base.py b/api/tests/unit_tests/core/agent/patterns/test_base.py new file mode 100644 index 0000000000..b0e0d44940 --- /dev/null +++ b/api/tests/unit_tests/core/agent/patterns/test_base.py @@ -0,0 +1,324 @@ +"""Tests for AgentPattern base class.""" + +from decimal import Decimal +from unittest.mock import MagicMock + +import pytest + +from core.agent.entities import AgentLog, ExecutionContext +from core.agent.patterns.base import AgentPattern +from core.model_runtime.entities.llm_entities import LLMUsage + + +class ConcreteAgentPattern(AgentPattern): + """Concrete implementation of AgentPattern for testing.""" + + def run(self, prompt_messages, model_parameters, stop=[], stream=True): + """Minimal implementation for testing.""" + yield from [] + + +@pytest.fixture +def mock_model_instance(): + """Create a mock model instance.""" + model_instance = MagicMock() + model_instance.model = "test-model" + model_instance.provider = "test-provider" + return model_instance + + +@pytest.fixture +def mock_context(): + """Create a mock execution context.""" + return ExecutionContext( + user_id="test-user", + app_id="test-app", + conversation_id="test-conversation", + message_id="test-message", + tenant_id="test-tenant", + ) + + +@pytest.fixture +def agent_pattern(mock_model_instance, mock_context): + """Create a concrete agent pattern for testing.""" + return ConcreteAgentPattern( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + max_iterations=10, + ) + + +class TestAccumulateUsage: + """Tests for _accumulate_usage method.""" + + def 
test_accumulate_usage_to_empty_dict(self, agent_pattern): + """Test accumulating usage to an empty dict creates a copy.""" + total_usage: dict = {"usage": None} + delta_usage = LLMUsage( + prompt_tokens=100, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("0.001"), + prompt_price=Decimal("0.1"), + completion_tokens=50, + completion_unit_price=Decimal("0.002"), + completion_price_unit=Decimal("0.001"), + completion_price=Decimal("0.1"), + total_tokens=150, + total_price=Decimal("0.2"), + currency="USD", + latency=0.5, + ) + + agent_pattern._accumulate_usage(total_usage, delta_usage) + + assert total_usage["usage"] is not None + assert total_usage["usage"].total_tokens == 150 + assert total_usage["usage"].prompt_tokens == 100 + assert total_usage["usage"].completion_tokens == 50 + # Verify it's a copy, not a reference + assert total_usage["usage"] is not delta_usage + + def test_accumulate_usage_adds_to_existing(self, agent_pattern): + """Test accumulating usage adds to existing values.""" + initial_usage = LLMUsage( + prompt_tokens=100, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("0.001"), + prompt_price=Decimal("0.1"), + completion_tokens=50, + completion_unit_price=Decimal("0.002"), + completion_price_unit=Decimal("0.001"), + completion_price=Decimal("0.1"), + total_tokens=150, + total_price=Decimal("0.2"), + currency="USD", + latency=0.5, + ) + total_usage: dict = {"usage": initial_usage} + + delta_usage = LLMUsage( + prompt_tokens=200, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("0.001"), + prompt_price=Decimal("0.2"), + completion_tokens=100, + completion_unit_price=Decimal("0.002"), + completion_price_unit=Decimal("0.001"), + completion_price=Decimal("0.2"), + total_tokens=300, + total_price=Decimal("0.4"), + currency="USD", + latency=0.5, + ) + + agent_pattern._accumulate_usage(total_usage, delta_usage) + + assert total_usage["usage"].total_tokens == 450 # 150 + 300 + assert total_usage["usage"].prompt_tokens == 300 # 100 + 200 + assert total_usage["usage"].completion_tokens == 150 # 50 + 100 + + def test_accumulate_usage_multiple_rounds(self, agent_pattern): + """Test accumulating usage across multiple rounds.""" + total_usage: dict = {"usage": None} + + # Round 1: 100 tokens + round1_usage = LLMUsage( + prompt_tokens=70, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("0.001"), + prompt_price=Decimal("0.07"), + completion_tokens=30, + completion_unit_price=Decimal("0.002"), + completion_price_unit=Decimal("0.001"), + completion_price=Decimal("0.06"), + total_tokens=100, + total_price=Decimal("0.13"), + currency="USD", + latency=0.3, + ) + agent_pattern._accumulate_usage(total_usage, round1_usage) + assert total_usage["usage"].total_tokens == 100 + + # Round 2: 150 tokens + round2_usage = LLMUsage( + prompt_tokens=100, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("0.001"), + prompt_price=Decimal("0.1"), + completion_tokens=50, + completion_unit_price=Decimal("0.002"), + completion_price_unit=Decimal("0.001"), + completion_price=Decimal("0.1"), + total_tokens=150, + total_price=Decimal("0.2"), + currency="USD", + latency=0.4, + ) + agent_pattern._accumulate_usage(total_usage, round2_usage) + assert total_usage["usage"].total_tokens == 250 # 100 + 150 + + # Round 3: 200 tokens + round3_usage = LLMUsage( + prompt_tokens=130, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("0.001"), + prompt_price=Decimal("0.13"), + completion_tokens=70, + 
completion_unit_price=Decimal("0.002"), + completion_price_unit=Decimal("0.001"), + completion_price=Decimal("0.14"), + total_tokens=200, + total_price=Decimal("0.27"), + currency="USD", + latency=0.5, + ) + agent_pattern._accumulate_usage(total_usage, round3_usage) + assert total_usage["usage"].total_tokens == 450 # 100 + 150 + 200 + + +class TestCreateLog: + """Tests for _create_log method.""" + + def test_create_log_with_label_and_status(self, agent_pattern): + """Test creating a log with label and status.""" + log = agent_pattern._create_log( + label="ROUND 1", + log_type=AgentLog.LogType.ROUND, + status=AgentLog.LogStatus.START, + data={"key": "value"}, + ) + + assert log.label == "ROUND 1" + assert log.log_type == AgentLog.LogType.ROUND + assert log.status == AgentLog.LogStatus.START + assert log.data == {"key": "value"} + assert log.parent_id is None + + def test_create_log_with_parent_id(self, agent_pattern): + """Test creating a log with parent_id.""" + parent_log = agent_pattern._create_log( + label="ROUND 1", + log_type=AgentLog.LogType.ROUND, + status=AgentLog.LogStatus.START, + data={}, + ) + + child_log = agent_pattern._create_log( + label="CALL tool", + log_type=AgentLog.LogType.TOOL_CALL, + status=AgentLog.LogStatus.START, + data={}, + parent_id=parent_log.id, + ) + + assert child_log.parent_id == parent_log.id + assert child_log.log_type == AgentLog.LogType.TOOL_CALL + + +class TestFinishLog: + """Tests for _finish_log method.""" + + def test_finish_log_updates_status(self, agent_pattern): + """Test that finish_log updates status to SUCCESS.""" + log = agent_pattern._create_log( + label="ROUND 1", + log_type=AgentLog.LogType.ROUND, + status=AgentLog.LogStatus.START, + data={}, + ) + + finished_log = agent_pattern._finish_log(log, data={"result": "done"}) + + assert finished_log.status == AgentLog.LogStatus.SUCCESS + assert finished_log.data == {"result": "done"} + + def test_finish_log_adds_usage_metadata(self, agent_pattern): + """Test that finish_log adds usage to metadata.""" + log = agent_pattern._create_log( + label="ROUND 1", + log_type=AgentLog.LogType.ROUND, + status=AgentLog.LogStatus.START, + data={}, + ) + + usage = LLMUsage( + prompt_tokens=100, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("0.001"), + prompt_price=Decimal("0.1"), + completion_tokens=50, + completion_unit_price=Decimal("0.002"), + completion_price_unit=Decimal("0.001"), + completion_price=Decimal("0.1"), + total_tokens=150, + total_price=Decimal("0.2"), + currency="USD", + latency=0.5, + ) + + finished_log = agent_pattern._finish_log(log, usage=usage) + + assert finished_log.metadata[AgentLog.LogMetadata.TOTAL_TOKENS] == 150 + assert finished_log.metadata[AgentLog.LogMetadata.TOTAL_PRICE] == Decimal("0.2") + assert finished_log.metadata[AgentLog.LogMetadata.CURRENCY] == "USD" + assert finished_log.metadata[AgentLog.LogMetadata.LLM_USAGE] == usage + + +class TestFindToolByName: + """Tests for _find_tool_by_name method.""" + + def test_find_existing_tool(self, mock_model_instance, mock_context): + """Test finding an existing tool by name.""" + mock_tool = MagicMock() + mock_tool.entity.identity.name = "test_tool" + + pattern = ConcreteAgentPattern( + model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + ) + + found_tool = pattern._find_tool_by_name("test_tool") + assert found_tool == mock_tool + + def test_find_nonexistent_tool_returns_none(self, mock_model_instance, mock_context): + """Test that finding a nonexistent tool returns None.""" + 
mock_tool = MagicMock() + mock_tool.entity.identity.name = "test_tool" + + pattern = ConcreteAgentPattern( + model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + ) + + found_tool = pattern._find_tool_by_name("nonexistent_tool") + assert found_tool is None + + +class TestMaxIterationsCapping: + """Tests for max_iterations capping.""" + + def test_max_iterations_capped_at_99(self, mock_model_instance, mock_context): + """Test that max_iterations is capped at 99.""" + pattern = ConcreteAgentPattern( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + max_iterations=150, + ) + + assert pattern.max_iterations == 99 + + def test_max_iterations_not_capped_when_under_99(self, mock_model_instance, mock_context): + """Test that max_iterations is not capped when under 99.""" + pattern = ConcreteAgentPattern( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + max_iterations=50, + ) + + assert pattern.max_iterations == 50 diff --git a/api/tests/unit_tests/core/agent/patterns/test_function_call.py b/api/tests/unit_tests/core/agent/patterns/test_function_call.py new file mode 100644 index 0000000000..6b3600dbbf --- /dev/null +++ b/api/tests/unit_tests/core/agent/patterns/test_function_call.py @@ -0,0 +1,332 @@ +"""Tests for FunctionCallStrategy.""" + +from decimal import Decimal +from unittest.mock import MagicMock + +import pytest + +from core.agent.entities import AgentLog, ExecutionContext +from core.model_runtime.entities.llm_entities import LLMUsage +from core.model_runtime.entities.message_entities import ( + PromptMessageTool, + SystemPromptMessage, + UserPromptMessage, +) + + +@pytest.fixture +def mock_model_instance(): + """Create a mock model instance.""" + model_instance = MagicMock() + model_instance.model = "test-model" + model_instance.provider = "test-provider" + return model_instance + + +@pytest.fixture +def mock_context(): + """Create a mock execution context.""" + return ExecutionContext( + user_id="test-user", + app_id="test-app", + conversation_id="test-conversation", + message_id="test-message", + tenant_id="test-tenant", + ) + + +@pytest.fixture +def mock_tool(): + """Create a mock tool.""" + tool = MagicMock() + tool.entity.identity.name = "test_tool" + tool.to_prompt_message_tool.return_value = PromptMessageTool( + name="test_tool", + description="A test tool", + parameters={ + "type": "object", + "properties": {"param1": {"type": "string", "description": "A parameter"}}, + "required": ["param1"], + }, + ) + return tool + + +class TestFunctionCallStrategyInit: + """Tests for FunctionCallStrategy initialization.""" + + def test_initialization(self, mock_model_instance, mock_context, mock_tool): + """Test basic initialization.""" + from core.agent.patterns.function_call import FunctionCallStrategy + + strategy = FunctionCallStrategy( + model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + max_iterations=10, + ) + + assert strategy.model_instance == mock_model_instance + assert strategy.context == mock_context + assert strategy.max_iterations == 10 + assert len(strategy.tools) == 1 + + def test_initialization_with_tool_invoke_hook(self, mock_model_instance, mock_context, mock_tool): + """Test initialization with tool_invoke_hook.""" + from core.agent.patterns.function_call import FunctionCallStrategy + + mock_hook = MagicMock() + + strategy = FunctionCallStrategy( + model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + tool_invoke_hook=mock_hook, + 
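+ # the hook should be stored on the strategy verbatim (asserted below)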
) + + assert strategy.tool_invoke_hook == mock_hook + + +class TestConvertToolsToPromptFormat: + """Tests for _convert_tools_to_prompt_format method.""" + + def test_convert_tools_returns_prompt_message_tools(self, mock_model_instance, mock_context, mock_tool): + """Test that _convert_tools_to_prompt_format returns PromptMessageTool list.""" + from core.agent.patterns.function_call import FunctionCallStrategy + + strategy = FunctionCallStrategy( + model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + ) + + tools = strategy._convert_tools_to_prompt_format() + + assert len(tools) == 1 + assert isinstance(tools[0], PromptMessageTool) + assert tools[0].name == "test_tool" + + def test_convert_tools_empty_when_no_tools(self, mock_model_instance, mock_context): + """Test that _convert_tools_to_prompt_format returns empty list when no tools.""" + from core.agent.patterns.function_call import FunctionCallStrategy + + strategy = FunctionCallStrategy( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + ) + + tools = strategy._convert_tools_to_prompt_format() + + assert tools == [] + + +class TestAgentLogGeneration: + """Tests for AgentLog generation during run.""" + + def test_round_log_structure(self, mock_model_instance, mock_context, mock_tool): + """Test that round logs have correct structure.""" + from core.agent.patterns.function_call import FunctionCallStrategy + + strategy = FunctionCallStrategy( + model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + max_iterations=1, + ) + + # Create a round log + round_log = strategy._create_log( + label="ROUND 1", + log_type=AgentLog.LogType.ROUND, + status=AgentLog.LogStatus.START, + data={"inputs": {"query": "test"}}, + ) + + assert round_log.label == "ROUND 1" + assert round_log.log_type == AgentLog.LogType.ROUND + assert round_log.status == AgentLog.LogStatus.START + assert "inputs" in round_log.data + + def test_tool_call_log_structure(self, mock_model_instance, mock_context, mock_tool): + """Test that tool call logs have correct structure.""" + from core.agent.patterns.function_call import FunctionCallStrategy + + strategy = FunctionCallStrategy( + model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + ) + + # Create a parent round log + round_log = strategy._create_log( + label="ROUND 1", + log_type=AgentLog.LogType.ROUND, + status=AgentLog.LogStatus.START, + data={}, + ) + + # Create a tool call log + tool_log = strategy._create_log( + label="CALL test_tool", + log_type=AgentLog.LogType.TOOL_CALL, + status=AgentLog.LogStatus.START, + data={"tool_name": "test_tool", "tool_args": {"param1": "value1"}}, + parent_id=round_log.id, + ) + + assert tool_log.label == "CALL test_tool" + assert tool_log.log_type == AgentLog.LogType.TOOL_CALL + assert tool_log.parent_id == round_log.id + assert tool_log.data["tool_name"] == "test_tool" + + +class TestToolInvocation: + """Tests for tool invocation.""" + + def test_invoke_tool_with_hook(self, mock_model_instance, mock_context, mock_tool): + """Test that tool invocation uses hook when provided.""" + from core.agent.patterns.function_call import FunctionCallStrategy + from core.tools.entities.tool_entities import ToolInvokeMeta + + mock_hook = MagicMock() + mock_meta = ToolInvokeMeta( + time_cost=0.5, + error=None, + tool_config={"tool_provider_type": "test", "tool_provider": "test_id"}, + ) + mock_hook.return_value = ("Tool result", ["file-1"], mock_meta) + + strategy = FunctionCallStrategy( + 
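+ # with a hook supplied, _invoke_tool should delegate to it, presumably bypassing the default ToolEngine path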
model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + tool_invoke_hook=mock_hook, + ) + + result, files, meta = strategy._invoke_tool(mock_tool, {"param1": "value"}, "test_tool") + + mock_hook.assert_called_once() + assert result == "Tool result" + assert files == [] # Hook returns file IDs, but _invoke_tool returns empty File list + assert meta == mock_meta + + def test_invoke_tool_without_hook_attribute_set(self, mock_model_instance, mock_context, mock_tool): + """Test that tool_invoke_hook is None when not provided.""" + from core.agent.patterns.function_call import FunctionCallStrategy + + strategy = FunctionCallStrategy( + model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + tool_invoke_hook=None, + ) + + # Verify that tool_invoke_hook is None + assert strategy.tool_invoke_hook is None + + +class TestUsageTracking: + """Tests for usage tracking across rounds.""" + + def test_round_usage_is_separate_from_total(self, mock_model_instance, mock_context): + """Test that round usage is tracked separately from total.""" + from core.agent.patterns.function_call import FunctionCallStrategy + + strategy = FunctionCallStrategy( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + ) + + # Simulate two rounds of usage + total_usage: dict = {"usage": None} + round1_usage: dict = {"usage": None} + round2_usage: dict = {"usage": None} + + # Round 1 + usage1 = LLMUsage( + prompt_tokens=100, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("0.001"), + prompt_price=Decimal("0.1"), + completion_tokens=50, + completion_unit_price=Decimal("0.002"), + completion_price_unit=Decimal("0.001"), + completion_price=Decimal("0.1"), + total_tokens=150, + total_price=Decimal("0.2"), + currency="USD", + latency=0.5, + ) + strategy._accumulate_usage(round1_usage, usage1) + strategy._accumulate_usage(total_usage, usage1) + + # Round 2 + usage2 = LLMUsage( + prompt_tokens=200, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("0.001"), + prompt_price=Decimal("0.2"), + completion_tokens=100, + completion_unit_price=Decimal("0.002"), + completion_price_unit=Decimal("0.001"), + completion_price=Decimal("0.2"), + total_tokens=300, + total_price=Decimal("0.4"), + currency="USD", + latency=0.5, + ) + strategy._accumulate_usage(round2_usage, usage2) + strategy._accumulate_usage(total_usage, usage2) + + # Verify round usage is separate + assert round1_usage["usage"].total_tokens == 150 + assert round2_usage["usage"].total_tokens == 300 + # Verify total is accumulated + assert total_usage["usage"].total_tokens == 450 + + +class TestPromptMessageHandling: + """Tests for prompt message handling.""" + + def test_messages_include_system_and_user(self, mock_model_instance, mock_context, mock_tool): + """Test that messages include system and user prompts.""" + from core.agent.patterns.function_call import FunctionCallStrategy + + strategy = FunctionCallStrategy( + model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + ) + + messages = [ + SystemPromptMessage(content="You are a helpful assistant."), + UserPromptMessage(content="Hello"), + ] + + # Just verify the messages can be processed + assert len(messages) == 2 + assert isinstance(messages[0], SystemPromptMessage) + assert isinstance(messages[1], UserPromptMessage) + + def test_assistant_message_with_tool_calls(self, mock_model_instance, mock_context, mock_tool): + """Test that assistant messages can contain tool calls.""" + from 
core.model_runtime.entities.message_entities import AssistantPromptMessage + + tool_call = AssistantPromptMessage.ToolCall( + id="call_123", + type="function", + function=AssistantPromptMessage.ToolCall.ToolCallFunction( + name="test_tool", + arguments='{"param1": "value1"}', + ), + ) + + assistant_message = AssistantPromptMessage( + content="I'll help you with that.", + tool_calls=[tool_call], + ) + + assert len(assistant_message.tool_calls) == 1 + assert assistant_message.tool_calls[0].function.name == "test_tool" diff --git a/api/tests/unit_tests/core/agent/patterns/test_react.py b/api/tests/unit_tests/core/agent/patterns/test_react.py new file mode 100644 index 0000000000..a942ba6100 --- /dev/null +++ b/api/tests/unit_tests/core/agent/patterns/test_react.py @@ -0,0 +1,224 @@ +"""Tests for ReActStrategy.""" + +from unittest.mock import MagicMock + +import pytest + +from core.agent.entities import ExecutionContext +from core.agent.patterns.react import ReActStrategy +from core.model_runtime.entities import SystemPromptMessage, UserPromptMessage + + +@pytest.fixture +def mock_model_instance(): + """Create a mock model instance.""" + model_instance = MagicMock() + model_instance.model = "test-model" + model_instance.provider = "test-provider" + return model_instance + + +@pytest.fixture +def mock_context(): + """Create a mock execution context.""" + return ExecutionContext( + user_id="test-user", + app_id="test-app", + conversation_id="test-conversation", + message_id="test-message", + tenant_id="test-tenant", + ) + + +@pytest.fixture +def mock_tool(): + """Create a mock tool.""" + from core.model_runtime.entities.message_entities import PromptMessageTool + + tool = MagicMock() + tool.entity.identity.name = "test_tool" + tool.entity.identity.provider = "test_provider" + + # Use real PromptMessageTool for proper serialization + prompt_tool = PromptMessageTool( + name="test_tool", + description="A test tool", + parameters={"type": "object", "properties": {}}, + ) + tool.to_prompt_message_tool.return_value = prompt_tool + + return tool + + +class TestReActStrategyInit: + """Tests for ReActStrategy initialization.""" + + def test_init_with_instruction(self, mock_model_instance, mock_context): + """Test that instruction is stored correctly.""" + instruction = "You are a helpful assistant." 
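+ # this string is what later replaces the {{instruction}} placeholder (see TestBuildPromptWithReactFormat)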
+ + strategy = ReActStrategy( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + instruction=instruction, + ) + + assert strategy.instruction == instruction + + def test_init_with_empty_instruction(self, mock_model_instance, mock_context): + """Test that empty instruction is handled correctly.""" + strategy = ReActStrategy( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + ) + + assert strategy.instruction == "" + + +class TestBuildPromptWithReactFormat: + """Tests for _build_prompt_with_react_format method.""" + + def test_replace_tools_placeholder(self, mock_model_instance, mock_context, mock_tool): + """Test that {{tools}} placeholder is replaced.""" + strategy = ReActStrategy( + model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + ) + + system_content = "You have access to: {{tools}}" + messages = [ + SystemPromptMessage(content=system_content), + UserPromptMessage(content="Hello"), + ] + + result = strategy._build_prompt_with_react_format(messages, [], True) + + # The tools placeholder should be replaced with JSON + assert "{{tools}}" not in result[0].content + assert "test_tool" in result[0].content + + def test_replace_tool_names_placeholder(self, mock_model_instance, mock_context, mock_tool): + """Test that {{tool_names}} placeholder is replaced.""" + strategy = ReActStrategy( + model_instance=mock_model_instance, + tools=[mock_tool], + context=mock_context, + ) + + system_content = "Valid actions: {{tool_names}}" + messages = [ + SystemPromptMessage(content=system_content), + ] + + result = strategy._build_prompt_with_react_format(messages, [], True) + + assert "{{tool_names}}" not in result[0].content + assert '"test_tool"' in result[0].content + + def test_replace_instruction_placeholder(self, mock_model_instance, mock_context): + """Test that {{instruction}} placeholder is replaced.""" + instruction = "You are a helpful coding assistant." 
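+ # expected substitution, assuming plain string replacement:
+ #   "{{instruction}}\n\n..." -> "You are a helpful coding assistant.\n\n..."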
+ strategy = ReActStrategy( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + instruction=instruction, + ) + + system_content = "{{instruction}}\n\nYou have access to: {{tools}}" + messages = [ + SystemPromptMessage(content=system_content), + ] + + result = strategy._build_prompt_with_react_format(messages, [], True, instruction) + + assert "{{instruction}}" not in result[0].content + assert instruction in result[0].content + + def test_no_tools_available_message(self, mock_model_instance, mock_context): + """Test that 'No tools available' is shown when include_tools is False.""" + strategy = ReActStrategy( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + ) + + system_content = "You have access to: {{tools}}" + messages = [ + SystemPromptMessage(content=system_content), + ] + + result = strategy._build_prompt_with_react_format(messages, [], False) + + assert "No tools available" in result[0].content + + def test_scratchpad_appended_as_assistant_message(self, mock_model_instance, mock_context): + """Test that agent scratchpad is appended as AssistantPromptMessage.""" + from core.agent.entities import AgentScratchpadUnit + from core.model_runtime.entities import AssistantPromptMessage + + strategy = ReActStrategy( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + ) + + messages = [ + SystemPromptMessage(content="System prompt"), + UserPromptMessage(content="User query"), + ] + + scratchpad = [ + AgentScratchpadUnit( + thought="I need to search for information", + action_str='{"action": "search", "action_input": "query"}', + observation="Search results here", + ) + ] + + result = strategy._build_prompt_with_react_format(messages, scratchpad, True) + + # The last message should be an AssistantPromptMessage with scratchpad content + assert len(result) == 3 + assert isinstance(result[-1], AssistantPromptMessage) + assert "I need to search for information" in result[-1].content + assert "Search results here" in result[-1].content + + def test_empty_scratchpad_no_extra_message(self, mock_model_instance, mock_context): + """Test that empty scratchpad doesn't add extra message.""" + strategy = ReActStrategy( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + ) + + messages = [ + SystemPromptMessage(content="System prompt"), + UserPromptMessage(content="User query"), + ] + + result = strategy._build_prompt_with_react_format(messages, [], True) + + # Should only have the original 2 messages + assert len(result) == 2 + + def test_original_messages_not_modified(self, mock_model_instance, mock_context): + """Test that original messages list is not modified.""" + strategy = ReActStrategy( + model_instance=mock_model_instance, + tools=[], + context=mock_context, + ) + + original_content = "Original system prompt {{tools}}" + messages = [ + SystemPromptMessage(content=original_content), + ] + + strategy._build_prompt_with_react_format(messages, [], True) + + # Original message should not be modified + assert messages[0].content == original_content diff --git a/api/tests/unit_tests/core/agent/patterns/test_strategy_factory.py b/api/tests/unit_tests/core/agent/patterns/test_strategy_factory.py new file mode 100644 index 0000000000..07b9df2acf --- /dev/null +++ b/api/tests/unit_tests/core/agent/patterns/test_strategy_factory.py @@ -0,0 +1,203 @@ +"""Tests for StrategyFactory.""" + +from unittest.mock import MagicMock + +import pytest + +from core.agent.entities import AgentEntity, ExecutionContext +from 
core.agent.patterns.function_call import FunctionCallStrategy +from core.agent.patterns.react import ReActStrategy +from core.agent.patterns.strategy_factory import StrategyFactory +from core.model_runtime.entities.model_entities import ModelFeature + + +@pytest.fixture +def mock_model_instance(): + """Create a mock model instance.""" + model_instance = MagicMock() + model_instance.model = "test-model" + model_instance.provider = "test-provider" + return model_instance + + +@pytest.fixture +def mock_context(): + """Create a mock execution context.""" + return ExecutionContext( + user_id="test-user", + app_id="test-app", + conversation_id="test-conversation", + message_id="test-message", + tenant_id="test-tenant", + ) + + +class TestStrategyFactory: + """Tests for StrategyFactory.create_strategy method.""" + + def test_create_function_call_strategy_with_tool_call_feature(self, mock_model_instance, mock_context): + """Test that FunctionCallStrategy is created when model supports TOOL_CALL.""" + model_features = [ModelFeature.TOOL_CALL] + + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=mock_model_instance, + context=mock_context, + tools=[], + files=[], + ) + + assert isinstance(strategy, FunctionCallStrategy) + + def test_create_function_call_strategy_with_multi_tool_call_feature(self, mock_model_instance, mock_context): + """Test that FunctionCallStrategy is created when model supports MULTI_TOOL_CALL.""" + model_features = [ModelFeature.MULTI_TOOL_CALL] + + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=mock_model_instance, + context=mock_context, + tools=[], + files=[], + ) + + assert isinstance(strategy, FunctionCallStrategy) + + def test_create_function_call_strategy_with_stream_tool_call_feature(self, mock_model_instance, mock_context): + """Test that FunctionCallStrategy is created when model supports STREAM_TOOL_CALL.""" + model_features = [ModelFeature.STREAM_TOOL_CALL] + + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=mock_model_instance, + context=mock_context, + tools=[], + files=[], + ) + + assert isinstance(strategy, FunctionCallStrategy) + + def test_create_react_strategy_without_tool_call_features(self, mock_model_instance, mock_context): + """Test that ReActStrategy is created when model doesn't support tool calling.""" + model_features = [ModelFeature.VISION] # Only vision, no tool calling + + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=mock_model_instance, + context=mock_context, + tools=[], + files=[], + ) + + assert isinstance(strategy, ReActStrategy) + + def test_create_react_strategy_with_empty_features(self, mock_model_instance, mock_context): + """Test that ReActStrategy is created when model has no features.""" + model_features: list[ModelFeature] = [] + + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=mock_model_instance, + context=mock_context, + tools=[], + files=[], + ) + + assert isinstance(strategy, ReActStrategy) + + def test_explicit_function_calling_strategy_with_support(self, mock_model_instance, mock_context): + """Test explicit FUNCTION_CALLING strategy selection with model support.""" + model_features = [ModelFeature.TOOL_CALL] + + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=mock_model_instance, + context=mock_context, + tools=[], + files=[], + 
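+ # strategy is requested explicitly; the model also advertises TOOL_CALL, so FC should be honored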
agent_strategy=AgentEntity.Strategy.FUNCTION_CALLING, + ) + + assert isinstance(strategy, FunctionCallStrategy) + + def test_explicit_function_calling_strategy_without_support_falls_back_to_react( + self, mock_model_instance, mock_context + ): + """Test that explicit FUNCTION_CALLING falls back to ReAct when not supported.""" + model_features: list[ModelFeature] = [] # No tool calling support + + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=mock_model_instance, + context=mock_context, + tools=[], + files=[], + agent_strategy=AgentEntity.Strategy.FUNCTION_CALLING, + ) + + # Should fall back to ReAct since FC is not supported + assert isinstance(strategy, ReActStrategy) + + def test_explicit_chain_of_thought_strategy(self, mock_model_instance, mock_context): + """Test explicit CHAIN_OF_THOUGHT strategy selection.""" + model_features = [ModelFeature.TOOL_CALL] # Even with tool call support + + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=mock_model_instance, + context=mock_context, + tools=[], + files=[], + agent_strategy=AgentEntity.Strategy.CHAIN_OF_THOUGHT, + ) + + assert isinstance(strategy, ReActStrategy) + + def test_react_strategy_with_instruction(self, mock_model_instance, mock_context): + """Test that ReActStrategy receives instruction parameter.""" + model_features: list[ModelFeature] = [] + instruction = "You are a helpful assistant." + + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=mock_model_instance, + context=mock_context, + tools=[], + files=[], + instruction=instruction, + ) + + assert isinstance(strategy, ReActStrategy) + assert strategy.instruction == instruction + + def test_max_iterations_passed_to_strategy(self, mock_model_instance, mock_context): + """Test that max_iterations is passed to the strategy.""" + model_features = [ModelFeature.TOOL_CALL] + max_iterations = 5 + + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=mock_model_instance, + context=mock_context, + tools=[], + files=[], + max_iterations=max_iterations, + ) + + assert strategy.max_iterations == max_iterations + + def test_tool_invoke_hook_passed_to_strategy(self, mock_model_instance, mock_context): + """Test that tool_invoke_hook is passed to the strategy.""" + model_features = [ModelFeature.TOOL_CALL] + mock_hook = MagicMock() + + strategy = StrategyFactory.create_strategy( + model_features=model_features, + model_instance=mock_model_instance, + context=mock_context, + tools=[], + files=[], + tool_invoke_hook=mock_hook, + ) + + assert strategy.tool_invoke_hook == mock_hook diff --git a/api/tests/unit_tests/core/agent/test_agent_app_runner.py b/api/tests/unit_tests/core/agent/test_agent_app_runner.py new file mode 100644 index 0000000000..d9301ccfe0 --- /dev/null +++ b/api/tests/unit_tests/core/agent/test_agent_app_runner.py @@ -0,0 +1,388 @@ +"""Tests for AgentAppRunner.""" + +from decimal import Decimal +from unittest.mock import MagicMock, patch + +import pytest + +from core.agent.entities import AgentEntity, AgentLog, AgentPromptEntity, AgentResult +from core.model_runtime.entities import SystemPromptMessage, UserPromptMessage +from core.model_runtime.entities.llm_entities import LLMUsage + + +class TestOrganizePromptMessages: + """Tests for _organize_prompt_messages method.""" + + @pytest.fixture + def mock_runner(self): + """Create a mock AgentAppRunner for testing.""" + # We'll patch the class to avoid 
complex initialization + with patch("core.agent.agent_app_runner.BaseAgentRunner.__init__", return_value=None): + from core.agent.agent_app_runner import AgentAppRunner + + runner = AgentAppRunner.__new__(AgentAppRunner) + + # Set up required attributes + runner.config = MagicMock(spec=AgentEntity) + runner.config.strategy = AgentEntity.Strategy.FUNCTION_CALLING + runner.config.prompt = None + + runner.app_config = MagicMock() + runner.app_config.prompt_template = MagicMock() + runner.app_config.prompt_template.simple_prompt_template = "You are a helpful assistant." + + runner.history_prompt_messages = [] + runner.query = "Hello" + runner._current_thoughts = [] + runner.files = [] + runner.model_config = MagicMock() + runner.memory = None + runner.application_generate_entity = MagicMock() + runner.application_generate_entity.file_upload_config = None + + return runner + + def test_function_calling_uses_simple_prompt(self, mock_runner): + """Test that function calling strategy uses simple_prompt_template.""" + mock_runner.config.strategy = AgentEntity.Strategy.FUNCTION_CALLING + + with patch.object(mock_runner, "_init_system_message") as mock_init: + mock_init.return_value = [SystemPromptMessage(content="You are a helpful assistant.")] + with patch.object(mock_runner, "_organize_user_query") as mock_query: + mock_query.return_value = [UserPromptMessage(content="Hello")] + with patch("core.agent.agent_app_runner.AgentHistoryPromptTransform") as mock_transform: + mock_transform.return_value.get_prompt.return_value = [ + SystemPromptMessage(content="You are a helpful assistant.") + ] + + result = mock_runner._organize_prompt_messages() + + # Verify _init_system_message was called with simple_prompt_template + mock_init.assert_called_once() + call_args = mock_init.call_args[0] + assert call_args[0] == "You are a helpful assistant." 
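+ # The next two tests exercise the strategy switch: CHAIN_OF_THOUGHT should prefer
+ # config.prompt.first_prompt, falling back to simple_prompt_template when prompt is None.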
+ + def test_chain_of_thought_uses_agent_prompt(self, mock_runner): + """Test that chain of thought strategy uses agent prompt template.""" + mock_runner.config.strategy = AgentEntity.Strategy.CHAIN_OF_THOUGHT + mock_runner.config.prompt = AgentPromptEntity( + first_prompt="ReAct prompt template with {{tools}}", + next_iteration="Continue...", + ) + + with patch.object(mock_runner, "_init_system_message") as mock_init: + mock_init.return_value = [SystemPromptMessage(content="ReAct prompt template with {{tools}}")] + with patch.object(mock_runner, "_organize_user_query") as mock_query: + mock_query.return_value = [UserPromptMessage(content="Hello")] + with patch("core.agent.agent_app_runner.AgentHistoryPromptTransform") as mock_transform: + mock_transform.return_value.get_prompt.return_value = [ + SystemPromptMessage(content="ReAct prompt template with {{tools}}") + ] + + result = mock_runner._organize_prompt_messages() + + # Verify _init_system_message was called with agent prompt + mock_init.assert_called_once() + call_args = mock_init.call_args[0] + assert call_args[0] == "ReAct prompt template with {{tools}}" + + def test_chain_of_thought_without_prompt_falls_back(self, mock_runner): + """Test that chain of thought without prompt falls back to simple_prompt_template.""" + mock_runner.config.strategy = AgentEntity.Strategy.CHAIN_OF_THOUGHT + mock_runner.config.prompt = None + + with patch.object(mock_runner, "_init_system_message") as mock_init: + mock_init.return_value = [SystemPromptMessage(content="You are a helpful assistant.")] + with patch.object(mock_runner, "_organize_user_query") as mock_query: + mock_query.return_value = [UserPromptMessage(content="Hello")] + with patch("core.agent.agent_app_runner.AgentHistoryPromptTransform") as mock_transform: + mock_transform.return_value.get_prompt.return_value = [ + SystemPromptMessage(content="You are a helpful assistant.") + ] + + result = mock_runner._organize_prompt_messages() + + # Verify _init_system_message was called with simple_prompt_template + mock_init.assert_called_once() + call_args = mock_init.call_args[0] + assert call_args[0] == "You are a helpful assistant." 
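+# _init_system_message (exercised below) is expected to prepend a SystemPromptMessage only when a
+# non-empty template is given and the message list does not already start with a system message.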
+ + +class TestInitSystemMessage: + """Tests for _init_system_message method.""" + + @pytest.fixture + def mock_runner(self): + """Create a mock AgentAppRunner for testing.""" + with patch("core.agent.agent_app_runner.BaseAgentRunner.__init__", return_value=None): + from core.agent.agent_app_runner import AgentAppRunner + + runner = AgentAppRunner.__new__(AgentAppRunner) + return runner + + def test_empty_messages_with_template(self, mock_runner): + """Test that system message is created when messages are empty.""" + result = mock_runner._init_system_message("System template", []) + + assert len(result) == 1 + assert isinstance(result[0], SystemPromptMessage) + assert result[0].content == "System template" + + def test_empty_messages_without_template(self, mock_runner): + """Test that empty list is returned when no template and no messages.""" + result = mock_runner._init_system_message("", []) + + assert result == [] + + def test_existing_system_message_not_duplicated(self, mock_runner): + """Test that system message is not duplicated if already present.""" + existing_messages = [ + SystemPromptMessage(content="Existing system"), + UserPromptMessage(content="User message"), + ] + + result = mock_runner._init_system_message("New template", existing_messages) + + # Should not insert new system message + assert len(result) == 2 + assert result[0].content == "Existing system" + + def test_system_message_inserted_when_missing(self, mock_runner): + """Test that system message is inserted when first message is not system.""" + existing_messages = [ + UserPromptMessage(content="User message"), + ] + + result = mock_runner._init_system_message("System template", existing_messages) + + assert len(result) == 2 + assert isinstance(result[0], SystemPromptMessage) + assert result[0].content == "System template" + + +class TestClearUserPromptImageMessages: + """Tests for _clear_user_prompt_image_messages method.""" + + @pytest.fixture + def mock_runner(self): + """Create a mock AgentAppRunner for testing.""" + with patch("core.agent.agent_app_runner.BaseAgentRunner.__init__", return_value=None): + from core.agent.agent_app_runner import AgentAppRunner + + runner = AgentAppRunner.__new__(AgentAppRunner) + return runner + + def test_text_content_unchanged(self, mock_runner): + """Test that text content is unchanged.""" + messages = [ + UserPromptMessage(content="Plain text message"), + ] + + result = mock_runner._clear_user_prompt_image_messages(messages) + + assert len(result) == 1 + assert result[0].content == "Plain text message" + + def test_original_messages_not_modified(self, mock_runner): + """Test that original messages are not modified (deep copy).""" + from core.model_runtime.entities.message_entities import ( + ImagePromptMessageContent, + TextPromptMessageContent, + ) + + messages = [ + UserPromptMessage( + content=[ + TextPromptMessageContent(data="Text part"), + ImagePromptMessageContent( + data="http://example.com/image.jpg", + format="url", + mime_type="image/jpeg", + ), + ] + ), + ] + + result = mock_runner._clear_user_prompt_image_messages(messages) + + # Original should still have list content + assert isinstance(messages[0].content, list) + # Result should have string content + assert isinstance(result[0].content, str) + + +class TestToolInvokeHook: + """Tests for _create_tool_invoke_hook method.""" + + @pytest.fixture + def mock_runner(self): + """Create a mock AgentAppRunner for testing.""" + with patch("core.agent.agent_app_runner.BaseAgentRunner.__init__", return_value=None): + 
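+ # bypass the real runner __init__ via __new__; only the attributes the hook needs get set up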
from core.agent.agent_app_runner import AgentAppRunner + + runner = AgentAppRunner.__new__(AgentAppRunner) + + runner.user_id = "test-user" + runner.tenant_id = "test-tenant" + runner.application_generate_entity = MagicMock() + runner.application_generate_entity.trace_manager = None + runner.application_generate_entity.invoke_from = "api" + runner.application_generate_entity.app_config = MagicMock() + runner.application_generate_entity.app_config.app_id = "test-app" + runner.agent_callback = MagicMock() + runner.conversation = MagicMock() + runner.conversation.id = "test-conversation" + runner.queue_manager = MagicMock() + runner._current_message_file_ids = [] + + return runner + + def test_hook_calls_agent_invoke(self, mock_runner): + """Test that the hook calls ToolEngine.agent_invoke.""" + from core.tools.entities.tool_entities import ToolInvokeMeta + + mock_message = MagicMock() + mock_message.id = "test-message" + + mock_tool = MagicMock() + mock_tool_meta = ToolInvokeMeta( + time_cost=0.5, + error=None, + tool_config={ + "tool_provider_type": "test_provider", + "tool_provider": "test_id", + }, + ) + + with patch("core.agent.agent_app_runner.ToolEngine") as mock_engine: + mock_engine.agent_invoke.return_value = ("Tool result", ["file-1", "file-2"], mock_tool_meta) + + hook = mock_runner._create_tool_invoke_hook(mock_message) + result_content, result_files, result_meta = hook(mock_tool, {"arg": "value"}, "test_tool") + + # Verify ToolEngine.agent_invoke was called + mock_engine.agent_invoke.assert_called_once() + + # Verify return values + assert result_content == "Tool result" + assert result_files == ["file-1", "file-2"] + assert result_meta == mock_tool_meta + + def test_hook_publishes_file_events(self, mock_runner): + """Test that the hook publishes QueueMessageFileEvent for files.""" + from core.tools.entities.tool_entities import ToolInvokeMeta + + mock_message = MagicMock() + mock_message.id = "test-message" + + mock_tool = MagicMock() + mock_tool_meta = ToolInvokeMeta( + time_cost=0.5, + error=None, + tool_config={}, + ) + + with patch("core.agent.agent_app_runner.ToolEngine") as mock_engine: + mock_engine.agent_invoke.return_value = ("Tool result", ["file-1", "file-2"], mock_tool_meta) + + hook = mock_runner._create_tool_invoke_hook(mock_message) + hook(mock_tool, {}, "test_tool") + + # Verify file events were published + assert mock_runner.queue_manager.publish.call_count == 2 + assert mock_runner._current_message_file_ids == ["file-1", "file-2"] + + +class TestAgentLogProcessing: + """Tests for AgentLog processing in run method.""" + + def test_agent_log_status_enum(self): + """Test AgentLog status enum values.""" + assert AgentLog.LogStatus.START == "start" + assert AgentLog.LogStatus.SUCCESS == "success" + assert AgentLog.LogStatus.ERROR == "error" + + def test_agent_log_metadata_enum(self): + """Test AgentLog metadata enum values.""" + assert AgentLog.LogMetadata.STARTED_AT == "started_at" + assert AgentLog.LogMetadata.FINISHED_AT == "finished_at" + assert AgentLog.LogMetadata.ELAPSED_TIME == "elapsed_time" + assert AgentLog.LogMetadata.TOTAL_PRICE == "total_price" + assert AgentLog.LogMetadata.TOTAL_TOKENS == "total_tokens" + assert AgentLog.LogMetadata.LLM_USAGE == "llm_usage" + + def test_agent_result_structure(self): + """Test AgentResult structure.""" + usage = LLMUsage( + prompt_tokens=100, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("0.001"), + prompt_price=Decimal("0.1"), + completion_tokens=50, + completion_unit_price=Decimal("0.002"), + 
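+ # Decimal is used for every price field, presumably to avoid float rounding drift when usages are summed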
completion_price_unit=Decimal("0.001"), + completion_price=Decimal("0.1"), + total_tokens=150, + total_price=Decimal("0.2"), + currency="USD", + latency=0.5, + ) + + result = AgentResult( + text="Final answer", + files=[], + usage=usage, + finish_reason="stop", + ) + + assert result.text == "Final answer" + assert result.files == [] + assert result.usage == usage + assert result.finish_reason == "stop" + + +class TestOrganizeUserQuery: + """Tests for _organize_user_query method.""" + + @pytest.fixture + def mock_runner(self): + """Create a mock AgentAppRunner for testing.""" + with patch("core.agent.agent_app_runner.BaseAgentRunner.__init__", return_value=None): + from core.agent.agent_app_runner import AgentAppRunner + + runner = AgentAppRunner.__new__(AgentAppRunner) + runner.files = [] + runner.application_generate_entity = MagicMock() + runner.application_generate_entity.file_upload_config = None + return runner + + def test_simple_query_without_files(self, mock_runner): + """Test organizing a simple query without files.""" + result = mock_runner._organize_user_query("Hello world", []) + + assert len(result) == 1 + assert isinstance(result[0], UserPromptMessage) + assert result[0].content == "Hello world" + + def test_query_with_files(self, mock_runner): + """Test organizing a query with files.""" + from core.file.models import File + + mock_file = MagicMock(spec=File) + mock_runner.files = [mock_file] + + with patch("core.agent.agent_app_runner.file_manager") as mock_fm: + from core.model_runtime.entities.message_entities import ImagePromptMessageContent + + mock_fm.to_prompt_message_content.return_value = ImagePromptMessageContent( + data="http://example.com/image.jpg", + format="url", + mime_type="image/jpeg", + ) + + result = mock_runner._organize_user_query("Describe this image", []) + + assert len(result) == 1 + assert isinstance(result[0], UserPromptMessage) + assert isinstance(result[0].content, list) + assert len(result[0].content) == 2 # Image + Text diff --git a/api/tests/unit_tests/core/agent/test_entities.py b/api/tests/unit_tests/core/agent/test_entities.py new file mode 100644 index 0000000000..5136f48aab --- /dev/null +++ b/api/tests/unit_tests/core/agent/test_entities.py @@ -0,0 +1,191 @@ +"""Tests for agent entities.""" + +from core.agent.entities import AgentEntity, AgentLog, AgentPromptEntity, AgentScratchpadUnit, ExecutionContext + + +class TestExecutionContext: + """Tests for ExecutionContext entity.""" + + def test_create_with_all_fields(self): + """Test creating ExecutionContext with all fields.""" + context = ExecutionContext( + user_id="user-123", + app_id="app-456", + conversation_id="conv-789", + message_id="msg-012", + tenant_id="tenant-345", + ) + + assert context.user_id == "user-123" + assert context.app_id == "app-456" + assert context.conversation_id == "conv-789" + assert context.message_id == "msg-012" + assert context.tenant_id == "tenant-345" + + def test_create_minimal(self): + """Test creating minimal ExecutionContext.""" + context = ExecutionContext.create_minimal(user_id="user-123") + + assert context.user_id == "user-123" + assert context.app_id is None + assert context.conversation_id is None + assert context.message_id is None + assert context.tenant_id is None + + def test_to_dict(self): + """Test converting ExecutionContext to dictionary.""" + context = ExecutionContext( + user_id="user-123", + app_id="app-456", + conversation_id="conv-789", + message_id="msg-012", + tenant_id="tenant-345", + ) + + result = context.to_dict() + + assert 
result == { + "user_id": "user-123", + "app_id": "app-456", + "conversation_id": "conv-789", + "message_id": "msg-012", + "tenant_id": "tenant-345", + } + + def test_with_updates(self): + """Test creating new context with updates.""" + original = ExecutionContext( + user_id="user-123", + app_id="app-456", + ) + + updated = original.with_updates(message_id="msg-789") + + # Original should be unchanged + assert original.message_id is None + # Updated should have new value + assert updated.message_id == "msg-789" + assert updated.user_id == "user-123" + assert updated.app_id == "app-456" + + +class TestAgentLog: + """Tests for AgentLog entity.""" + + def test_create_log_with_required_fields(self): + """Test creating AgentLog with required fields.""" + log = AgentLog( + label="ROUND 1", + log_type=AgentLog.LogType.ROUND, + status=AgentLog.LogStatus.START, + data={"key": "value"}, + ) + + assert log.label == "ROUND 1" + assert log.log_type == AgentLog.LogType.ROUND + assert log.status == AgentLog.LogStatus.START + assert log.data == {"key": "value"} + assert log.id is not None # Auto-generated + assert log.parent_id is None + assert log.error is None + + def test_log_type_enum(self): + """Test LogType enum values.""" + assert AgentLog.LogType.ROUND == "round" + assert AgentLog.LogType.THOUGHT == "thought" + assert AgentLog.LogType.TOOL_CALL == "tool_call" + + def test_log_status_enum(self): + """Test LogStatus enum values.""" + assert AgentLog.LogStatus.START == "start" + assert AgentLog.LogStatus.SUCCESS == "success" + assert AgentLog.LogStatus.ERROR == "error" + + def test_log_metadata_enum(self): + """Test LogMetadata enum values.""" + assert AgentLog.LogMetadata.STARTED_AT == "started_at" + assert AgentLog.LogMetadata.FINISHED_AT == "finished_at" + assert AgentLog.LogMetadata.ELAPSED_TIME == "elapsed_time" + assert AgentLog.LogMetadata.TOTAL_PRICE == "total_price" + assert AgentLog.LogMetadata.TOTAL_TOKENS == "total_tokens" + assert AgentLog.LogMetadata.LLM_USAGE == "llm_usage" + + +class TestAgentScratchpadUnit: + """Tests for AgentScratchpadUnit entity.""" + + def test_is_final_with_final_answer_action(self): + """Test is_final returns True for Final Answer action.""" + unit = AgentScratchpadUnit( + thought="I know the answer", + action=AgentScratchpadUnit.Action( + action_name="Final Answer", + action_input="The answer is 42", + ), + ) + + assert unit.is_final() is True + + def test_is_final_with_tool_action(self): + """Test is_final returns False for tool action.""" + unit = AgentScratchpadUnit( + thought="I need to search", + action=AgentScratchpadUnit.Action( + action_name="search", + action_input={"query": "test"}, + ), + ) + + assert unit.is_final() is False + + def test_is_final_with_no_action(self): + """Test is_final returns True when no action.""" + unit = AgentScratchpadUnit( + thought="Just thinking", + ) + + assert unit.is_final() is True + + def test_action_to_dict(self): + """Test Action.to_dict method.""" + action = AgentScratchpadUnit.Action( + action_name="search", + action_input={"query": "test"}, + ) + + result = action.to_dict() + + assert result == { + "action": "search", + "action_input": {"query": "test"}, + } + + +class TestAgentEntity: + """Tests for AgentEntity.""" + + def test_strategy_enum(self): + """Test Strategy enum values.""" + assert AgentEntity.Strategy.CHAIN_OF_THOUGHT == "chain-of-thought" + assert AgentEntity.Strategy.FUNCTION_CALLING == "function-calling" + + def test_create_with_prompt(self): + """Test creating AgentEntity with prompt.""" + prompt 
= AgentPromptEntity( + first_prompt="You are a helpful assistant.", + next_iteration="Continue thinking...", + ) + + entity = AgentEntity( + provider="openai", + model="gpt-4", + strategy=AgentEntity.Strategy.CHAIN_OF_THOUGHT, + prompt=prompt, + max_iteration=5, + ) + + assert entity.provider == "openai" + assert entity.model == "gpt-4" + assert entity.strategy == AgentEntity.Strategy.CHAIN_OF_THOUGHT + assert entity.prompt == prompt + assert entity.max_iteration == 5 diff --git a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_truncation.py b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_truncation.py index 1c9f577a50..6b40bf462b 100644 --- a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_truncation.py +++ b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_truncation.py @@ -431,10 +431,10 @@ class TestWorkflowResponseConverterServiceApiTruncation: description="Explore calls should have truncation enabled", ), TestCase( - name="published_truncation_enabled", - invoke_from=InvokeFrom.PUBLISHED, + name="published_pipeline_truncation_enabled", + invoke_from=InvokeFrom.PUBLISHED_PIPELINE, expected_truncation_enabled=True, - description="Published app calls should have truncation enabled", + description="Published pipeline calls should have truncation enabled", ), ], ids=lambda x: x.name, diff --git a/api/tests/unit_tests/core/app/apps/test_base_app_generator.py b/api/tests/unit_tests/core/app/apps/test_base_app_generator.py index d622c3a555..1000d71399 100644 --- a/api/tests/unit_tests/core/app/apps/test_base_app_generator.py +++ b/api/tests/unit_tests/core/app/apps/test_base_app_generator.py @@ -287,7 +287,7 @@ def test_validate_inputs_optional_file_with_empty_string(): def test_validate_inputs_optional_file_list_with_empty_list(): - """Test that optional FILE_LIST variable with empty list returns None""" + """Test that optional FILE_LIST variable with empty list returns empty list (not None)""" base_app_generator = BaseAppGenerator() var_file_list = VariableEntity( @@ -302,6 +302,28 @@ def test_validate_inputs_optional_file_list_with_empty_list(): value=[], ) + # Empty list should be preserved, not converted to None + # This allows downstream components like document_extractor to handle empty lists properly + assert result == [] + + +def test_validate_inputs_optional_file_list_with_empty_string(): + """Test that optional FILE_LIST variable with empty string returns None""" + base_app_generator = BaseAppGenerator() + + var_file_list = VariableEntity( + variable="test_file_list", + label="test_file_list", + type=VariableEntityType.FILE_LIST, + required=False, + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_file_list, + value="", + ) + + # Empty string should be treated as unset assert result is None diff --git a/api/tests/unit_tests/core/app/apps/test_workflow_app_runner_stream_chunk.py b/api/tests/unit_tests/core/app/apps/test_workflow_app_runner_stream_chunk.py new file mode 100644 index 0000000000..8779e8c586 --- /dev/null +++ b/api/tests/unit_tests/core/app/apps/test_workflow_app_runner_stream_chunk.py @@ -0,0 +1,47 @@ +from unittest.mock import MagicMock + +from core.app.apps.base_app_queue_manager import PublishFrom +from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner +from core.workflow.graph_events import NodeRunStreamChunkEvent +from core.workflow.nodes import NodeType + + +class DummyQueueManager: + def __init__(self) -> None: + 
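+ # capture (event, publish_from) pairs so the test can assert exactly what was published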
self.published = [] + + def publish(self, event, publish_from: PublishFrom) -> None: + self.published.append((event, publish_from)) + + +def test_skip_empty_final_chunk() -> None: + queue_manager = DummyQueueManager() + runner = WorkflowBasedAppRunner(queue_manager=queue_manager, app_id="app") + + empty_final_event = NodeRunStreamChunkEvent( + id="exec", + node_id="node", + node_type=NodeType.LLM, + selector=["node", "text"], + chunk="", + is_final=True, + ) + + runner._handle_event(workflow_entry=MagicMock(), event=empty_final_event) + assert queue_manager.published == [] + + normal_event = NodeRunStreamChunkEvent( + id="exec", + node_id="node", + node_type=NodeType.LLM, + selector=["node", "text"], + chunk="hi", + is_final=False, + ) + + runner._handle_event(workflow_entry=MagicMock(), event=normal_event) + + assert len(queue_manager.published) == 1 + published_event, publish_from = queue_manager.published[0] + assert publish_from == PublishFrom.APPLICATION_MANAGER + assert published_event.text == "hi" diff --git a/api/tests/unit_tests/core/app/layers/test_conversation_variable_persist_layer.py b/api/tests/unit_tests/core/app/layers/test_conversation_variable_persist_layer.py new file mode 100644 index 0000000000..b6e8cc9c8e --- /dev/null +++ b/api/tests/unit_tests/core/app/layers/test_conversation_variable_persist_layer.py @@ -0,0 +1,144 @@ +from collections.abc import Sequence +from datetime import datetime +from unittest.mock import Mock + +from core.app.layers.conversation_variable_persist_layer import ConversationVariablePersistenceLayer +from core.variables import StringVariable +from core.variables.segments import Segment +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID +from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus +from core.workflow.graph_engine.protocols.command_channel import CommandChannel +from core.workflow.graph_events.node import NodeRunSucceededEvent +from core.workflow.node_events import NodeRunResult +from core.workflow.nodes.variable_assigner.common import helpers as common_helpers +from core.workflow.runtime.graph_runtime_state_protocol import ReadOnlyGraphRuntimeState +from core.workflow.system_variable import SystemVariable + + +class MockReadOnlyVariablePool: + def __init__(self, variables: dict[tuple[str, str], Segment] | None = None) -> None: + self._variables = variables or {} + + def get(self, selector: Sequence[str]) -> Segment | None: + if len(selector) < 2: + return None + return self._variables.get((selector[0], selector[1])) + + def get_all_by_node(self, node_id: str) -> dict[str, object]: + return {key: value for (nid, key), value in self._variables.items() if nid == node_id} + + def get_by_prefix(self, prefix: str) -> dict[str, object]: + return {key: value for (nid, key), value in self._variables.items() if nid == prefix} + + +def _build_graph_runtime_state( + variable_pool: MockReadOnlyVariablePool, + conversation_id: str | None = None, +) -> ReadOnlyGraphRuntimeState: + graph_runtime_state = Mock(spec=ReadOnlyGraphRuntimeState) + graph_runtime_state.variable_pool = variable_pool + graph_runtime_state.system_variable = SystemVariable(conversation_id=conversation_id).as_view() + return graph_runtime_state + + +def _build_node_run_succeeded_event( + *, + node_type: NodeType, + outputs: dict[str, object] | None = None, + process_data: dict[str, object] | None = None, +) -> NodeRunSucceededEvent: + return NodeRunSucceededEvent( + id="node-exec-id", + node_id="assigner", + node_type=node_type, + 
start_at=datetime.utcnow(), + node_run_result=NodeRunResult( + status=WorkflowNodeExecutionStatus.SUCCEEDED, + outputs=outputs or {}, + process_data=process_data or {}, + ), + ) + + +def test_persists_conversation_variables_from_assigner_output(): + conversation_id = "conv-123" + variable = StringVariable( + id="var-1", + name="name", + value="updated", + selector=[CONVERSATION_VARIABLE_NODE_ID, "name"], + ) + process_data = common_helpers.set_updated_variables( + {}, [common_helpers.variable_to_processed_data(variable.selector, variable)] + ) + + variable_pool = MockReadOnlyVariablePool({(CONVERSATION_VARIABLE_NODE_ID, "name"): variable}) + + updater = Mock() + layer = ConversationVariablePersistenceLayer(updater) + layer.initialize(_build_graph_runtime_state(variable_pool, conversation_id), Mock(spec=CommandChannel)) + + event = _build_node_run_succeeded_event(node_type=NodeType.VARIABLE_ASSIGNER, process_data=process_data) + layer.on_event(event) + + updater.update.assert_called_once_with(conversation_id=conversation_id, variable=variable) + updater.flush.assert_called_once() + + +def test_skips_when_outputs_missing(): + conversation_id = "conv-456" + variable = StringVariable( + id="var-2", + name="name", + value="updated", + selector=[CONVERSATION_VARIABLE_NODE_ID, "name"], + ) + + variable_pool = MockReadOnlyVariablePool({(CONVERSATION_VARIABLE_NODE_ID, "name"): variable}) + + updater = Mock() + layer = ConversationVariablePersistenceLayer(updater) + layer.initialize(_build_graph_runtime_state(variable_pool, conversation_id), Mock(spec=CommandChannel)) + + event = _build_node_run_succeeded_event(node_type=NodeType.VARIABLE_ASSIGNER) + layer.on_event(event) + + updater.update.assert_not_called() + updater.flush.assert_not_called() + + +def test_skips_non_assigner_nodes(): + updater = Mock() + layer = ConversationVariablePersistenceLayer(updater) + layer.initialize(_build_graph_runtime_state(MockReadOnlyVariablePool()), Mock(spec=CommandChannel)) + + event = _build_node_run_succeeded_event(node_type=NodeType.LLM) + layer.on_event(event) + + updater.update.assert_not_called() + updater.flush.assert_not_called() + + +def test_skips_non_conversation_variables(): + conversation_id = "conv-789" + non_conversation_variable = StringVariable( + id="var-3", + name="name", + value="updated", + selector=["environment", "name"], + ) + process_data = common_helpers.set_updated_variables( + {}, [common_helpers.variable_to_processed_data(non_conversation_variable.selector, non_conversation_variable)] + ) + + variable_pool = MockReadOnlyVariablePool() + + updater = Mock() + layer = ConversationVariablePersistenceLayer(updater) + layer.initialize(_build_graph_runtime_state(variable_pool, conversation_id), Mock(spec=CommandChannel)) + + event = _build_node_run_succeeded_event(node_type=NodeType.VARIABLE_ASSIGNER, process_data=process_data) + layer.on_event(event) + + updater.update.assert_not_called() + updater.flush.assert_not_called() diff --git a/api/tests/unit_tests/core/app/layers/test_pause_state_persist_layer.py b/api/tests/unit_tests/core/app/layers/test_pause_state_persist_layer.py index 534420f21e..1d885f6b2e 100644 --- a/api/tests/unit_tests/core/app/layers/test_pause_state_persist_layer.py +++ b/api/tests/unit_tests/core/app/layers/test_pause_state_persist_layer.py @@ -1,4 +1,5 @@ import json +from collections.abc import Sequence from time import time from unittest.mock import Mock @@ -15,6 +16,7 @@ from core.app.layers.pause_state_persist_layer import ( from core.variables.segments import 
Segment from core.workflow.entities.pause_reason import SchedulingPause from core.workflow.graph_engine.entities.commands import GraphEngineCommand +from core.workflow.graph_engine.layers.base import GraphEngineLayerNotInitializedError from core.workflow.graph_events.graph import ( GraphRunFailedEvent, GraphRunPausedEvent, @@ -66,8 +68,10 @@ class MockReadOnlyVariablePool: def __init__(self, variables: dict[tuple[str, str], object] | None = None): self._variables = variables or {} - def get(self, node_id: str, variable_key: str) -> Segment | None: - value = self._variables.get((node_id, variable_key)) + def get(self, selector: Sequence[str]) -> Segment | None: + if len(selector) < 2: + return None + value = self._variables.get((selector[0], selector[1])) if value is None: return None mock_segment = Mock(spec=Segment) @@ -209,8 +213,9 @@ class TestPauseStatePersistenceLayer: assert layer._session_maker is session_factory assert layer._state_owner_user_id == state_owner_user_id - assert not hasattr(layer, "graph_runtime_state") - assert not hasattr(layer, "command_channel") + with pytest.raises(GraphEngineLayerNotInitializedError): + _ = layer.graph_runtime_state + assert layer.command_channel is None def test_initialize_sets_dependencies(self): session_factory = Mock(name="session_factory") @@ -295,7 +300,7 @@ class TestPauseStatePersistenceLayer: mock_factory.assert_not_called() mock_repo.create_workflow_pause.assert_not_called() - def test_on_event_raises_attribute_error_when_graph_runtime_state_is_none(self): + def test_on_event_raises_when_graph_runtime_state_is_uninitialized(self): session_factory = Mock(name="session_factory") layer = PauseStatePersistenceLayer( session_factory=session_factory, @@ -305,7 +310,7 @@ class TestPauseStatePersistenceLayer: event = TestDataFactory.create_graph_run_paused_event() - with pytest.raises(AttributeError): + with pytest.raises(GraphEngineLayerNotInitializedError): layer.on_event(event) def test_on_event_asserts_when_workflow_execution_id_missing(self, monkeypatch: pytest.MonkeyPatch): diff --git a/api/tests/unit_tests/core/app/task_pipeline/test_easy_ui_based_generate_task_pipeline.py b/api/tests/unit_tests/core/app/task_pipeline/test_easy_ui_based_generate_task_pipeline.py new file mode 100644 index 0000000000..40f58c9ddf --- /dev/null +++ b/api/tests/unit_tests/core/app/task_pipeline/test_easy_ui_based_generate_task_pipeline.py @@ -0,0 +1,420 @@ +from types import SimpleNamespace +from unittest.mock import ANY, Mock, patch + +import pytest + +from core.app.apps.base_app_queue_manager import AppQueueManager +from core.app.entities.app_invoke_entities import ChatAppGenerateEntity +from core.app.entities.queue_entities import ( + QueueAgentMessageEvent, + QueueErrorEvent, + QueueLLMChunkEvent, + QueueMessageEndEvent, + QueueMessageFileEvent, + QueuePingEvent, +) +from core.app.entities.task_entities import ( + EasyUITaskState, + ErrorStreamResponse, + MessageEndStreamResponse, + MessageFileStreamResponse, + MessageReplaceStreamResponse, + MessageStreamResponse, + PingStreamResponse, + StreamEvent, +) +from core.app.task_pipeline.easy_ui_based_generate_task_pipeline import EasyUIBasedGenerateTaskPipeline +from core.base.tts import AppGeneratorTTSPublisher +from core.model_runtime.entities.llm_entities import LLMResult as RuntimeLLMResult +from core.model_runtime.entities.message_entities import TextPromptMessageContent +from core.ops.ops_trace_manager import TraceQueueManager +from models.model import AppMode + + +class 
TestEasyUIBasedGenerateTaskPipelineProcessStreamResponse: + """Test cases for EasyUIBasedGenerateTaskPipeline._process_stream_response method.""" + + @pytest.fixture + def mock_application_generate_entity(self): + """Create a mock application generate entity.""" + entity = Mock(spec=ChatAppGenerateEntity) + entity.task_id = "test-task-id" + entity.app_id = "test-app-id" + # minimal app_config used by pipeline internals + entity.app_config = SimpleNamespace( + tenant_id="test-tenant-id", + app_id="test-app-id", + app_mode=AppMode.CHAT, + app_model_config_dict={}, + additional_features=None, + sensitive_word_avoidance=None, + ) + # minimal model_conf for LLMResult init + entity.model_conf = SimpleNamespace( + model="test-model", + provider_model_bundle=SimpleNamespace(model_type_instance=Mock()), + credentials={}, + ) + return entity + + @pytest.fixture + def mock_queue_manager(self): + """Create a mock queue manager.""" + manager = Mock(spec=AppQueueManager) + return manager + + @pytest.fixture + def mock_message_cycle_manager(self): + """Create a mock message cycle manager.""" + manager = Mock() + manager.get_message_event_type.return_value = StreamEvent.MESSAGE + manager.message_to_stream_response.return_value = Mock(spec=MessageStreamResponse) + manager.message_file_to_stream_response.return_value = Mock(spec=MessageFileStreamResponse) + manager.message_replace_to_stream_response.return_value = Mock(spec=MessageReplaceStreamResponse) + manager.handle_retriever_resources = Mock() + manager.handle_annotation_reply.return_value = None + return manager + + @pytest.fixture + def mock_conversation(self): + """Create a mock conversation.""" + conversation = Mock() + conversation.id = "test-conversation-id" + conversation.mode = "chat" + return conversation + + @pytest.fixture + def mock_message(self): + """Create a mock message.""" + message = Mock() + message.id = "test-message-id" + message.created_at = Mock() + message.created_at.timestamp.return_value = 1234567890 + return message + + @pytest.fixture + def mock_task_state(self): + """Create a mock task state.""" + task_state = Mock(spec=EasyUITaskState) + + # Create LLM result mock + llm_result = Mock(spec=RuntimeLLMResult) + llm_result.prompt_messages = [] + llm_result.message = Mock() + llm_result.message.content = "" + + task_state.llm_result = llm_result + task_state.answer = "" + + return task_state + + @pytest.fixture + def pipeline( + self, + mock_application_generate_entity, + mock_queue_manager, + mock_conversation, + mock_message, + mock_message_cycle_manager, + mock_task_state, + ): + """Create an EasyUIBasedGenerateTaskPipeline instance with mocked dependencies.""" + with patch( + "core.app.task_pipeline.easy_ui_based_generate_task_pipeline.EasyUITaskState", return_value=mock_task_state + ): + pipeline = EasyUIBasedGenerateTaskPipeline( + application_generate_entity=mock_application_generate_entity, + queue_manager=mock_queue_manager, + conversation=mock_conversation, + message=mock_message, + stream=True, + ) + pipeline._message_cycle_manager = mock_message_cycle_manager + pipeline._task_state = mock_task_state + return pipeline + + def test_get_message_event_type_called_once_when_first_llm_chunk_arrives( + self, pipeline, mock_message_cycle_manager + ): + """Expect get_message_event_type to be called when processing the first LLM chunk event.""" + # Setup a minimal LLM chunk event + chunk = Mock() + chunk.delta.message.content = "hi" + chunk.prompt_messages = [] + llm_chunk_event = Mock(spec=QueueLLMChunkEvent) + 
llm_chunk_event.chunk = chunk + mock_queue_message = Mock() + mock_queue_message.event = llm_chunk_event + pipeline.queue_manager.listen.return_value = [mock_queue_message] + + # Execute + list(pipeline._process_stream_response(publisher=None, trace_manager=None)) + + # Assert + mock_message_cycle_manager.get_message_event_type.assert_called_once_with(message_id="test-message-id") + + def test_llm_chunk_event_with_text_content(self, pipeline, mock_message_cycle_manager, mock_task_state): + """Test handling of LLM chunk events with text content.""" + # Setup + chunk = Mock() + chunk.delta.message.content = "Hello, world!" + chunk.prompt_messages = [] + + llm_chunk_event = Mock(spec=QueueLLMChunkEvent) + llm_chunk_event.chunk = chunk + + mock_queue_message = Mock() + mock_queue_message.event = llm_chunk_event + pipeline.queue_manager.listen.return_value = [mock_queue_message] + + mock_message_cycle_manager.get_message_event_type.return_value = StreamEvent.MESSAGE + + # Execute + responses = list(pipeline._process_stream_response(publisher=None, trace_manager=None)) + + # Assert + assert len(responses) == 1 + mock_message_cycle_manager.message_to_stream_response.assert_called_once_with( + answer="Hello, world!", message_id="test-message-id", event_type=StreamEvent.MESSAGE + ) + assert mock_task_state.llm_result.message.content == "Hello, world!" + + def test_llm_chunk_event_with_list_content(self, pipeline, mock_message_cycle_manager, mock_task_state): + """Test handling of LLM chunk events with list content.""" + # Setup + text_content = Mock(spec=TextPromptMessageContent) + text_content.data = "Hello" + + chunk = Mock() + chunk.delta.message.content = [text_content, " world!"] + chunk.prompt_messages = [] + + llm_chunk_event = Mock(spec=QueueLLMChunkEvent) + llm_chunk_event.chunk = chunk + + mock_queue_message = Mock() + mock_queue_message.event = llm_chunk_event + pipeline.queue_manager.listen.return_value = [mock_queue_message] + + mock_message_cycle_manager.get_message_event_type.return_value = StreamEvent.MESSAGE + + # Execute + responses = list(pipeline._process_stream_response(publisher=None, trace_manager=None)) + + # Assert + assert len(responses) == 1 + mock_message_cycle_manager.message_to_stream_response.assert_called_once_with( + answer="Hello world!", message_id="test-message-id", event_type=StreamEvent.MESSAGE + ) + assert mock_task_state.llm_result.message.content == "Hello world!" 
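+
+    # The list-content case above pins down the flattening rule the pipeline
+    # is assumed to apply to multimodal deltas: TextPromptMessageContent items
+    # contribute their `.data`, plain string items are appended as-is. A
+    # minimal sketch of that rule (hypothetical helper, for illustration only,
+    # not part of the pipeline API):
+    #
+    #     def _flatten_delta_content(content) -> str:
+    #         if isinstance(content, str):
+    #             return content
+    #         return "".join(
+    #             p.data if isinstance(p, TextPromptMessageContent) else str(p)
+    #             for p in content
+    #         )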
+ + def test_agent_message_event(self, pipeline, mock_message_cycle_manager, mock_task_state): + """Test handling of agent message events.""" + # Setup + chunk = Mock() + chunk.delta.message.content = "Agent response" + + agent_message_event = Mock(spec=QueueAgentMessageEvent) + agent_message_event.chunk = chunk + + mock_queue_message = Mock() + mock_queue_message.event = agent_message_event + pipeline.queue_manager.listen.return_value = [mock_queue_message] + + # Ensure method under assertion is a mock to track calls + pipeline._agent_message_to_stream_response = Mock(return_value=Mock()) + + # Execute + responses = list(pipeline._process_stream_response(publisher=None, trace_manager=None)) + + # Assert + assert len(responses) == 1 + # Agent messages should use _agent_message_to_stream_response + pipeline._agent_message_to_stream_response.assert_called_once_with( + answer="Agent response", message_id="test-message-id" + ) + + def test_message_end_event(self, pipeline, mock_message_cycle_manager, mock_task_state): + """Test handling of message end events.""" + # Setup + llm_result = Mock(spec=RuntimeLLMResult) + llm_result.message = Mock() + llm_result.message.content = "Final response" + + message_end_event = Mock(spec=QueueMessageEndEvent) + message_end_event.llm_result = llm_result + + mock_queue_message = Mock() + mock_queue_message.event = message_end_event + pipeline.queue_manager.listen.return_value = [mock_queue_message] + + pipeline._save_message = Mock() + pipeline._message_end_to_stream_response = Mock(return_value=Mock(spec=MessageEndStreamResponse)) + + # Patch db.engine used inside pipeline for session creation + with patch( + "core.app.task_pipeline.easy_ui_based_generate_task_pipeline.db", new=SimpleNamespace(engine=Mock()) + ): + # Execute + responses = list(pipeline._process_stream_response(publisher=None, trace_manager=None)) + + # Assert + assert len(responses) == 1 + assert mock_task_state.llm_result == llm_result + pipeline._save_message.assert_called_once() + pipeline._message_end_to_stream_response.assert_called_once() + + def test_error_event(self, pipeline): + """Test handling of error events.""" + # Setup + error_event = Mock(spec=QueueErrorEvent) + error_event.error = Exception("Test error") + + mock_queue_message = Mock() + mock_queue_message.event = error_event + pipeline.queue_manager.listen.return_value = [mock_queue_message] + + pipeline.handle_error = Mock(return_value=Exception("Test error")) + pipeline.error_to_stream_response = Mock(return_value=Mock(spec=ErrorStreamResponse)) + + # Patch db.engine used inside pipeline for session creation + with patch( + "core.app.task_pipeline.easy_ui_based_generate_task_pipeline.db", new=SimpleNamespace(engine=Mock()) + ): + # Execute + responses = list(pipeline._process_stream_response(publisher=None, trace_manager=None)) + + # Assert + assert len(responses) == 1 + pipeline.handle_error.assert_called_once() + pipeline.error_to_stream_response.assert_called_once() + + def test_ping_event(self, pipeline): + """Test handling of ping events.""" + # Setup + ping_event = Mock(spec=QueuePingEvent) + + mock_queue_message = Mock() + mock_queue_message.event = ping_event + pipeline.queue_manager.listen.return_value = [mock_queue_message] + + pipeline.ping_stream_response = Mock(return_value=Mock(spec=PingStreamResponse)) + + # Execute + responses = list(pipeline._process_stream_response(publisher=None, trace_manager=None)) + + # Assert + assert len(responses) == 1 + pipeline.ping_stream_response.assert_called_once() + + def 
test_file_event(self, pipeline, mock_message_cycle_manager): + """Test handling of file events.""" + # Setup + file_event = Mock(spec=QueueMessageFileEvent) + file_event.message_file_id = "file-id" + + mock_queue_message = Mock() + mock_queue_message.event = file_event + pipeline.queue_manager.listen.return_value = [mock_queue_message] + + file_response = Mock(spec=MessageFileStreamResponse) + mock_message_cycle_manager.message_file_to_stream_response.return_value = file_response + + # Execute + responses = list(pipeline._process_stream_response(publisher=None, trace_manager=None)) + + # Assert + assert len(responses) == 1 + assert responses[0] == file_response + mock_message_cycle_manager.message_file_to_stream_response.assert_called_once_with(file_event) + + def test_publisher_is_called_with_messages(self, pipeline): + """Test that publisher publishes messages when provided.""" + # Setup + publisher = Mock(spec=AppGeneratorTTSPublisher) + + ping_event = Mock(spec=QueuePingEvent) + mock_queue_message = Mock() + mock_queue_message.event = ping_event + pipeline.queue_manager.listen.return_value = [mock_queue_message] + + pipeline.ping_stream_response = Mock(return_value=Mock(spec=PingStreamResponse)) + + # Execute + list(pipeline._process_stream_response(publisher=publisher, trace_manager=None)) + + # Assert + # Called once with message and once with None at the end + assert publisher.publish.call_count == 2 + publisher.publish.assert_any_call(mock_queue_message) + publisher.publish.assert_any_call(None) + + def test_trace_manager_passed_to_save_message(self, pipeline): + """Test that trace manager is passed to _save_message.""" + # Setup + trace_manager = Mock(spec=TraceQueueManager) + + message_end_event = Mock(spec=QueueMessageEndEvent) + message_end_event.llm_result = None + + mock_queue_message = Mock() + mock_queue_message.event = message_end_event + pipeline.queue_manager.listen.return_value = [mock_queue_message] + + pipeline._save_message = Mock() + pipeline._message_end_to_stream_response = Mock(return_value=Mock(spec=MessageEndStreamResponse)) + + # Patch db.engine used inside pipeline for session creation + with patch( + "core.app.task_pipeline.easy_ui_based_generate_task_pipeline.db", new=SimpleNamespace(engine=Mock()) + ): + # Execute + list(pipeline._process_stream_response(publisher=None, trace_manager=trace_manager)) + + # Assert + pipeline._save_message.assert_called_once_with(session=ANY, trace_manager=trace_manager) + + def test_multiple_events_sequence(self, pipeline, mock_message_cycle_manager, mock_task_state): + """Test handling multiple events in sequence.""" + # Setup + chunk1 = Mock() + chunk1.delta.message.content = "Hello" + chunk1.prompt_messages = [] + + chunk2 = Mock() + chunk2.delta.message.content = " world!" 
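+        # chunk2 continues the same stream; the pipeline is expected to append
+        # each delta to task_state.llm_result.message.content, yielding
+        # "Hello world!" after both chunks (asserted below).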
+ chunk2.prompt_messages = [] + + llm_chunk_event1 = Mock(spec=QueueLLMChunkEvent) + llm_chunk_event1.chunk = chunk1 + + ping_event = Mock(spec=QueuePingEvent) + + llm_chunk_event2 = Mock(spec=QueueLLMChunkEvent) + llm_chunk_event2.chunk = chunk2 + + mock_queue_messages = [ + Mock(event=llm_chunk_event1), + Mock(event=ping_event), + Mock(event=llm_chunk_event2), + ] + pipeline.queue_manager.listen.return_value = mock_queue_messages + + mock_message_cycle_manager.get_message_event_type.return_value = StreamEvent.MESSAGE + pipeline.ping_stream_response = Mock(return_value=Mock(spec=PingStreamResponse)) + + # Execute + responses = list(pipeline._process_stream_response(publisher=None, trace_manager=None)) + + # Assert + assert len(responses) == 3 + assert mock_task_state.llm_result.message.content == "Hello world!" + + # Verify calls to message_to_stream_response + assert mock_message_cycle_manager.message_to_stream_response.call_count == 2 + mock_message_cycle_manager.message_to_stream_response.assert_any_call( + answer="Hello", message_id="test-message-id", event_type=StreamEvent.MESSAGE + ) + mock_message_cycle_manager.message_to_stream_response.assert_any_call( + answer=" world!", message_id="test-message-id", event_type=StreamEvent.MESSAGE + ) diff --git a/api/tests/unit_tests/core/app/task_pipeline/test_message_cycle_manager_optimization.py b/api/tests/unit_tests/core/app/task_pipeline/test_message_cycle_manager_optimization.py new file mode 100644 index 0000000000..5ef7f0d7f4 --- /dev/null +++ b/api/tests/unit_tests/core/app/task_pipeline/test_message_cycle_manager_optimization.py @@ -0,0 +1,166 @@ +"""Unit tests for the message cycle manager optimization.""" + +from types import SimpleNamespace +from unittest.mock import ANY, Mock, patch + +import pytest +from flask import current_app + +from core.app.entities.task_entities import MessageStreamResponse, StreamEvent +from core.app.task_pipeline.message_cycle_manager import MessageCycleManager + + +class TestMessageCycleManagerOptimization: + """Test cases for the message cycle manager optimization that prevents N+1 queries.""" + + @pytest.fixture + def mock_application_generate_entity(self): + """Create a mock application generate entity.""" + entity = Mock() + entity.task_id = "test-task-id" + return entity + + @pytest.fixture + def message_cycle_manager(self, mock_application_generate_entity): + """Create a message cycle manager instance.""" + task_state = Mock() + return MessageCycleManager(application_generate_entity=mock_application_generate_entity, task_state=task_state) + + def test_get_message_event_type_with_message_file(self, message_cycle_manager): + """Test get_message_event_type returns MESSAGE_FILE when message has files.""" + with ( + patch("core.app.task_pipeline.message_cycle_manager.Session") as mock_session_class, + patch("core.app.task_pipeline.message_cycle_manager.db", new=SimpleNamespace(engine=Mock())), + ): + # Setup mock session and message file + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + mock_message_file = Mock() + # Current implementation uses session.query(...).scalar() + mock_session.query.return_value.scalar.return_value = mock_message_file + + # Execute + with current_app.app_context(): + result = message_cycle_manager.get_message_event_type("test-message-id") + + # Assert + assert result == StreamEvent.MESSAGE_FILE + mock_session.query.return_value.scalar.assert_called_once() + + def test_get_message_event_type_without_message_file(self, 
message_cycle_manager): + """Test get_message_event_type returns MESSAGE when message has no files.""" + with ( + patch("core.app.task_pipeline.message_cycle_manager.Session") as mock_session_class, + patch("core.app.task_pipeline.message_cycle_manager.db", new=SimpleNamespace(engine=Mock())), + ): + # Setup mock session and no message file + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + # Current implementation uses session.query(...).scalar() + mock_session.query.return_value.scalar.return_value = None + + # Execute + with current_app.app_context(): + result = message_cycle_manager.get_message_event_type("test-message-id") + + # Assert + assert result == StreamEvent.MESSAGE + mock_session.query.return_value.scalar.assert_called_once() + + def test_message_to_stream_response_with_precomputed_event_type(self, message_cycle_manager): + """MessageCycleManager.message_to_stream_response expects a valid event_type; callers should precompute it.""" + with ( + patch("core.app.task_pipeline.message_cycle_manager.Session") as mock_session_class, + patch("core.app.task_pipeline.message_cycle_manager.db", new=SimpleNamespace(engine=Mock())), + ): + # Setup mock session and message file + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + mock_message_file = Mock() + # Current implementation uses session.query(...).scalar() + mock_session.query.return_value.scalar.return_value = mock_message_file + + # Execute: compute event type once, then pass to message_to_stream_response + with current_app.app_context(): + event_type = message_cycle_manager.get_message_event_type("test-message-id") + result = message_cycle_manager.message_to_stream_response( + answer="Hello world", message_id="test-message-id", event_type=event_type + ) + + # Assert + assert isinstance(result, MessageStreamResponse) + assert result.answer == "Hello world" + assert result.id == "test-message-id" + assert result.event == StreamEvent.MESSAGE_FILE + mock_session.query.return_value.scalar.assert_called_once() + + def test_message_to_stream_response_with_event_type_skips_query(self, message_cycle_manager): + """Test that message_to_stream_response skips database query when event_type is provided.""" + with patch("core.app.task_pipeline.message_cycle_manager.Session") as mock_session_class: + # Execute with event_type provided + result = message_cycle_manager.message_to_stream_response( + answer="Hello world", message_id="test-message-id", event_type=StreamEvent.MESSAGE + ) + + # Assert + assert isinstance(result, MessageStreamResponse) + assert result.answer == "Hello world" + assert result.id == "test-message-id" + assert result.event == StreamEvent.MESSAGE + # Should not query database when event_type is provided + mock_session_class.assert_not_called() + + def test_message_to_stream_response_with_from_variable_selector(self, message_cycle_manager): + """Test message_to_stream_response with from_variable_selector parameter.""" + result = message_cycle_manager.message_to_stream_response( + answer="Hello world", + message_id="test-message-id", + from_variable_selector=["var1", "var2"], + event_type=StreamEvent.MESSAGE, + ) + + assert isinstance(result, MessageStreamResponse) + assert result.answer == "Hello world" + assert result.id == "test-message-id" + assert result.from_variable_selector == ["var1", "var2"] + assert result.event == StreamEvent.MESSAGE + + def test_optimization_usage_example(self, message_cycle_manager): + """Test the 
optimization pattern that should be used by callers.""" + # Step 1: Get event type once (this queries database) + with ( + patch("core.app.task_pipeline.message_cycle_manager.Session") as mock_session_class, + patch("core.app.task_pipeline.message_cycle_manager.db", new=SimpleNamespace(engine=Mock())), + ): + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + # Current implementation uses session.query(...).scalar() + mock_session.query.return_value.scalar.return_value = None # No files + with current_app.app_context(): + event_type = message_cycle_manager.get_message_event_type("test-message-id") + + # Should query database once + mock_session_class.assert_called_once_with(ANY, expire_on_commit=False) + assert event_type == StreamEvent.MESSAGE + + # Step 2: Use event_type for multiple calls (no additional queries) + with patch("core.app.task_pipeline.message_cycle_manager.Session") as mock_session_class: + mock_session_class.return_value.__enter__.return_value = Mock() + + chunk1_response = message_cycle_manager.message_to_stream_response( + answer="Chunk 1", message_id="test-message-id", event_type=event_type + ) + + chunk2_response = message_cycle_manager.message_to_stream_response( + answer="Chunk 2", message_id="test-message-id", event_type=event_type + ) + + # Should not query database again + mock_session_class.assert_not_called() + + assert chunk1_response.event == StreamEvent.MESSAGE + assert chunk2_response.event == StreamEvent.MESSAGE + assert chunk1_response.answer == "Chunk 1" + assert chunk2_response.answer == "Chunk 2" diff --git a/api/tests/unit_tests/core/datasource/test_notion_provider.py b/api/tests/unit_tests/core/datasource/test_notion_provider.py index 9e7255bc3f..e4bd7d3bdf 100644 --- a/api/tests/unit_tests/core/datasource/test_notion_provider.py +++ b/api/tests/unit_tests/core/datasource/test_notion_provider.py @@ -96,7 +96,7 @@ class TestNotionExtractorAuthentication: def test_init_with_integration_token_fallback(self, mock_get_token, mock_config, mock_document_model): """Test NotionExtractor falls back to integration token when credential not found.""" # Arrange - mock_get_token.return_value = None + mock_get_token.side_effect = Exception("No credential id found") mock_config.NOTION_INTEGRATION_TOKEN = "integration-token-fallback" # Act @@ -105,7 +105,7 @@ class TestNotionExtractorAuthentication: notion_obj_id="page-456", notion_page_type="page", tenant_id="tenant-789", - credential_id="cred-123", + credential_id=None, document_model=mock_document_model, ) @@ -117,7 +117,7 @@ class TestNotionExtractorAuthentication: def test_init_missing_credentials_raises_error(self, mock_get_token, mock_config, mock_document_model): """Test NotionExtractor raises error when no credentials available.""" # Arrange - mock_get_token.return_value = None + mock_get_token.side_effect = Exception("No credential id found") mock_config.NOTION_INTEGRATION_TOKEN = None # Act & Assert @@ -127,7 +127,7 @@ class TestNotionExtractorAuthentication: notion_obj_id="page-456", notion_page_type="page", tenant_id="tenant-789", - credential_id="cred-123", + credential_id=None, document_model=mock_document_model, ) assert "Must specify `integration_token`" in str(exc_info.value) diff --git a/api/tests/unit_tests/core/helper/test_ssrf_proxy.py b/api/tests/unit_tests/core/helper/test_ssrf_proxy.py index 37749f0c66..d6d75fb72f 100644 --- a/api/tests/unit_tests/core/helper/test_ssrf_proxy.py +++ b/api/tests/unit_tests/core/helper/test_ssrf_proxy.py @@ -1,52 
+1,176 @@ -import secrets from unittest.mock import MagicMock, patch import pytest -from core.helper.ssrf_proxy import SSRF_DEFAULT_MAX_RETRIES, STATUS_FORCELIST, make_request +from core.helper.ssrf_proxy import ( + SSRF_DEFAULT_MAX_RETRIES, + _get_user_provided_host_header, + make_request, +) -@patch("httpx.Client.request") -def test_successful_request(mock_request): +@patch("core.helper.ssrf_proxy._get_ssrf_client") +def test_successful_request(mock_get_client): + mock_client = MagicMock() mock_response = MagicMock() mock_response.status_code = 200 - mock_request.return_value = mock_response + mock_client.request.return_value = mock_response + mock_get_client.return_value = mock_client response = make_request("GET", "http://example.com") assert response.status_code == 200 + mock_client.request.assert_called_once() -@patch("httpx.Client.request") -def test_retry_exceed_max_retries(mock_request): +@patch("core.helper.ssrf_proxy._get_ssrf_client") +def test_retry_exceed_max_retries(mock_get_client): + mock_client = MagicMock() mock_response = MagicMock() mock_response.status_code = 500 - - side_effects = [mock_response] * SSRF_DEFAULT_MAX_RETRIES - mock_request.side_effect = side_effects + mock_client.request.return_value = mock_response + mock_get_client.return_value = mock_client with pytest.raises(Exception) as e: make_request("GET", "http://example.com", max_retries=SSRF_DEFAULT_MAX_RETRIES - 1) assert str(e.value) == f"Reached maximum retries ({SSRF_DEFAULT_MAX_RETRIES - 1}) for URL http://example.com" -@patch("httpx.Client.request") -def test_retry_logic_success(mock_request): - side_effects = [] +class TestGetUserProvidedHostHeader: + """Tests for _get_user_provided_host_header function.""" - for _ in range(SSRF_DEFAULT_MAX_RETRIES): - status_code = secrets.choice(STATUS_FORCELIST) - mock_response = MagicMock() - mock_response.status_code = status_code - side_effects.append(mock_response) + def test_returns_none_when_headers_is_none(self): + assert _get_user_provided_host_header(None) is None - mock_response_200 = MagicMock() - mock_response_200.status_code = 200 - side_effects.append(mock_response_200) + def test_returns_none_when_headers_is_empty(self): + assert _get_user_provided_host_header({}) is None - mock_request.side_effect = side_effects + def test_returns_none_when_host_header_not_present(self): + headers = {"Content-Type": "application/json", "Authorization": "Bearer token"} + assert _get_user_provided_host_header(headers) is None - response = make_request("GET", "http://example.com", max_retries=SSRF_DEFAULT_MAX_RETRIES) + def test_returns_host_header_lowercase(self): + headers = {"host": "example.com"} + assert _get_user_provided_host_header(headers) == "example.com" + + def test_returns_host_header_uppercase(self): + headers = {"HOST": "example.com"} + assert _get_user_provided_host_header(headers) == "example.com" + + def test_returns_host_header_mixed_case(self): + headers = {"HoSt": "example.com"} + assert _get_user_provided_host_header(headers) == "example.com" + + def test_returns_host_header_from_multiple_headers(self): + headers = {"Content-Type": "application/json", "Host": "api.example.com", "Authorization": "Bearer token"} + assert _get_user_provided_host_header(headers) == "api.example.com" + + def test_returns_first_host_header_when_duplicates(self): + headers = {"host": "first.com", "Host": "second.com"} + # Should return the first one encountered (iteration order is preserved in dict) + result = _get_user_provided_host_header(headers) + assert result in 
("first.com", "second.com") + + +@patch("core.helper.ssrf_proxy._get_ssrf_client") +def test_host_header_preservation_with_user_header(mock_get_client): + """Test that user-provided Host header is preserved in the request.""" + mock_client = MagicMock() + mock_response = MagicMock() + mock_response.status_code = 200 + mock_client.request.return_value = mock_response + mock_get_client.return_value = mock_client + + custom_host = "custom.example.com:8080" + response = make_request("GET", "http://example.com", headers={"Host": custom_host}) assert response.status_code == 200 - assert mock_request.call_count == SSRF_DEFAULT_MAX_RETRIES + 1 - assert mock_request.call_args_list[0][1].get("method") == "GET" + # Verify client.request was called with the host header preserved (lowercase) + call_kwargs = mock_client.request.call_args.kwargs + assert call_kwargs["headers"]["host"] == custom_host + + +@patch("core.helper.ssrf_proxy._get_ssrf_client") +@pytest.mark.parametrize("host_key", ["host", "HOST", "Host"]) +def test_host_header_preservation_case_insensitive(mock_get_client, host_key): + """Test that Host header is preserved regardless of case.""" + mock_client = MagicMock() + mock_response = MagicMock() + mock_response.status_code = 200 + mock_client.request.return_value = mock_response + mock_get_client.return_value = mock_client + + response = make_request("GET", "http://example.com", headers={host_key: "api.example.com"}) + + assert response.status_code == 200 + # Host header should be normalized to lowercase "host" + call_kwargs = mock_client.request.call_args.kwargs + assert call_kwargs["headers"]["host"] == "api.example.com" + + +class TestFollowRedirectsParameter: + """Tests for follow_redirects parameter handling. + + These tests verify that follow_redirects is correctly passed to client.request(). 
+ """ + + @patch("core.helper.ssrf_proxy._get_ssrf_client") + def test_follow_redirects_passed_to_request(self, mock_get_client): + """Verify follow_redirects IS passed to client.request().""" + mock_client = MagicMock() + mock_response = MagicMock() + mock_response.status_code = 200 + mock_client.request.return_value = mock_response + mock_get_client.return_value = mock_client + + make_request("GET", "http://example.com", follow_redirects=True) + + # Verify follow_redirects was passed to request + call_kwargs = mock_client.request.call_args.kwargs + assert call_kwargs.get("follow_redirects") is True + + @patch("core.helper.ssrf_proxy._get_ssrf_client") + def test_allow_redirects_converted_to_follow_redirects(self, mock_get_client): + """Verify allow_redirects (requests-style) is converted to follow_redirects (httpx-style).""" + mock_client = MagicMock() + mock_response = MagicMock() + mock_response.status_code = 200 + mock_client.request.return_value = mock_response + mock_get_client.return_value = mock_client + + # Use allow_redirects (requests-style parameter) + make_request("GET", "http://example.com", allow_redirects=True) + + # Verify it was converted to follow_redirects + call_kwargs = mock_client.request.call_args.kwargs + assert call_kwargs.get("follow_redirects") is True + assert "allow_redirects" not in call_kwargs + + @patch("core.helper.ssrf_proxy._get_ssrf_client") + def test_follow_redirects_not_set_when_not_specified(self, mock_get_client): + """Verify follow_redirects is not in kwargs when not specified (httpx default behavior).""" + mock_client = MagicMock() + mock_response = MagicMock() + mock_response.status_code = 200 + mock_client.request.return_value = mock_response + mock_get_client.return_value = mock_client + + make_request("GET", "http://example.com") + + # follow_redirects should not be in kwargs, letting httpx use its default + call_kwargs = mock_client.request.call_args.kwargs + assert "follow_redirects" not in call_kwargs + + @patch("core.helper.ssrf_proxy._get_ssrf_client") + def test_follow_redirects_takes_precedence_over_allow_redirects(self, mock_get_client): + """Verify follow_redirects takes precedence when both are specified.""" + mock_client = MagicMock() + mock_response = MagicMock() + mock_response.status_code = 200 + mock_client.request.return_value = mock_response + mock_get_client.return_value = mock_client + + # Both specified - follow_redirects should take precedence + make_request("GET", "http://example.com", allow_redirects=False, follow_redirects=True) + + call_kwargs = mock_client.request.call_args.kwargs + assert call_kwargs.get("follow_redirects") is True diff --git a/api/tests/unit_tests/core/helper/test_tool_provider_cache.py b/api/tests/unit_tests/core/helper/test_tool_provider_cache.py deleted file mode 100644 index 00f7c9d7e9..0000000000 --- a/api/tests/unit_tests/core/helper/test_tool_provider_cache.py +++ /dev/null @@ -1,129 +0,0 @@ -import json -from unittest.mock import patch - -import pytest -from redis.exceptions import RedisError - -from core.helper.tool_provider_cache import ToolProviderListCache -from core.tools.entities.api_entities import ToolProviderTypeApiLiteral - - -@pytest.fixture -def mock_redis_client(): - """Fixture: Mock Redis client""" - with patch("core.helper.tool_provider_cache.redis_client") as mock: - yield mock - - -class TestToolProviderListCache: - """Test class for ToolProviderListCache""" - - def test_generate_cache_key(self): - """Test cache key generation logic""" - # Scenario 1: Specify typ (valid 
literal value) - tenant_id = "tenant_123" - typ: ToolProviderTypeApiLiteral = "builtin" - expected_key = f"tool_providers:tenant_id:{tenant_id}:type:{typ}" - assert ToolProviderListCache._generate_cache_key(tenant_id, typ) == expected_key - - # Scenario 2: typ is None (defaults to "all") - expected_key_all = f"tool_providers:tenant_id:{tenant_id}:type:all" - assert ToolProviderListCache._generate_cache_key(tenant_id) == expected_key_all - - def test_get_cached_providers_hit(self, mock_redis_client): - """Test get cached providers - cache hit and successful decoding""" - tenant_id = "tenant_123" - typ: ToolProviderTypeApiLiteral = "api" - mock_providers = [{"id": "tool", "name": "test_provider"}] - mock_redis_client.get.return_value = json.dumps(mock_providers).encode("utf-8") - - result = ToolProviderListCache.get_cached_providers(tenant_id, typ) - - mock_redis_client.get.assert_called_once_with(ToolProviderListCache._generate_cache_key(tenant_id, typ)) - assert result == mock_providers - - def test_get_cached_providers_decode_error(self, mock_redis_client): - """Test get cached providers - cache hit but decoding failed""" - tenant_id = "tenant_123" - mock_redis_client.get.return_value = b"invalid_json_data" - - result = ToolProviderListCache.get_cached_providers(tenant_id) - - assert result is None - mock_redis_client.get.assert_called_once() - - def test_get_cached_providers_miss(self, mock_redis_client): - """Test get cached providers - cache miss""" - tenant_id = "tenant_123" - mock_redis_client.get.return_value = None - - result = ToolProviderListCache.get_cached_providers(tenant_id) - - assert result is None - mock_redis_client.get.assert_called_once() - - def test_set_cached_providers(self, mock_redis_client): - """Test set cached providers""" - tenant_id = "tenant_123" - typ: ToolProviderTypeApiLiteral = "builtin" - mock_providers = [{"id": "tool", "name": "test_provider"}] - cache_key = ToolProviderListCache._generate_cache_key(tenant_id, typ) - - ToolProviderListCache.set_cached_providers(tenant_id, typ, mock_providers) - - mock_redis_client.setex.assert_called_once_with( - cache_key, ToolProviderListCache.CACHE_TTL, json.dumps(mock_providers) - ) - - def test_invalidate_cache_specific_type(self, mock_redis_client): - """Test invalidate cache - specific type""" - tenant_id = "tenant_123" - typ: ToolProviderTypeApiLiteral = "workflow" - cache_key = ToolProviderListCache._generate_cache_key(tenant_id, typ) - - ToolProviderListCache.invalidate_cache(tenant_id, typ) - - mock_redis_client.delete.assert_called_once_with(cache_key) - - def test_invalidate_cache_all_types(self, mock_redis_client): - """Test invalidate cache - clear all tenant cache""" - tenant_id = "tenant_123" - mock_keys = [ - b"tool_providers:tenant_id:tenant_123:type:all", - b"tool_providers:tenant_id:tenant_123:type:builtin", - ] - mock_redis_client.scan_iter.return_value = mock_keys - - ToolProviderListCache.invalidate_cache(tenant_id) - - mock_redis_client.scan_iter.assert_called_once_with(f"tool_providers:tenant_id:{tenant_id}:*") - mock_redis_client.delete.assert_called_once_with(*mock_keys) - - def test_invalidate_cache_no_keys(self, mock_redis_client): - """Test invalidate cache - no cache keys for tenant""" - tenant_id = "tenant_123" - mock_redis_client.scan_iter.return_value = [] - - ToolProviderListCache.invalidate_cache(tenant_id) - - mock_redis_client.delete.assert_not_called() - - def test_redis_fallback_default_return(self, mock_redis_client): - """Test redis_fallback decorator - default return value 
(Redis error)""" - mock_redis_client.get.side_effect = RedisError("Redis connection error") - - result = ToolProviderListCache.get_cached_providers("tenant_123") - - assert result is None - mock_redis_client.get.assert_called_once() - - def test_redis_fallback_no_default(self, mock_redis_client): - """Test redis_fallback decorator - no default return value (Redis error)""" - mock_redis_client.setex.side_effect = RedisError("Redis connection error") - - try: - ToolProviderListCache.set_cached_providers("tenant_123", "mcp", []) - except RedisError: - pytest.fail("set_cached_providers should not raise RedisError (handled by fallback)") - - mock_redis_client.setex.assert_called_once() diff --git a/api/tests/unit_tests/core/logging/__init__.py b/api/tests/unit_tests/core/logging/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/core/logging/test_context.py b/api/tests/unit_tests/core/logging/test_context.py new file mode 100644 index 0000000000..f388a3a0b9 --- /dev/null +++ b/api/tests/unit_tests/core/logging/test_context.py @@ -0,0 +1,79 @@ +"""Tests for logging context module.""" + +import uuid + +from core.logging.context import ( + clear_request_context, + get_request_id, + get_trace_id, + init_request_context, +) + + +class TestLoggingContext: + """Tests for the logging context functions.""" + + def test_init_creates_request_id(self): + """init_request_context should create a 10-char request ID.""" + init_request_context() + request_id = get_request_id() + assert len(request_id) == 10 + assert all(c in "0123456789abcdef" for c in request_id) + + def test_init_creates_trace_id(self): + """init_request_context should create a 32-char trace ID.""" + init_request_context() + trace_id = get_trace_id() + assert len(trace_id) == 32 + assert all(c in "0123456789abcdef" for c in trace_id) + + def test_trace_id_derived_from_request_id(self): + """trace_id should be deterministically derived from request_id.""" + init_request_context() + request_id = get_request_id() + trace_id = get_trace_id() + + # Verify trace_id is derived using uuid5 + expected_trace = uuid.uuid5(uuid.NAMESPACE_DNS, request_id).hex + assert trace_id == expected_trace + + def test_clear_resets_context(self): + """clear_request_context should reset both IDs to empty strings.""" + init_request_context() + assert get_request_id() != "" + assert get_trace_id() != "" + + clear_request_context() + assert get_request_id() == "" + assert get_trace_id() == "" + + def test_default_values_are_empty(self): + """Default values should be empty strings before init.""" + clear_request_context() + assert get_request_id() == "" + assert get_trace_id() == "" + + def test_multiple_inits_create_different_ids(self): + """Each init should create new unique IDs.""" + init_request_context() + first_request_id = get_request_id() + first_trace_id = get_trace_id() + + init_request_context() + second_request_id = get_request_id() + second_trace_id = get_trace_id() + + assert first_request_id != second_request_id + assert first_trace_id != second_trace_id + + def test_context_isolation(self): + """Context should be isolated per-call (no thread leakage in same thread).""" + init_request_context() + id1 = get_request_id() + + # Simulate another request + init_request_context() + id2 = get_request_id() + + # IDs should be different + assert id1 != id2 diff --git a/api/tests/unit_tests/core/logging/test_filters.py b/api/tests/unit_tests/core/logging/test_filters.py new file mode 100644 index 0000000000..b66ad111d5 --- 
/dev/null +++ b/api/tests/unit_tests/core/logging/test_filters.py @@ -0,0 +1,114 @@ +"""Tests for logging filters.""" + +import logging +from unittest import mock + +import pytest + + +@pytest.fixture +def log_record(): + return logging.LogRecord( + name="test", + level=logging.INFO, + pathname="", + lineno=0, + msg="test", + args=(), + exc_info=None, + ) + + +class TestTraceContextFilter: + def test_sets_empty_trace_id_without_context(self, log_record): + from core.logging.context import clear_request_context + from core.logging.filters import TraceContextFilter + + # Ensure no context is set + clear_request_context() + + filter = TraceContextFilter() + result = filter.filter(log_record) + + assert result is True + assert hasattr(log_record, "trace_id") + assert hasattr(log_record, "span_id") + assert hasattr(log_record, "req_id") + # Without context, IDs should be empty + assert log_record.trace_id == "" + assert log_record.req_id == "" + + def test_sets_trace_id_from_context(self, log_record): + """Test that trace_id and req_id are set from ContextVar when initialized.""" + from core.logging.context import init_request_context + from core.logging.filters import TraceContextFilter + + # Initialize context (no Flask needed!) + init_request_context() + + filter = TraceContextFilter() + filter.filter(log_record) + + # With context initialized, IDs should be set + assert log_record.trace_id != "" + assert len(log_record.trace_id) == 32 + assert log_record.req_id != "" + assert len(log_record.req_id) == 10 + + def test_filter_always_returns_true(self, log_record): + from core.logging.filters import TraceContextFilter + + filter = TraceContextFilter() + result = filter.filter(log_record) + assert result is True + + def test_sets_trace_id_from_otel_when_available(self, log_record): + from core.logging.filters import TraceContextFilter + + mock_span = mock.MagicMock() + mock_context = mock.MagicMock() + mock_context.trace_id = 0x5B8AA5A2D2C872E8321CF37308D69DF2 + mock_context.span_id = 0x051581BF3BB55C45 + mock_span.get_span_context.return_value = mock_context + + with ( + mock.patch("opentelemetry.trace.get_current_span", return_value=mock_span), + mock.patch("opentelemetry.trace.span.INVALID_TRACE_ID", 0), + mock.patch("opentelemetry.trace.span.INVALID_SPAN_ID", 0), + ): + filter = TraceContextFilter() + filter.filter(log_record) + + assert log_record.trace_id == "5b8aa5a2d2c872e8321cf37308d69df2" + assert log_record.span_id == "051581bf3bb55c45" + + +class TestIdentityContextFilter: + def test_sets_empty_identity_without_request_context(self, log_record): + from core.logging.filters import IdentityContextFilter + + filter = IdentityContextFilter() + result = filter.filter(log_record) + + assert result is True + assert log_record.tenant_id == "" + assert log_record.user_id == "" + assert log_record.user_type == "" + + def test_filter_always_returns_true(self, log_record): + from core.logging.filters import IdentityContextFilter + + filter = IdentityContextFilter() + result = filter.filter(log_record) + assert result is True + + def test_handles_exception_gracefully(self, log_record): + from core.logging.filters import IdentityContextFilter + + filter = IdentityContextFilter() + + # Should not raise even if something goes wrong + with mock.patch("core.logging.filters.flask.has_request_context", side_effect=Exception("Test error")): + result = filter.filter(log_record) + assert result is True + assert log_record.tenant_id == "" diff --git 
a/api/tests/unit_tests/core/logging/test_structured_formatter.py b/api/tests/unit_tests/core/logging/test_structured_formatter.py new file mode 100644 index 0000000000..94b91d205e --- /dev/null +++ b/api/tests/unit_tests/core/logging/test_structured_formatter.py @@ -0,0 +1,267 @@ +"""Tests for structured JSON formatter.""" + +import logging +import sys + +import orjson + + +class TestStructuredJSONFormatter: + def test_basic_log_format(self): + from core.logging.structured_formatter import StructuredJSONFormatter + + formatter = StructuredJSONFormatter(service_name="test-service") + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=42, + msg="Test message", + args=(), + exc_info=None, + ) + + output = formatter.format(record) + log_dict = orjson.loads(output) + + assert log_dict["severity"] == "INFO" + assert log_dict["service"] == "test-service" + assert log_dict["caller"] == "test.py:42" + assert log_dict["message"] == "Test message" + assert "ts" in log_dict + assert log_dict["ts"].endswith("Z") + + def test_severity_mapping(self): + from core.logging.structured_formatter import StructuredJSONFormatter + + formatter = StructuredJSONFormatter() + + test_cases = [ + (logging.DEBUG, "DEBUG"), + (logging.INFO, "INFO"), + (logging.WARNING, "WARN"), + (logging.ERROR, "ERROR"), + (logging.CRITICAL, "ERROR"), + ] + + for level, expected_severity in test_cases: + record = logging.LogRecord( + name="test", + level=level, + pathname="test.py", + lineno=1, + msg="Test", + args=(), + exc_info=None, + ) + output = formatter.format(record) + log_dict = orjson.loads(output) + assert log_dict["severity"] == expected_severity, f"Level {level} should map to {expected_severity}" + + def test_error_with_stack_trace(self): + from core.logging.structured_formatter import StructuredJSONFormatter + + formatter = StructuredJSONFormatter() + + try: + raise ValueError("Test error") + except ValueError: + exc_info = sys.exc_info() + + record = logging.LogRecord( + name="test", + level=logging.ERROR, + pathname="test.py", + lineno=10, + msg="Error occurred", + args=(), + exc_info=exc_info, + ) + + output = formatter.format(record) + log_dict = orjson.loads(output) + + assert log_dict["severity"] == "ERROR" + assert "stack_trace" in log_dict + assert "ValueError: Test error" in log_dict["stack_trace"] + + def test_no_stack_trace_for_info(self): + from core.logging.structured_formatter import StructuredJSONFormatter + + formatter = StructuredJSONFormatter() + + try: + raise ValueError("Test error") + except ValueError: + exc_info = sys.exc_info() + + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=10, + msg="Info message", + args=(), + exc_info=exc_info, + ) + + output = formatter.format(record) + log_dict = orjson.loads(output) + + assert "stack_trace" not in log_dict + + def test_trace_context_included(self): + from core.logging.structured_formatter import StructuredJSONFormatter + + formatter = StructuredJSONFormatter() + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test", + args=(), + exc_info=None, + ) + record.trace_id = "5b8aa5a2d2c872e8321cf37308d69df2" + record.span_id = "051581bf3bb55c45" + + output = formatter.format(record) + log_dict = orjson.loads(output) + + assert log_dict["trace_id"] == "5b8aa5a2d2c872e8321cf37308d69df2" + assert log_dict["span_id"] == "051581bf3bb55c45" + + def test_identity_context_included(self): + from 
core.logging.structured_formatter import StructuredJSONFormatter + + formatter = StructuredJSONFormatter() + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test", + args=(), + exc_info=None, + ) + record.tenant_id = "t-global-corp" + record.user_id = "u-admin-007" + record.user_type = "admin" + + output = formatter.format(record) + log_dict = orjson.loads(output) + + assert "identity" in log_dict + assert log_dict["identity"]["tenant_id"] == "t-global-corp" + assert log_dict["identity"]["user_id"] == "u-admin-007" + assert log_dict["identity"]["user_type"] == "admin" + + def test_no_identity_when_empty(self): + from core.logging.structured_formatter import StructuredJSONFormatter + + formatter = StructuredJSONFormatter() + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test", + args=(), + exc_info=None, + ) + + output = formatter.format(record) + log_dict = orjson.loads(output) + + assert "identity" not in log_dict + + def test_attributes_included(self): + from core.logging.structured_formatter import StructuredJSONFormatter + + formatter = StructuredJSONFormatter() + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test", + args=(), + exc_info=None, + ) + record.attributes = {"order_id": "ord-123", "amount": 99.99} + + output = formatter.format(record) + log_dict = orjson.loads(output) + + assert log_dict["attributes"]["order_id"] == "ord-123" + assert log_dict["attributes"]["amount"] == 99.99 + + def test_message_with_args(self): + from core.logging.structured_formatter import StructuredJSONFormatter + + formatter = StructuredJSONFormatter() + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="User %s logged in from %s", + args=("john", "192.168.1.1"), + exc_info=None, + ) + + output = formatter.format(record) + log_dict = orjson.loads(output) + + assert log_dict["message"] == "User john logged in from 192.168.1.1" + + def test_timestamp_format(self): + from core.logging.structured_formatter import StructuredJSONFormatter + + formatter = StructuredJSONFormatter() + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test", + args=(), + exc_info=None, + ) + + output = formatter.format(record) + log_dict = orjson.loads(output) + + # Verify ISO 8601 format with Z suffix + ts = log_dict["ts"] + assert ts.endswith("Z") + assert "T" in ts + # Should have milliseconds + assert "." 
in ts + + def test_fallback_for_non_serializable_attributes(self): + from core.logging.structured_formatter import StructuredJSONFormatter + + formatter = StructuredJSONFormatter() + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test with non-serializable", + args=(), + exc_info=None, + ) + # Set is not serializable by orjson + record.attributes = {"items": {1, 2, 3}, "custom": object()} + + # Should not raise, fallback to json.dumps with default=str + output = formatter.format(record) + + # Verify it's valid JSON (parsed by stdlib json since orjson may fail) + import json + + log_dict = json.loads(output) + assert log_dict["message"] == "Test with non-serializable" + assert "attributes" in log_dict diff --git a/api/tests/unit_tests/core/logging/test_trace_helpers.py b/api/tests/unit_tests/core/logging/test_trace_helpers.py new file mode 100644 index 0000000000..aab1753b9b --- /dev/null +++ b/api/tests/unit_tests/core/logging/test_trace_helpers.py @@ -0,0 +1,102 @@ +"""Tests for trace helper functions.""" + +import re +from unittest import mock + + +class TestGetSpanIdFromOtelContext: + def test_returns_none_without_span(self): + from core.helper.trace_id_helper import get_span_id_from_otel_context + + with mock.patch("opentelemetry.trace.get_current_span", return_value=None): + result = get_span_id_from_otel_context() + assert result is None + + def test_returns_span_id_when_available(self): + from core.helper.trace_id_helper import get_span_id_from_otel_context + + mock_span = mock.MagicMock() + mock_context = mock.MagicMock() + mock_context.span_id = 0x051581BF3BB55C45 + mock_span.get_span_context.return_value = mock_context + + with mock.patch("opentelemetry.trace.get_current_span", return_value=mock_span): + with mock.patch("opentelemetry.trace.span.INVALID_SPAN_ID", 0): + result = get_span_id_from_otel_context() + assert result == "051581bf3bb55c45" + + def test_returns_none_on_exception(self): + from core.helper.trace_id_helper import get_span_id_from_otel_context + + with mock.patch("opentelemetry.trace.get_current_span", side_effect=Exception("Test error")): + result = get_span_id_from_otel_context() + assert result is None + + +class TestGenerateTraceparentHeader: + def test_generates_valid_format(self): + from core.helper.trace_id_helper import generate_traceparent_header + + with mock.patch("opentelemetry.trace.get_current_span", return_value=None): + result = generate_traceparent_header() + + assert result is not None + # Format: 00-{trace_id}-{span_id}-01 + parts = result.split("-") + assert len(parts) == 4 + assert parts[0] == "00" # version + assert len(parts[1]) == 32 # trace_id (32 hex chars) + assert len(parts[2]) == 16 # span_id (16 hex chars) + assert parts[3] == "01" # flags + + def test_uses_otel_context_when_available(self): + from core.helper.trace_id_helper import generate_traceparent_header + + mock_span = mock.MagicMock() + mock_context = mock.MagicMock() + mock_context.trace_id = 0x5B8AA5A2D2C872E8321CF37308D69DF2 + mock_context.span_id = 0x051581BF3BB55C45 + mock_span.get_span_context.return_value = mock_context + + with mock.patch("opentelemetry.trace.get_current_span", return_value=mock_span): + with ( + mock.patch("opentelemetry.trace.span.INVALID_TRACE_ID", 0), + mock.patch("opentelemetry.trace.span.INVALID_SPAN_ID", 0), + ): + result = generate_traceparent_header() + + assert result == "00-5b8aa5a2d2c872e8321cf37308d69df2-051581bf3bb55c45-01" + + def test_generates_hex_only_values(self): + from 
core.helper.trace_id_helper import generate_traceparent_header + + with mock.patch("opentelemetry.trace.get_current_span", return_value=None): + result = generate_traceparent_header() + + parts = result.split("-") + # All parts should be valid hex + assert re.match(r"^[0-9a-f]+$", parts[1]) + assert re.match(r"^[0-9a-f]+$", parts[2]) + + +class TestParseTraceparentHeader: + def test_parses_valid_traceparent(self): + from core.helper.trace_id_helper import parse_traceparent_header + + traceparent = "00-5b8aa5a2d2c872e8321cf37308d69df2-051581bf3bb55c45-01" + result = parse_traceparent_header(traceparent) + + assert result == "5b8aa5a2d2c872e8321cf37308d69df2" + + def test_returns_none_for_invalid_format(self): + from core.helper.trace_id_helper import parse_traceparent_header + + # Wrong number of parts + assert parse_traceparent_header("00-abc-def") is None + # Wrong trace_id length + assert parse_traceparent_header("00-abc-def-01") is None + + def test_returns_none_for_empty_string(self): + from core.helper.trace_id_helper import parse_traceparent_header + + assert parse_traceparent_header("") is None diff --git a/api/tests/unit_tests/core/ops/test_utils.py b/api/tests/unit_tests/core/ops/test_utils.py index 7cc2772acf..e1084001b7 100644 --- a/api/tests/unit_tests/core/ops/test_utils.py +++ b/api/tests/unit_tests/core/ops/test_utils.py @@ -1,6 +1,9 @@ +import re +from datetime import datetime + import pytest -from core.ops.utils import validate_project_name, validate_url, validate_url_with_path +from core.ops.utils import generate_dotted_order, validate_project_name, validate_url, validate_url_with_path class TestValidateUrl: @@ -136,3 +139,51 @@ class TestValidateProjectName: """Test custom default name""" result = validate_project_name("", "Custom Default") assert result == "Custom Default" + + +class TestGenerateDottedOrder: + """Test cases for generate_dotted_order function""" + + def test_dotted_order_has_6_digit_microseconds(self): + """Test that timestamp includes full 6-digit microseconds for LangSmith API compatibility. + + LangSmith API expects timestamps in format: YYYYMMDDTHHMMSSffffffZ (6-digit microseconds). 
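+
+        A plausible shape for the helper under test (assumed from the
+        assertions in this class, not the verbatim implementation):
+
+            ts = start_time.strftime("%Y%m%dT%H%M%S") + f"{start_time.microsecond:06d}" + "Z"
+            return f"{parent_order}.{ts}{run_id}" if parent_order else f"{ts}{run_id}"
+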
+        Previously, the code truncated to 3 digits which caused API errors:
+        'cannot parse .111 as .000000'
+        """
+        start_time = datetime(2025, 12, 23, 4, 19, 55, 111000)
+        run_id = "test-run-id"
+        result = generate_dotted_order(run_id, start_time)
+
+        # Extract timestamp portion (before the run_id)
+        timestamp_match = re.match(r"^(\d{8}T\d{6})(\d+)Z", result)
+        assert timestamp_match is not None, "Timestamp format should match YYYYMMDDTHHMMSSffffffZ"
+
+        microseconds = timestamp_match.group(2)
+        assert len(microseconds) == 6, f"Microseconds should be 6 digits, got {len(microseconds)}: {microseconds}"
+
+    def test_dotted_order_format_matches_langsmith_expected(self):
+        """Test that dotted_order format matches LangSmith API expected format."""
+        start_time = datetime(2025, 1, 15, 10, 30, 45, 123456)
+        run_id = "abc123"
+        result = generate_dotted_order(run_id, start_time)
+
+        # LangSmith expects: YYYYMMDDTHHMMSSffffffZ followed by run_id
+        assert result == "20250115T103045123456Zabc123"
+
+    def test_dotted_order_with_parent(self):
+        """Test dotted_order generation with parent order uses dot separator."""
+        start_time = datetime(2025, 12, 23, 4, 19, 55, 111000)
+        run_id = "child-run-id"
+        parent_order = "20251223T041955000000Zparent-run-id"
+        result = generate_dotted_order(run_id, start_time, parent_order)
+
+        assert result == "20251223T041955000000Zparent-run-id.20251223T041955111000Zchild-run-id"
+
+    def test_dotted_order_without_parent_has_no_dot(self):
+        """Test dotted_order generation without parent has no dot separator."""
+        start_time = datetime(2025, 12, 23, 4, 19, 55, 111000)
+        run_id = "test-run-id"
+        result = generate_dotted_order(run_id, start_time, None)
+
+        assert "." not in result
diff --git a/api/tests/unit_tests/core/rag/cleaner/__init__.py b/api/tests/unit_tests/core/rag/cleaner/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/tests/unit_tests/core/rag/cleaner/test_clean_processor.py b/api/tests/unit_tests/core/rag/cleaner/test_clean_processor.py
new file mode 100644
index 0000000000..65ee62b8dd
--- /dev/null
+++ b/api/tests/unit_tests/core/rag/cleaner/test_clean_processor.py
@@ -0,0 +1,213 @@
+from core.rag.cleaner.clean_processor import CleanProcessor
+
+
+class TestCleanProcessor:
+    """Test cases for CleanProcessor.clean method."""
+
+    def test_clean_default_removal_of_invalid_symbols(self):
+        """Test default cleaning removes invalid symbols."""
+        # Test <| replacement
+        assert CleanProcessor.clean("text<|with<|invalid", None) == "text<with<invalid"
+
+        # Test |> replacement
+        assert CleanProcessor.clean("text|>with|>invalid", None) == "text>with>invalid"
+
+        # Test removal of control characters
+        text_with_control = "normal\x00text\x1fwith\x07control\x7fchars"
+        expected = "normaltextwithcontrolchars"
+        assert CleanProcessor.clean(text_with_control, None) == expected
+
+        # Test U+FFFE removal
+        text_with_ufffe = "normal\ufffepadding"
+        expected = "normalpadding"
+        assert CleanProcessor.clean(text_with_ufffe, None) == expected
+
+    def test_clean_with_none_process_rule(self):
+        """Test cleaning with None process_rule - only default cleaning applied."""
+        text = "Hello<|World\x00"
+        expected = "Hello<World"
+        assert CleanProcessor.clean(text, None) == expected
+
+        # <| becomes <, |> becomes >, control chars and U+FFFE are removed
+        assert CleanProcessor.clean("<|<|\x00|>|>\ufffe", None) == "<<>>"
+
+    def test_clean_multiple_markdown_links_preserved(self):
+        """Test multiple markdown links are all preserved."""
+        process_rule = {"rules": {"pre_processing_rules": [{"id": "remove_urls_emails", "enabled": True}]}}
+
+        text = "[One](https://one.com) [Two](http://two.org) 
diff --git a/api/tests/unit_tests/core/rag/cleaner/__init__.py b/api/tests/unit_tests/core/rag/cleaner/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/tests/unit_tests/core/rag/cleaner/test_clean_processor.py b/api/tests/unit_tests/core/rag/cleaner/test_clean_processor.py
new file mode 100644
index 0000000000..65ee62b8dd
--- /dev/null
+++ b/api/tests/unit_tests/core/rag/cleaner/test_clean_processor.py
@@ -0,0 +1,213 @@
+from core.rag.cleaner.clean_processor import CleanProcessor
+
+
+class TestCleanProcessor:
+ """Test cases for CleanProcessor.clean method."""
+
+ def test_clean_default_removal_of_invalid_symbols(self):
+ """Test default cleaning removes invalid symbols."""
+ # Test <| replacement
+ assert CleanProcessor.clean("text<|with<|invalid", None) == "text<with<invalid"
+
+ # Test |> replacement
+ assert CleanProcessor.clean("text|>with|>invalid", None) == "text>with>invalid"
+
+ # Test removal of control characters
+ text_with_control = "normal\x00text\x1fwith\x07control\x7fchars"
+ expected = "normaltextwithcontrolchars"
+ assert CleanProcessor.clean(text_with_control, None) == expected
+
+ # Test U+FFFE removal
+ text_with_ufffe = "normal\ufffepadding"
+ expected = "normalpadding"
+ assert CleanProcessor.clean(text_with_ufffe, None) == expected
+
+ def test_clean_with_none_process_rule(self):
+ """Test cleaning with None process_rule - only default cleaning applied."""
+ text = "Hello<|World\x00"
+ expected = "Hello<World"
+ assert CleanProcessor.clean(text, None) == expected
+
+ def test_clean_combined_invalid_symbols(self):
+ """Test combined defaults: <| becomes <, |> becomes >, control chars and U+FFFE are removed."""
+ text = "<<|\x00\ufffe|>>"
+ assert CleanProcessor.clean(text, None) == "<<>>"
+
+ def test_clean_multiple_markdown_links_preserved(self):
+ """Test multiple markdown links are all preserved."""
+ process_rule = {"rules": {"pre_processing_rules": [{"id": "remove_urls_emails", "enabled": True}]}}
+
+ text = "[One](https://one.com) [Two](http://two.org) [Three](https://three.net)"
+ expected = "[One](https://one.com) [Two](http://two.org) [Three](https://three.net)"
+ assert CleanProcessor.clean(text, process_rule) == expected
+
+ def test_clean_markdown_link_text_as_url(self):
+ """Test markdown link where the link text itself is a URL."""
+ process_rule = {"rules": {"pre_processing_rules": [{"id": "remove_urls_emails", "enabled": True}]}}
+
+ # Link text that looks like URL should be preserved
+ text = "[https://text-url.com](https://actual-url.com)"
+ expected = "[https://text-url.com](https://actual-url.com)"
+ assert CleanProcessor.clean(text, process_rule) == expected
+
+ # Text URL without markdown should be removed
+ text = "https://text-url.com https://actual-url.com"
+ expected = " "
+ assert CleanProcessor.clean(text, process_rule) == expected
+
+ def test_clean_complex_markdown_link_content(self):
+ """Test markdown links with complex content - known limitation with brackets in link text."""
+ process_rule = {"rules": {"pre_processing_rules": [{"id": "remove_urls_emails", "enabled": True}]}}
+
+ # Note: The regex pattern [^\]]* cannot handle ] within link text
+ # This is a known limitation - the pattern stops at the first ]
+ text = "[Text with [brackets] and (parens)](https://example.com)"
+ # Actual behavior: only matches up to first ], URL gets removed
+ expected = "[Text with [brackets] and (parens)]("
+ assert CleanProcessor.clean(text, process_rule) == expected
+
+ # Test that properly formatted markdown links work
+ text = "[Text with (parens) and symbols](https://example.com)"
+ expected = "[Text with (parens) and symbols](https://example.com)"
+ assert CleanProcessor.clean(text, process_rule) == expected
diff --git a/api/tests/unit_tests/core/rag/datasource/vdb/pgvector/__init__.py b/api/tests/unit_tests/core/rag/datasource/vdb/pgvector/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/tests/unit_tests/core/rag/datasource/vdb/pgvector/test_pgvector.py b/api/tests/unit_tests/core/rag/datasource/vdb/pgvector/test_pgvector.py
new file mode 100644
index 0000000000..4998a9858f
--- /dev/null
+++ b/api/tests/unit_tests/core/rag/datasource/vdb/pgvector/test_pgvector.py
@@ -0,0 +1,327 @@
+import unittest
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+from core.rag.datasource.vdb.pgvector.pgvector import (
+ PGVector,
+ PGVectorConfig,
+)
+
+
+class TestPGVector(unittest.TestCase):
+ def setUp(self):
+ self.config = PGVectorConfig(
+ host="localhost",
+ port=5432,
+ user="test_user",
+ password="test_password",
+ database="test_db",
+ min_connection=1,
+ max_connection=5,
+ pg_bigm=False,
+ )
+ self.collection_name = "test_collection"
+
+ @patch("core.rag.datasource.vdb.pgvector.pgvector.psycopg2.pool.SimpleConnectionPool")
+ def test_init(self, mock_pool_class):
+ """Test PGVector initialization."""
+ mock_pool = MagicMock()
+ mock_pool_class.return_value = mock_pool
+
+ pgvector = PGVector(self.collection_name, self.config)
+
+ assert pgvector._collection_name == self.collection_name
+ assert pgvector.table_name == f"embedding_{self.collection_name}"
+ assert pgvector.get_type() == "pgvector"
+ assert pgvector.pool is not None
+ assert pgvector.pg_bigm is False
+ assert pgvector.index_hash is not None
+
+ @patch("core.rag.datasource.vdb.pgvector.pgvector.psycopg2.pool.SimpleConnectionPool")
+ def test_init_with_pg_bigm(self, mock_pool_class):
+ """Test PGVector initialization with pg_bigm enabled."""
+ config = PGVectorConfig(
+ host="localhost",
+ port=5432,
+ user="test_user",
+ password="test_password", + database="test_db", + min_connection=1, + max_connection=5, + pg_bigm=True, + ) + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + pgvector = PGVector(self.collection_name, config) + + assert pgvector.pg_bigm is True + + @patch("core.rag.datasource.vdb.pgvector.pgvector.psycopg2.pool.SimpleConnectionPool") + @patch("core.rag.datasource.vdb.pgvector.pgvector.redis_client") + def test_create_collection_basic(self, mock_redis, mock_pool_class): + """Test basic collection creation.""" + # Mock Redis operations + mock_lock = MagicMock() + mock_lock.__enter__ = MagicMock() + mock_lock.__exit__ = MagicMock() + mock_redis.lock.return_value = mock_lock + mock_redis.get.return_value = None + mock_redis.set.return_value = None + + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + # Mock connection and cursor + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.getconn.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.return_value = [1] # vector extension exists + + pgvector = PGVector(self.collection_name, self.config) + pgvector._create_collection(1536) + + # Verify SQL execution calls + assert mock_cursor.execute.called + + # Check that CREATE TABLE was called with correct dimension + create_table_calls = [call for call in mock_cursor.execute.call_args_list if "CREATE TABLE" in str(call)] + assert len(create_table_calls) == 1 + assert "vector(1536)" in create_table_calls[0][0][0] + + # Check that CREATE INDEX was called (dimension <= 2000) + create_index_calls = [ + call for call in mock_cursor.execute.call_args_list if "CREATE INDEX" in str(call) and "hnsw" in str(call) + ] + assert len(create_index_calls) == 1 + + # Verify Redis cache was set + mock_redis.set.assert_called_once() + + @patch("core.rag.datasource.vdb.pgvector.pgvector.psycopg2.pool.SimpleConnectionPool") + @patch("core.rag.datasource.vdb.pgvector.pgvector.redis_client") + def test_create_collection_with_large_dimension(self, mock_redis, mock_pool_class): + """Test collection creation with dimension > 2000 (no HNSW index).""" + # Mock Redis operations + mock_lock = MagicMock() + mock_lock.__enter__ = MagicMock() + mock_lock.__exit__ = MagicMock() + mock_redis.lock.return_value = mock_lock + mock_redis.get.return_value = None + mock_redis.set.return_value = None + + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + # Mock connection and cursor + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.getconn.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.return_value = [1] # vector extension exists + + pgvector = PGVector(self.collection_name, self.config) + pgvector._create_collection(3072) # Dimension > 2000 + + # Check that CREATE TABLE was called + create_table_calls = [call for call in mock_cursor.execute.call_args_list if "CREATE TABLE" in str(call)] + assert len(create_table_calls) == 1 + assert "vector(3072)" in create_table_calls[0][0][0] + + # Check that HNSW index was NOT created (dimension > 2000) + hnsw_index_calls = [call for call in mock_cursor.execute.call_args_list if "hnsw" in str(call)] + assert len(hnsw_index_calls) == 0 + + @patch("core.rag.datasource.vdb.pgvector.pgvector.psycopg2.pool.SimpleConnectionPool") + @patch("core.rag.datasource.vdb.pgvector.pgvector.redis_client") + def test_create_collection_with_pg_bigm(self, mock_redis, mock_pool_class): + 
"""Test collection creation with pg_bigm enabled.""" + config = PGVectorConfig( + host="localhost", + port=5432, + user="test_user", + password="test_password", + database="test_db", + min_connection=1, + max_connection=5, + pg_bigm=True, + ) + + # Mock Redis operations + mock_lock = MagicMock() + mock_lock.__enter__ = MagicMock() + mock_lock.__exit__ = MagicMock() + mock_redis.lock.return_value = mock_lock + mock_redis.get.return_value = None + mock_redis.set.return_value = None + + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + # Mock connection and cursor + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.getconn.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + mock_cursor.fetchone.return_value = [1] # vector extension exists + + pgvector = PGVector(self.collection_name, config) + pgvector._create_collection(1536) + + # Check that pg_bigm index was created + bigm_index_calls = [call for call in mock_cursor.execute.call_args_list if "gin_bigm_ops" in str(call)] + assert len(bigm_index_calls) == 1 + + @patch("core.rag.datasource.vdb.pgvector.pgvector.psycopg2.pool.SimpleConnectionPool") + @patch("core.rag.datasource.vdb.pgvector.pgvector.redis_client") + def test_create_collection_creates_vector_extension(self, mock_redis, mock_pool_class): + """Test that vector extension is created if it doesn't exist.""" + # Mock Redis operations + mock_lock = MagicMock() + mock_lock.__enter__ = MagicMock() + mock_lock.__exit__ = MagicMock() + mock_redis.lock.return_value = mock_lock + mock_redis.get.return_value = None + mock_redis.set.return_value = None + + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + # Mock connection and cursor + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.getconn.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + # First call: vector extension doesn't exist + mock_cursor.fetchone.return_value = None + + pgvector = PGVector(self.collection_name, self.config) + pgvector._create_collection(1536) + + # Check that CREATE EXTENSION was called + create_extension_calls = [ + call for call in mock_cursor.execute.call_args_list if "CREATE EXTENSION vector" in str(call) + ] + assert len(create_extension_calls) == 1 + + @patch("core.rag.datasource.vdb.pgvector.pgvector.psycopg2.pool.SimpleConnectionPool") + @patch("core.rag.datasource.vdb.pgvector.pgvector.redis_client") + def test_create_collection_with_cache_hit(self, mock_redis, mock_pool_class): + """Test that collection creation is skipped when cache exists.""" + # Mock Redis operations - cache exists + mock_lock = MagicMock() + mock_lock.__enter__ = MagicMock() + mock_lock.__exit__ = MagicMock() + mock_redis.lock.return_value = mock_lock + mock_redis.get.return_value = 1 # Cache exists + + # Mock the connection pool + mock_pool = MagicMock() + mock_pool_class.return_value = mock_pool + + # Mock connection and cursor + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_pool.getconn.return_value = mock_conn + mock_conn.cursor.return_value = mock_cursor + + pgvector = PGVector(self.collection_name, self.config) + pgvector._create_collection(1536) + + # Check that no SQL was executed (early return due to cache) + assert mock_cursor.execute.call_count == 0 + + @patch("core.rag.datasource.vdb.pgvector.pgvector.psycopg2.pool.SimpleConnectionPool") + @patch("core.rag.datasource.vdb.pgvector.pgvector.redis_client") + def 
test_create_collection_with_redis_lock(self, mock_redis, mock_pool_class):
+ """Test that Redis lock is used during collection creation."""
+ # Mock Redis operations
+ mock_lock = MagicMock()
+ mock_lock.__enter__ = MagicMock()
+ mock_lock.__exit__ = MagicMock()
+ mock_redis.lock.return_value = mock_lock
+ mock_redis.get.return_value = None
+ mock_redis.set.return_value = None
+
+ # Mock the connection pool
+ mock_pool = MagicMock()
+ mock_pool_class.return_value = mock_pool
+
+ # Mock connection and cursor
+ mock_conn = MagicMock()
+ mock_cursor = MagicMock()
+ mock_pool.getconn.return_value = mock_conn
+ mock_conn.cursor.return_value = mock_cursor
+ mock_cursor.fetchone.return_value = [1]  # vector extension exists
+
+ pgvector = PGVector(self.collection_name, self.config)
+ pgvector._create_collection(1536)
+
+ # Verify Redis lock was acquired with correct lock name
+ mock_redis.lock.assert_called_once_with("vector_indexing_test_collection_lock", timeout=20)
+
+ # Verify lock context manager was entered and exited
+ mock_lock.__enter__.assert_called_once()
+ mock_lock.__exit__.assert_called_once()
+
+ @patch("core.rag.datasource.vdb.pgvector.pgvector.psycopg2.pool.SimpleConnectionPool")
+ def test_get_cursor_context_manager(self, mock_pool_class):
+ """Test that _get_cursor properly manages connection lifecycle."""
+ mock_pool = MagicMock()
+ mock_pool_class.return_value = mock_pool
+
+ mock_conn = MagicMock()
+ mock_cursor = MagicMock()
+ mock_pool.getconn.return_value = mock_conn
+ mock_conn.cursor.return_value = mock_cursor
+
+ pgvector = PGVector(self.collection_name, self.config)
+
+ with pgvector._get_cursor() as cur:
+ assert cur == mock_cursor
+
+ # Verify connection lifecycle methods were called
+ mock_pool.getconn.assert_called_once()
+ mock_cursor.close.assert_called_once()
+ mock_conn.commit.assert_called_once()
+ mock_pool.putconn.assert_called_once_with(mock_conn)
+
+
+@pytest.mark.parametrize(
+ "invalid_config_override",
+ [
+ {"host": ""},  # Test empty host
+ {"port": 0},  # Test invalid port
+ {"user": ""},  # Test empty user
+ {"password": ""},  # Test empty password
+ {"database": ""},  # Test empty database
+ {"min_connection": 0},  # Test invalid min_connection
+ {"max_connection": 0},  # Test invalid max_connection
+ {"min_connection": 10, "max_connection": 5},  # Test min > max
+ ],
+)
+def test_config_validation_parametrized(invalid_config_override):
+ """Test configuration validation for various invalid inputs using parametrize."""
+ config = {
+ "host": "localhost",
+ "port": 5432,
+ "user": "test_user",
+ "password": "test_password",
+ "database": "test_db",
+ "min_connection": 1,
+ "max_connection": 5,
+ }
+ config.update(invalid_config_override)
+
+ with pytest.raises(ValueError):
+ PGVectorConfig(**config)
+
+
+if __name__ == "__main__":
+ unittest.main()
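The connection-lifecycle assertions in `test_get_cursor_context_manager` correspond to a pooled-cursor context manager roughly like the following — a sketch of the pattern under test, with names taken from the mocks above rather than from the actual Dify source:

```python
from contextlib import contextmanager


@contextmanager
def _get_cursor(self):
    # Borrow one connection from the pool for a single unit of work
    conn = self.pool.getconn()
    cur = conn.cursor()
    try:
        yield cur
    finally:
        # Release resources even if the caller raises
        cur.close()
        conn.commit()
        self.pool.putconn(conn)
```

The four lifecycle calls here (`getconn`, `close`, `commit`, `putconn`) are exactly the ones the mock assertions require; the try/finally is an assumption about error handling that the test does not pin down.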
diff --git a/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py b/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py
index b4ee1b91b4..4ee04ddebc 100644
--- a/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py
+++ b/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py
@@ -1,5 +1,7 @@
 import os
+from unittest.mock import MagicMock
 
+import pytest
 from pytest_mock import MockerFixture
 
 from core.rag.extractor.firecrawl.firecrawl_app import FirecrawlApp
@@ -25,3 +27,35 @@ def test_firecrawl_web_extractor_crawl_mode(mocker: MockerFixture):
 assert job_id is not None
 assert isinstance(job_id, str)
+
+
+def test_build_url_normalizes_slashes_for_crawl(mocker: MockerFixture):
+ api_key = "fc-"
+ base_urls = ["https://custom.firecrawl.dev", "https://custom.firecrawl.dev/"]
+ for base in base_urls:
+ app = FirecrawlApp(api_key=api_key, base_url=base)
+ mock_post = mocker.patch("httpx.post")
+ mock_resp = MagicMock()
+ mock_resp.status_code = 200
+ mock_resp.json.return_value = {"id": "job123"}
+ mock_post.return_value = mock_resp
+ app.crawl_url("https://example.com", params=None)
+ called_url = mock_post.call_args[0][0]
+ assert called_url == "https://custom.firecrawl.dev/v2/crawl"
+
+
+def test_error_handler_handles_non_json_error_bodies(mocker: MockerFixture):
+ api_key = "fc-"
+ app = FirecrawlApp(api_key=api_key, base_url="https://custom.firecrawl.dev/")
+ mock_post = mocker.patch("httpx.post")
+ mock_resp = MagicMock()
+ mock_resp.status_code = 404
+ mock_resp.text = "Not Found"
+ mock_resp.json.side_effect = Exception("Not JSON")
+ mock_post.return_value = mock_resp
+
+ with pytest.raises(Exception) as excinfo:
+ app.scrape_url("https://example.com")
+
+ # Should not raise a JSONDecodeError; current behavior reports the status code only
+ assert str(excinfo.value) == "Failed to scrape URL. Status code: 404"
diff --git a/api/tests/unit_tests/core/rag/extractor/test_helpers.py b/api/tests/unit_tests/core/rag/extractor/test_helpers.py
new file mode 100644
index 0000000000..edf8735e57
--- /dev/null
+++ b/api/tests/unit_tests/core/rag/extractor/test_helpers.py
@@ -0,0 +1,10 @@
+import tempfile
+
+from core.rag.extractor.helpers import FileEncoding, detect_file_encodings
+
+
+def test_detect_file_encodings() -> None:
+ with tempfile.NamedTemporaryFile(mode="w+t", suffix=".txt") as temp:
+ temp.write("Shared data")
+ temp_path = temp.name
+ assert detect_file_encodings(temp_path) == [FileEncoding(encoding="utf_8", confidence=0.0, language="Unknown")]
diff --git a/api/tests/unit_tests/core/rag/extractor/test_pdf_extractor.py b/api/tests/unit_tests/core/rag/extractor/test_pdf_extractor.py
new file mode 100644
index 0000000000..3167a9a301
--- /dev/null
+++ b/api/tests/unit_tests/core/rag/extractor/test_pdf_extractor.py
@@ -0,0 +1,186 @@
+from types import SimpleNamespace
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+import core.rag.extractor.pdf_extractor as pe
+
+
+@pytest.fixture
+def mock_dependencies(monkeypatch):
+ # Mock storage
+ saves = []
+
+ def save(key, data):
+ saves.append((key, data))
+
+ monkeypatch.setattr(pe, "storage", SimpleNamespace(save=save))
+
+ # Mock db
+ class DummySession:
+ def __init__(self):
+ self.added = []
+ self.committed = False
+
+ def add(self, obj):
+ self.added.append(obj)
+
+ def add_all(self, objs):
+ self.added.extend(objs)
+
+ def commit(self):
+ self.committed = True
+
+ db_stub = SimpleNamespace(session=DummySession())
+ monkeypatch.setattr(pe, "db", db_stub)
+
+ # Mock UploadFile
+ class FakeUploadFile:
+ DEFAULT_ID = "test_file_id"
+
+ def __init__(self, **kwargs):
+ # Assign id from DEFAULT_ID, allow override via kwargs if needed
+ self.id = self.DEFAULT_ID
+ for k, v in kwargs.items():
+ setattr(self, k, v)
+
+ monkeypatch.setattr(pe, "UploadFile", FakeUploadFile)
+
+ # Mock config
+ monkeypatch.setattr(pe.dify_config, "FILES_URL", "http://files.local")
+ monkeypatch.setattr(pe.dify_config, "INTERNAL_FILES_URL", None)
+ monkeypatch.setattr(pe.dify_config, "STORAGE_TYPE", "local")
+
+ return SimpleNamespace(saves=saves, db=db_stub, UploadFile=FakeUploadFile)
+
+
+@pytest.mark.parametrize(
+ ("image_bytes", "expected_mime", 
"expected_ext", "file_id"), + [ + (b"\xff\xd8\xff some jpeg", "image/jpeg", "jpg", "test_file_id_jpeg"), + (b"\x89PNG\r\n\x1a\n some png", "image/png", "png", "test_file_id_png"), + ], +) +def test_extract_images_formats(mock_dependencies, monkeypatch, image_bytes, expected_mime, expected_ext, file_id): + saves = mock_dependencies.saves + db_stub = mock_dependencies.db + + # Customize FakeUploadFile id for this test case. + # Using monkeypatch ensures the class attribute is reset between parameter sets. + monkeypatch.setattr(mock_dependencies.UploadFile, "DEFAULT_ID", file_id) + + # Mock page and image objects + mock_page = MagicMock() + mock_image_obj = MagicMock() + + def mock_extract(buf, fb_format=None): + buf.write(image_bytes) + + mock_image_obj.extract.side_effect = mock_extract + + mock_page.get_objects.return_value = [mock_image_obj] + + extractor = pe.PdfExtractor(file_path="test.pdf", tenant_id="t1", user_id="u1") + + # We need to handle the import inside _extract_images + with patch("pypdfium2.raw") as mock_raw: + mock_raw.FPDF_PAGEOBJ_IMAGE = 1 + result = extractor._extract_images(mock_page) + + assert f"![image](http://files.local/files/{file_id}/file-preview)" in result + assert len(saves) == 1 + assert saves[0][1] == image_bytes + assert len(db_stub.session.added) == 1 + assert db_stub.session.added[0].tenant_id == "t1" + assert db_stub.session.added[0].size == len(image_bytes) + assert db_stub.session.added[0].mime_type == expected_mime + assert db_stub.session.added[0].extension == expected_ext + assert db_stub.session.committed is True + + +@pytest.mark.parametrize( + ("get_objects_side_effect", "get_objects_return_value"), + [ + (None, []), # Empty list + (None, None), # None returned + (Exception("Failed to get objects"), None), # Exception raised + ], +) +def test_extract_images_get_objects_scenarios(mock_dependencies, get_objects_side_effect, get_objects_return_value): + mock_page = MagicMock() + if get_objects_side_effect: + mock_page.get_objects.side_effect = get_objects_side_effect + else: + mock_page.get_objects.return_value = get_objects_return_value + + extractor = pe.PdfExtractor(file_path="test.pdf", tenant_id="t1", user_id="u1") + + with patch("pypdfium2.raw") as mock_raw: + mock_raw.FPDF_PAGEOBJ_IMAGE = 1 + result = extractor._extract_images(mock_page) + + assert result == "" + + +def test_extract_calls_extract_images(mock_dependencies, monkeypatch): + # Mock pypdfium2 + mock_pdf_doc = MagicMock() + mock_page = MagicMock() + mock_pdf_doc.__iter__.return_value = [mock_page] + + # Mock text extraction + mock_text_page = MagicMock() + mock_text_page.get_text_range.return_value = "Page text content" + mock_page.get_textpage.return_value = mock_text_page + + with patch("pypdfium2.PdfDocument", return_value=mock_pdf_doc): + # Mock Blob + mock_blob = MagicMock() + mock_blob.source = "test.pdf" + with patch("core.rag.extractor.pdf_extractor.Blob.from_path", return_value=mock_blob): + extractor = pe.PdfExtractor(file_path="test.pdf", tenant_id="t1", user_id="u1") + + # Mock _extract_images to return a known string + monkeypatch.setattr(extractor, "_extract_images", lambda p: "![image](img_url)") + + documents = list(extractor.extract()) + + assert len(documents) == 1 + assert "Page text content" in documents[0].page_content + assert "![image](img_url)" in documents[0].page_content + assert documents[0].metadata["page"] == 0 + + +def test_extract_images_failures(mock_dependencies): + saves = mock_dependencies.saves + db_stub = mock_dependencies.db + + # Mock page and 
image objects + mock_page = MagicMock() + mock_image_obj_fail = MagicMock() + mock_image_obj_ok = MagicMock() + + # First image raises exception + mock_image_obj_fail.extract.side_effect = Exception("Extraction failure") + + # Second image is OK (JPEG) + jpeg_bytes = b"\xff\xd8\xff some image data" + + def mock_extract(buf, fb_format=None): + buf.write(jpeg_bytes) + + mock_image_obj_ok.extract.side_effect = mock_extract + + mock_page.get_objects.return_value = [mock_image_obj_fail, mock_image_obj_ok] + + extractor = pe.PdfExtractor(file_path="test.pdf", tenant_id="t1", user_id="u1") + + with patch("pypdfium2.raw") as mock_raw: + mock_raw.FPDF_PAGEOBJ_IMAGE = 1 + result = extractor._extract_images(mock_page) + + # Should have one success + assert "![image](http://files.local/files/test_file_id/file-preview)" in result + assert len(saves) == 1 + assert saves[0][1] == jpeg_bytes + assert db_stub.session.committed is True diff --git a/api/tests/unit_tests/core/rag/extractor/test_word_extractor.py b/api/tests/unit_tests/core/rag/extractor/test_word_extractor.py index fd0b0e2e44..f9e59a5f05 100644 --- a/api/tests/unit_tests/core/rag/extractor/test_word_extractor.py +++ b/api/tests/unit_tests/core/rag/extractor/test_word_extractor.py @@ -1,8 +1,12 @@ """Primarily used for testing merged cell scenarios""" +import os +import tempfile from types import SimpleNamespace from docx import Document +from docx.oxml import OxmlElement +from docx.oxml.ns import qn import core.rag.extractor.word_extractor as we from core.rag.extractor.word_extractor import WordExtractor @@ -132,3 +136,143 @@ def test_extract_images_from_docx(monkeypatch): # DB interactions should be recorded assert len(db_stub.session.added) == 2 assert db_stub.session.committed is True + + +def test_extract_images_from_docx_uses_internal_files_url(): + """Test that INTERNAL_FILES_URL takes precedence over FILES_URL for plugin access.""" + # Test the URL generation logic directly + from configs import dify_config + + # Mock the configuration values + original_files_url = getattr(dify_config, "FILES_URL", None) + original_internal_files_url = getattr(dify_config, "INTERNAL_FILES_URL", None) + + try: + # Set both URLs - INTERNAL should take precedence + dify_config.FILES_URL = "http://external.example.com" + dify_config.INTERNAL_FILES_URL = "http://internal.docker:5001" + + # Test the URL generation logic (same as in word_extractor.py) + upload_file_id = "test_file_id" + + # This is the pattern we fixed in the word extractor + base_url = dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL + generated_url = f"{base_url}/files/{upload_file_id}/file-preview" + + # Verify that INTERNAL_FILES_URL is used instead of FILES_URL + assert "http://internal.docker:5001" in generated_url, f"Expected internal URL, got: {generated_url}" + assert "http://external.example.com" not in generated_url, f"Should not use external URL, got: {generated_url}" + + finally: + # Restore original values + if original_files_url is not None: + dify_config.FILES_URL = original_files_url + if original_internal_files_url is not None: + dify_config.INTERNAL_FILES_URL = original_internal_files_url + + +def test_extract_hyperlinks(monkeypatch): + # Mock db and storage to avoid issues during image extraction (even if no images are present) + monkeypatch.setattr(we, "storage", SimpleNamespace(save=lambda k, d: None)) + db_stub = SimpleNamespace(session=SimpleNamespace(add=lambda o: None, commit=lambda: None)) + monkeypatch.setattr(we, "db", db_stub) + 
monkeypatch.setattr(we.dify_config, "FILES_URL", "http://files.local", raising=False) + monkeypatch.setattr(we.dify_config, "STORAGE_TYPE", "local", raising=False) + + doc = Document() + p = doc.add_paragraph("Visit ") + + # Adding a hyperlink manually + r_id = "rId99" + hyperlink = OxmlElement("w:hyperlink") + hyperlink.set(qn("r:id"), r_id) + + new_run = OxmlElement("w:r") + t = OxmlElement("w:t") + t.text = "Dify" + new_run.append(t) + hyperlink.append(new_run) + p._p.append(hyperlink) + + # Add relationship to the part + doc.part.rels.add_relationship( + "http://schemas.openxmlformats.org/officeDocument/2006/relationships/hyperlink", + "https://dify.ai", + r_id, + is_external=True, + ) + + with tempfile.NamedTemporaryFile(suffix=".docx", delete=False) as tmp: + doc.save(tmp.name) + tmp_path = tmp.name + + try: + extractor = WordExtractor(tmp_path, "tenant_id", "user_id") + docs = extractor.extract() + # Verify modern hyperlink extraction + assert "Visit[Dify](https://dify.ai)" in docs[0].page_content + finally: + if os.path.exists(tmp_path): + os.remove(tmp_path) + + +def test_extract_legacy_hyperlinks(monkeypatch): + # Mock db and storage + monkeypatch.setattr(we, "storage", SimpleNamespace(save=lambda k, d: None)) + db_stub = SimpleNamespace(session=SimpleNamespace(add=lambda o: None, commit=lambda: None)) + monkeypatch.setattr(we, "db", db_stub) + monkeypatch.setattr(we.dify_config, "FILES_URL", "http://files.local", raising=False) + monkeypatch.setattr(we.dify_config, "STORAGE_TYPE", "local", raising=False) + + doc = Document() + p = doc.add_paragraph() + + # Construct a legacy HYPERLINK field: + # 1. w:fldChar (begin) + # 2. w:instrText (HYPERLINK "http://example.com") + # 3. w:fldChar (separate) + # 4. w:r (visible text "Example") + # 5. w:fldChar (end) + + run1 = OxmlElement("w:r") + fldCharBegin = OxmlElement("w:fldChar") + fldCharBegin.set(qn("w:fldCharType"), "begin") + run1.append(fldCharBegin) + p._p.append(run1) + + run2 = OxmlElement("w:r") + instrText = OxmlElement("w:instrText") + instrText.text = ' HYPERLINK "http://example.com" ' + run2.append(instrText) + p._p.append(run2) + + run3 = OxmlElement("w:r") + fldCharSep = OxmlElement("w:fldChar") + fldCharSep.set(qn("w:fldCharType"), "separate") + run3.append(fldCharSep) + p._p.append(run3) + + run4 = OxmlElement("w:r") + t4 = OxmlElement("w:t") + t4.text = "Example" + run4.append(t4) + p._p.append(run4) + + run5 = OxmlElement("w:r") + fldCharEnd = OxmlElement("w:fldChar") + fldCharEnd.set(qn("w:fldCharType"), "end") + run5.append(fldCharEnd) + p._p.append(run5) + + with tempfile.NamedTemporaryFile(suffix=".docx", delete=False) as tmp: + doc.save(tmp.name) + tmp_path = tmp.name + + try: + extractor = WordExtractor(tmp_path, "tenant_id", "user_id") + docs = extractor.extract() + # Verify legacy hyperlink extraction + assert "[Example](http://example.com)" in docs[0].page_content + finally: + if os.path.exists(tmp_path): + os.remove(tmp_path) diff --git a/api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval.py b/api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval.py index affd6c648f..ca08cb0591 100644 --- a/api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval.py +++ b/api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval.py @@ -73,6 +73,7 @@ import pytest from core.rag.datasource.retrieval_service import RetrievalService from core.rag.models.document import Document +from core.rag.retrieval.dataset_retrieval import DatasetRetrieval from core.rag.retrieval.retrieval_methods import 
RetrievalMethod
 from models.dataset import Dataset
 
@@ -421,7 +422,18 @@ class TestRetrievalService:
 # In real code, this waits for all futures to complete
 # In tests, futures complete immediately, so wait is a no-op
 with patch("core.rag.datasource.retrieval_service.concurrent.futures.wait"):
- yield mock_executor
+ # Mock concurrent.futures.as_completed for early error propagation
+ # In real code, this yields futures as they complete
+ # In tests, we yield all futures immediately since they're already done
+ def mock_as_completed(futures_list, timeout=None):
+ """Mock as_completed that yields futures immediately."""
+ yield from futures_list
+
+ with patch(
+ "core.rag.datasource.retrieval_service.concurrent.futures.as_completed",
+ side_effect=mock_as_completed,
+ ):
+ yield mock_executor
 
 # ==================== Vector Search Tests ====================
 
@@ -1507,6 +1519,282 @@ class TestRetrievalService:
 call_kwargs = mock_retrieve.call_args.kwargs
 assert call_kwargs["reranking_model"] == reranking_model
+
+ # ==================== Multiple Retrieve Thread Tests ====================
+
+ @patch("core.rag.retrieval.dataset_retrieval.DataPostProcessor")
+ @patch("core.rag.retrieval.dataset_retrieval.DatasetRetrieval._retriever")
+ def test_multiple_retrieve_thread_skips_second_reranking_with_single_dataset(
+ self, mock_retriever, mock_data_processor_class, mock_flask_app, mock_dataset
+ ):
+ """
+ Test that _multiple_retrieve_thread skips second reranking when dataset_count is 1.
+
+ When there is only one dataset, the second reranking is unnecessary
+ because the documents are already ranked from the first retrieval.
+ This optimization avoids the overhead of reranking when it won't
+ provide any benefit.
+
+ Verifies:
+ - DataPostProcessor is NOT called when dataset_count == 1
+ - Documents are still added to all_documents
+ - Standard scoring logic is applied instead
+ """
+ # Arrange
+ dataset_retrieval = DatasetRetrieval()
+ tenant_id = str(uuid4())
+
+ # Create test documents
+ doc1 = Document(
+ page_content="Test content 1",
+ metadata={"doc_id": "doc1", "score": 0.9, "document_id": str(uuid4()), "dataset_id": mock_dataset.id},
+ provider="dify",
+ )
+ doc2 = Document(
+ page_content="Test content 2",
+ metadata={"doc_id": "doc2", "score": 0.8, "document_id": str(uuid4()), "dataset_id": mock_dataset.id},
+ provider="dify",
+ )
+
+ # Mock _retriever to return documents
+ def side_effect_retriever(
+ flask_app, dataset_id, query, top_k, all_documents, document_ids_filter, metadata_condition, attachment_ids
+ ):
+ all_documents.extend([doc1, doc2])
+
+ mock_retriever.side_effect = side_effect_retriever
+
+ # Set up dataset with high_quality indexing
+ mock_dataset.indexing_technique = "high_quality"
+
+ all_documents = []
+
+ # Act - Call with dataset_count = 1
+ dataset_retrieval._multiple_retrieve_thread(
+ flask_app=mock_flask_app,
+ available_datasets=[mock_dataset],
+ metadata_condition=None,
+ metadata_filter_document_ids=None,
+ all_documents=all_documents,
+ tenant_id=tenant_id,
+ reranking_enable=True,
+ reranking_mode="reranking_model",
+ reranking_model={"reranking_provider_name": "cohere", "reranking_model_name": "rerank-v2"},
+ weights=None,
+ top_k=5,
+ score_threshold=0.5,
+ query="test query",
+ attachment_id=None,
+ dataset_count=1,  # Single dataset - should skip second reranking
+ )
+
+ # Assert
+ # DataPostProcessor should NOT be called (second reranking skipped)
+ mock_data_processor_class.assert_not_called()
+
+ # Documents should still be added to all_documents
+ assert len(all_documents) == 2
+ assert all_documents[0].page_content == "Test content 1"
+ assert all_documents[1].page_content == "Test content 2"
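The optimization these thread tests pin down reduces to a guard on `dataset_count`. A sketch of the assumed control flow at the tail of `_multiple_retrieve_thread` — the `DataPostProcessor` construction matches the positional arguments asserted in the next test, the `calculate_vector_score` argument order is inferred from its `call_args[0][1] == 5` assertion, and everything else is simplified rather than taken from the Dify source:

```python
def merge_and_rank(documents, *, dataset_count, reranking_enable, tenant_id,
                   reranking_mode, reranking_model, weights, query,
                   score_threshold, top_k, all_documents):
    # Second reranking only pays off when results come from several datasets
    if reranking_enable and dataset_count > 1:
        processor = DataPostProcessor(tenant_id, reranking_mode, reranking_model, weights, False)
        all_documents.extend(
            processor.invoke(query=query, documents=documents,
                             score_threshold=score_threshold, top_n=top_k)
        )
    else:
        # Single dataset: results are already ranked from the first retrieval,
        # so fall through to standard scoring instead of a redundant rerank
        all_documents.extend(calculate_vector_score(documents, top_k, score_threshold))
```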
+ @patch("core.rag.retrieval.dataset_retrieval.DataPostProcessor")
+ @patch("core.rag.retrieval.dataset_retrieval.DatasetRetrieval._retriever")
+ @patch("core.rag.retrieval.dataset_retrieval.DatasetRetrieval.calculate_vector_score")
+ def test_multiple_retrieve_thread_performs_second_reranking_with_multiple_datasets(
+ self, mock_calculate_vector_score, mock_retriever, mock_data_processor_class, mock_flask_app, mock_dataset
+ ):
+ """
+ Test that _multiple_retrieve_thread performs second reranking when dataset_count > 1.
+
+ When there are multiple datasets, the second reranking is necessary
+ to merge and re-rank results from different datasets. This ensures
+ the most relevant documents across all datasets are returned.
+
+ Verifies:
+ - DataPostProcessor IS called when dataset_count > 1
+ - Reranking is applied with correct parameters
+ - Documents are processed correctly
+ """
+ # Arrange
+ dataset_retrieval = DatasetRetrieval()
+ tenant_id = str(uuid4())
+
+ # Create test documents
+ doc1 = Document(
+ page_content="Test content 1",
+ metadata={"doc_id": "doc1", "score": 0.7, "document_id": str(uuid4()), "dataset_id": mock_dataset.id},
+ provider="dify",
+ )
+ doc2 = Document(
+ page_content="Test content 2",
+ metadata={"doc_id": "doc2", "score": 0.6, "document_id": str(uuid4()), "dataset_id": mock_dataset.id},
+ provider="dify",
+ )
+
+ # Mock _retriever to return documents
+ def side_effect_retriever(
+ flask_app, dataset_id, query, top_k, all_documents, document_ids_filter, metadata_condition, attachment_ids
+ ):
+ all_documents.extend([doc1, doc2])
+
+ mock_retriever.side_effect = side_effect_retriever
+
+ # Set up dataset with high_quality indexing
+ mock_dataset.indexing_technique = "high_quality"
+
+ # Mock DataPostProcessor instance and its invoke method
+ mock_processor_instance = Mock()
+ # Simulate reranking - return documents in reversed order with updated scores
+ reranked_docs = [
+ Document(
+ page_content="Test content 2",
+ metadata={"doc_id": "doc2", "score": 0.95, "document_id": str(uuid4()), "dataset_id": mock_dataset.id},
+ provider="dify",
+ ),
+ Document(
+ page_content="Test content 1",
+ metadata={"doc_id": "doc1", "score": 0.85, "document_id": str(uuid4()), "dataset_id": mock_dataset.id},
+ provider="dify",
+ ),
+ ]
+ mock_processor_instance.invoke.return_value = reranked_docs
+ mock_data_processor_class.return_value = mock_processor_instance
+
+ all_documents = []
+
+ # Create second dataset
+ mock_dataset2 = Mock(spec=Dataset)
+ mock_dataset2.id = str(uuid4())
+ mock_dataset2.indexing_technique = "high_quality"
+ mock_dataset2.provider = "dify"
+
+ # Act - Call with dataset_count = 2
+ dataset_retrieval._multiple_retrieve_thread(
+ flask_app=mock_flask_app,
+ available_datasets=[mock_dataset, mock_dataset2],
+ metadata_condition=None,
+ metadata_filter_document_ids=None,
+ all_documents=all_documents,
+ tenant_id=tenant_id,
+ reranking_enable=True,
+ reranking_mode="reranking_model",
+ reranking_model={"reranking_provider_name": "cohere", "reranking_model_name": "rerank-v2"},
+ weights=None,
+ top_k=5,
+ score_threshold=0.5,
+ query="test query",
+ attachment_id=None,
+ dataset_count=2,  # Multiple datasets - should perform second reranking
+ )
+
+ # Assert
+ # DataPostProcessor SHOULD be called (second reranking performed)
+ mock_data_processor_class.assert_called_once_with(
+ tenant_id,
+ "reranking_model", 
+ {"reranking_provider_name": "cohere", "reranking_model_name": "rerank-v2"}, + None, + False, + ) + + # Verify invoke was called with correct parameters + mock_processor_instance.invoke.assert_called_once() + + # Documents should be added to all_documents after reranking + assert len(all_documents) == 2 + # The reranked order should be reflected + assert all_documents[0].page_content == "Test content 2" + assert all_documents[1].page_content == "Test content 1" + + @patch("core.rag.retrieval.dataset_retrieval.DataPostProcessor") + @patch("core.rag.retrieval.dataset_retrieval.DatasetRetrieval._retriever") + @patch("core.rag.retrieval.dataset_retrieval.DatasetRetrieval.calculate_vector_score") + def test_multiple_retrieve_thread_single_dataset_uses_standard_scoring( + self, mock_calculate_vector_score, mock_retriever, mock_data_processor_class, mock_flask_app, mock_dataset + ): + """ + Test that _multiple_retrieve_thread uses standard scoring when dataset_count is 1 + and reranking is enabled. + + When there's only one dataset, instead of using DataPostProcessor, + the method should fall through to the standard scoring logic + (calculate_vector_score for high_quality datasets). + + Verifies: + - DataPostProcessor is NOT called + - calculate_vector_score IS called for high_quality indexing + - Documents are scored correctly + """ + # Arrange + dataset_retrieval = DatasetRetrieval() + tenant_id = str(uuid4()) + + # Create test documents + doc1 = Document( + page_content="Test content 1", + metadata={"doc_id": "doc1", "score": 0.9, "document_id": str(uuid4()), "dataset_id": mock_dataset.id}, + provider="dify", + ) + doc2 = Document( + page_content="Test content 2", + metadata={"doc_id": "doc2", "score": 0.8, "document_id": str(uuid4()), "dataset_id": mock_dataset.id}, + provider="dify", + ) + + # Mock _retriever to return documents + def side_effect_retriever( + flask_app, dataset_id, query, top_k, all_documents, document_ids_filter, metadata_condition, attachment_ids + ): + all_documents.extend([doc1, doc2]) + + mock_retriever.side_effect = side_effect_retriever + + # Set up dataset with high_quality indexing + mock_dataset.indexing_technique = "high_quality" + + # Mock calculate_vector_score to return scored documents + scored_docs = [ + Document( + page_content="Test content 1", + metadata={"doc_id": "doc1", "score": 0.95, "document_id": str(uuid4()), "dataset_id": mock_dataset.id}, + provider="dify", + ), + ] + mock_calculate_vector_score.return_value = scored_docs + + all_documents = [] + + # Act - Call with dataset_count = 1 + dataset_retrieval._multiple_retrieve_thread( + flask_app=mock_flask_app, + available_datasets=[mock_dataset], + metadata_condition=None, + metadata_filter_document_ids=None, + all_documents=all_documents, + tenant_id=tenant_id, + reranking_enable=True, # Reranking enabled but should be skipped for single dataset + reranking_mode="reranking_model", + reranking_model={"reranking_provider_name": "cohere", "reranking_model_name": "rerank-v2"}, + weights=None, + top_k=5, + score_threshold=0.5, + query="test query", + attachment_id=None, + dataset_count=1, + ) + + # Assert + # DataPostProcessor should NOT be called + mock_data_processor_class.assert_not_called() + + # calculate_vector_score SHOULD be called for high_quality datasets + mock_calculate_vector_score.assert_called_once() + call_args = mock_calculate_vector_score.call_args + assert call_args[0][1] == 5 # top_k + + # Documents should be added after standard scoring + assert len(all_documents) == 1 + assert 
all_documents[0].page_content == "Test content 1"
+
 
 class TestRetrievalMethods:
 """
diff --git a/api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval_metadata_filter.py b/api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval_metadata_filter.py
new file mode 100644
index 0000000000..07d6e51e4b
--- /dev/null
+++ b/api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval_metadata_filter.py
@@ -0,0 +1,873 @@
+"""
+Unit tests for DatasetRetrieval.process_metadata_filter_func.
+
+This module provides comprehensive test coverage for the process_metadata_filter_func
+method in the DatasetRetrieval class, which is responsible for building SQLAlchemy
+filter expressions based on metadata filtering conditions.
+
+Conditions Tested:
+==================
+1. **String Conditions**: contains, not contains, start with, end with
+2. **Equality Conditions**: is / =, is not / ≠
+3. **Null Conditions**: empty, not empty
+4. **Numeric Comparisons**: before / <, after / >, ≤ / <=, ≥ / >=
+5. **List Conditions**: in
+6. **Edge Cases**: None values, different data types (str, int, float)
+
+Test Architecture:
+==================
+- Direct instantiation of DatasetRetrieval
+- Mocking of DatasetDocument model attributes
+- Verification of SQLAlchemy filter expressions
+- Follows Arrange-Act-Assert (AAA) pattern
+
+Running Tests:
+==============
+    # Run all tests in this module
+    uv run --project api pytest \
+        api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval_metadata_filter.py -v
+
+    # Run a specific test
+    uv run --project api pytest \
+        api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval_metadata_filter.py::\
+TestProcessMetadataFilterFunc::test_contains_condition_string_value -v
+"""
+
+from unittest.mock import MagicMock
+
+import pytest
+
+from core.rag.retrieval.dataset_retrieval import DatasetRetrieval
+
+
+class TestProcessMetadataFilterFunc:
+ """
+ Comprehensive test suite for process_metadata_filter_func method.
+
+ This test class validates all metadata filtering conditions supported by
+ the DatasetRetrieval class, including string operations, numeric comparisons,
+ null checks, and list operations.
+
+ Method Signature:
+ ==================
+ def process_metadata_filter_func(
+     self, sequence: int, condition: str, metadata_name: str, value: Any | None, filters: list
+ ) -> list:
+
+ The method builds SQLAlchemy filter expressions by:
+ 1. Validating value is not None (except for empty/not empty conditions)
+ 2. Using DatasetDocument.doc_metadata JSON field operations
+ 3. Adding appropriate SQLAlchemy expressions to the filters list
+ 4. Returning the updated filters list
+
+ Mocking Strategy:
+ ==================
+ - Mock DatasetDocument.doc_metadata to avoid database dependencies
+ - Verify filter expressions are created correctly
+ - Test with various data types (str, int, float, list)
+ """
+
+ @pytest.fixture
+ def retrieval(self):
+ """
+ Create a DatasetRetrieval instance for testing.
+
+ Returns:
+ DatasetRetrieval: Instance to test process_metadata_filter_func
+ """
+ return DatasetRetrieval()
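Taken together, the behaviors asserted below imply a dispatch roughly like the following — a sketch under assumed names (`DatasetDocument` and the JSON accessors mirror the mocking strategy described above, not the verbatim Dify implementation):

```python
from typing import Any

from models.dataset import Document as DatasetDocument  # assumed import path


def process_metadata_filter_func(self, sequence: int, condition: str,
                                 metadata_name: str, value: Any | None, filters: list) -> list:
    # Only the null checks may run without a value; everything else is a no-op on None
    if value is None and condition not in ("empty", "not empty"):
        return filters
    field = DatasetDocument.doc_metadata[metadata_name]
    if condition == "contains":
        filters.append(field.as_string().like(f"%{value}%"))
    elif condition in ("is", "="):
        if isinstance(value, (int, float)):
            filters.append(field.as_float() == value)
        else:
            filters.append(field.as_string() == value)
    elif condition == "empty":
        filters.append(field.is_(None))
    elif condition in ("before", "<"):
        filters.append(field.as_float() < value)
    # ... the remaining conditions ("not contains", "in", ">=", etc.) follow the same pattern
    return filters
```

Each branch appends exactly one expression and returns the same list it was given, which is why every test below asserts both `result == filters` and the filter count.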
+ @pytest.fixture
+ def mock_doc_metadata(self):
+ """
+ Mock the DatasetDocument.doc_metadata JSON field.
+
+ The method uses DatasetDocument.doc_metadata[metadata_name] to access
+ JSON fields. We mock this to avoid database dependencies.
+
+ Returns:
+ Mock: Mocked doc_metadata attribute
+ """
+ mock_metadata_field = MagicMock()
+
+ # Create mock for string access
+ mock_string_access = MagicMock()
+ mock_string_access.like = MagicMock()
+ mock_string_access.notlike = MagicMock()
+ mock_string_access.__eq__ = MagicMock(return_value=MagicMock())
+ mock_string_access.__ne__ = MagicMock(return_value=MagicMock())
+ mock_string_access.in_ = MagicMock(return_value=MagicMock())
+
+ # Create mock for float access (for numeric comparisons)
+ mock_float_access = MagicMock()
+ mock_float_access.__eq__ = MagicMock(return_value=MagicMock())
+ mock_float_access.__ne__ = MagicMock(return_value=MagicMock())
+ mock_float_access.__lt__ = MagicMock(return_value=MagicMock())
+ mock_float_access.__gt__ = MagicMock(return_value=MagicMock())
+ mock_float_access.__le__ = MagicMock(return_value=MagicMock())
+ mock_float_access.__ge__ = MagicMock(return_value=MagicMock())
+
+ # Create mock for null checks
+ mock_null_access = MagicMock()
+ mock_null_access.is_ = MagicMock(return_value=MagicMock())
+ mock_null_access.isnot = MagicMock(return_value=MagicMock())
+
+ # Setup __getitem__ to return appropriate mock based on usage
+ def getitem_side_effect(name):
+ if name in ["author", "title", "category"]:
+ return mock_string_access
+ elif name in ["year", "price", "rating"]:
+ return mock_float_access
+ else:
+ return mock_string_access
+
+ mock_metadata_field.__getitem__ = MagicMock(side_effect=getitem_side_effect)
+ mock_metadata_field.as_string.return_value = mock_string_access
+ mock_metadata_field.as_float.return_value = mock_float_access
+ # Route null checks through the shared accessors returned by __getitem__
+ mock_string_access.is_ = mock_null_access.is_
+ mock_string_access.isnot = mock_null_access.isnot
+ mock_float_access.is_ = mock_null_access.is_
+ mock_float_access.isnot = mock_null_access.isnot
+
+ return mock_metadata_field
+
+ # ==================== String Condition Tests ====================
+
+ def test_contains_condition_string_value(self, retrieval):
+ """
+ Test 'contains' condition with string value.
+
+ Verifies:
+ - Filters list is populated with LIKE expression
+ - Pattern matching uses %value% syntax
+ """
+ filters = []
+ sequence = 0
+ condition = "contains"
+ metadata_name = "author"
+ value = "John"
+
+ result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters)
+
+ assert result == filters
+ assert len(filters) == 1
+
+ def test_not_contains_condition(self, retrieval):
+ """
+ Test 'not contains' condition.
+
+ Verifies:
+ - Filters list is populated with NOT LIKE expression
+ - Pattern matching uses %value% syntax with negation
+ """
+ filters = []
+ sequence = 0
+ condition = "not contains"
+ metadata_name = "title"
+ value = "banned"
+
+ result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters)
+
+ assert result == filters
+ assert len(filters) == 1
+
+ def test_start_with_condition(self, retrieval):
+ """
+ Test 'start with' condition.
+
+ Verifies:
+ - Filters list is populated with LIKE expression
+ - Pattern matching uses value% syntax
+ """
+ filters = []
+ sequence = 0
+ condition = "start with"
+ metadata_name = "category"
+ value = "tech"
+
+ result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters)
+
+ assert result == filters
+ assert len(filters) == 1
+
+ def test_end_with_condition(self, retrieval):
+ """
+ Test 'end with' condition. 
+ + Verifies: + - Filters list is populated with LIKE expression + - Pattern matching uses %value syntax + """ + filters = [] + sequence = 0 + condition = "end with" + metadata_name = "filename" + value = ".pdf" + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + # ==================== Equality Condition Tests ==================== + + def test_is_condition_with_string_value(self, retrieval): + """ + Test 'is' (=) condition with string value. + + Verifies: + - Filters list is populated with equality expression + - String comparison is used + """ + filters = [] + sequence = 0 + condition = "is" + metadata_name = "author" + value = "Jane Doe" + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_equals_condition_with_string_value(self, retrieval): + """ + Test '=' condition with string value. + + Verifies: + - Same behavior as 'is' condition + - String comparison is used + """ + filters = [] + sequence = 0 + condition = "=" + metadata_name = "category" + value = "technology" + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_is_condition_with_int_value(self, retrieval): + """ + Test 'is' condition with integer value. + + Verifies: + - Numeric comparison is used + - as_float() is called on the metadata field + """ + filters = [] + sequence = 0 + condition = "is" + metadata_name = "year" + value = 2023 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_is_condition_with_float_value(self, retrieval): + """ + Test 'is' condition with float value. + + Verifies: + - Numeric comparison is used + - as_float() is called on the metadata field + """ + filters = [] + sequence = 0 + condition = "is" + metadata_name = "price" + value = 19.99 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_is_not_condition_with_string_value(self, retrieval): + """ + Test 'is not' (≠) condition with string value. + + Verifies: + - Filters list is populated with inequality expression + - String comparison is used + """ + filters = [] + sequence = 0 + condition = "is not" + metadata_name = "author" + value = "Unknown" + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_not_equals_condition(self, retrieval): + """ + Test '≠' condition with string value. + + Verifies: + - Same behavior as 'is not' condition + - Inequality expression is used + """ + filters = [] + sequence = 0 + condition = "≠" + metadata_name = "category" + value = "archived" + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_is_not_condition_with_numeric_value(self, retrieval): + """ + Test 'is not' condition with numeric value. 
+ + Verifies: + - Numeric inequality comparison is used + - as_float() is called on the metadata field + """ + filters = [] + sequence = 0 + condition = "is not" + metadata_name = "year" + value = 2000 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + # ==================== Null Condition Tests ==================== + + def test_empty_condition(self, retrieval): + """ + Test 'empty' condition (null check). + + Verifies: + - Filters list is populated with IS NULL expression + - Value can be None for this condition + """ + filters = [] + sequence = 0 + condition = "empty" + metadata_name = "author" + value = None + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_not_empty_condition(self, retrieval): + """ + Test 'not empty' condition (not null check). + + Verifies: + - Filters list is populated with IS NOT NULL expression + - Value can be None for this condition + """ + filters = [] + sequence = 0 + condition = "not empty" + metadata_name = "description" + value = None + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + # ==================== Numeric Comparison Tests ==================== + + def test_before_condition(self, retrieval): + """ + Test 'before' (<) condition. + + Verifies: + - Filters list is populated with less than expression + - Numeric comparison is used + """ + filters = [] + sequence = 0 + condition = "before" + metadata_name = "year" + value = 2020 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_less_than_condition(self, retrieval): + """ + Test '<' condition. + + Verifies: + - Same behavior as 'before' condition + - Less than expression is used + """ + filters = [] + sequence = 0 + condition = "<" + metadata_name = "price" + value = 100.0 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_after_condition(self, retrieval): + """ + Test 'after' (>) condition. + + Verifies: + - Filters list is populated with greater than expression + - Numeric comparison is used + """ + filters = [] + sequence = 0 + condition = "after" + metadata_name = "year" + value = 2020 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_greater_than_condition(self, retrieval): + """ + Test '>' condition. + + Verifies: + - Same behavior as 'after' condition + - Greater than expression is used + """ + filters = [] + sequence = 0 + condition = ">" + metadata_name = "rating" + value = 4.5 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_less_than_or_equal_condition_unicode(self, retrieval): + """ + Test '≤' condition. 
+ + Verifies: + - Filters list is populated with less than or equal expression + - Numeric comparison is used + """ + filters = [] + sequence = 0 + condition = "≤" + metadata_name = "price" + value = 50.0 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_less_than_or_equal_condition_ascii(self, retrieval): + """ + Test '<=' condition. + + Verifies: + - Same behavior as '≤' condition + - Less than or equal expression is used + """ + filters = [] + sequence = 0 + condition = "<=" + metadata_name = "year" + value = 2023 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_greater_than_or_equal_condition_unicode(self, retrieval): + """ + Test '≥' condition. + + Verifies: + - Filters list is populated with greater than or equal expression + - Numeric comparison is used + """ + filters = [] + sequence = 0 + condition = "≥" + metadata_name = "rating" + value = 3.5 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_greater_than_or_equal_condition_ascii(self, retrieval): + """ + Test '>=' condition. + + Verifies: + - Same behavior as '≥' condition + - Greater than or equal expression is used + """ + filters = [] + sequence = 0 + condition = ">=" + metadata_name = "year" + value = 2000 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + # ==================== List/In Condition Tests ==================== + + def test_in_condition_with_comma_separated_string(self, retrieval): + """ + Test 'in' condition with comma-separated string value. + + Verifies: + - String is split into list + - Whitespace is trimmed from each value + - IN expression is created + """ + filters = [] + sequence = 0 + condition = "in" + metadata_name = "category" + value = "tech, science, AI " + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_in_condition_with_list_value(self, retrieval): + """ + Test 'in' condition with list value. + + Verifies: + - List is processed correctly + - None values are filtered out + - IN expression is created with valid values + """ + filters = [] + sequence = 0 + condition = "in" + metadata_name = "tags" + value = ["python", "javascript", None, "golang"] + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_in_condition_with_tuple_value(self, retrieval): + """ + Test 'in' condition with tuple value. + + Verifies: + - Tuple is processed like a list + - IN expression is created + """ + filters = [] + sequence = 0 + condition = "in" + metadata_name = "category" + value = ("tech", "science", "ai") + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_in_condition_with_empty_string(self, retrieval): + """ + Test 'in' condition with empty string value. 
+ + Verifies: + - Empty string results in literal(False) filter + - No valid values to match + """ + filters = [] + sequence = 0 + condition = "in" + metadata_name = "category" + value = "" + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + # Verify it's a literal(False) expression + # This is a bit tricky to test without access to the actual expression + + def test_in_condition_with_only_whitespace(self, retrieval): + """ + Test 'in' condition with whitespace-only string value. + + Verifies: + - Whitespace-only string results in literal(False) filter + - All values are stripped and filtered out + """ + filters = [] + sequence = 0 + condition = "in" + metadata_name = "category" + value = " , , " + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_in_condition_with_single_string(self, retrieval): + """ + Test 'in' condition with single non-comma string. + + Verifies: + - Single string is treated as single-item list + - IN expression is created with one value + """ + filters = [] + sequence = 0 + condition = "in" + metadata_name = "category" + value = "technology" + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + # ==================== Edge Case Tests ==================== + + def test_none_value_with_non_empty_condition(self, retrieval): + """ + Test None value with conditions that require value. + + Verifies: + - Original filters list is returned unchanged + - No filter is added for None values (except empty/not empty) + """ + filters = [] + sequence = 0 + condition = "contains" + metadata_name = "author" + value = None + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 0 # No filter added + + def test_none_value_with_equals_condition(self, retrieval): + """ + Test None value with 'is' (=) condition. + + Verifies: + - Original filters list is returned unchanged + - No filter is added for None values + """ + filters = [] + sequence = 0 + condition = "is" + metadata_name = "author" + value = None + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 0 + + def test_none_value_with_numeric_condition(self, retrieval): + """ + Test None value with numeric comparison condition. + + Verifies: + - Original filters list is returned unchanged + - No filter is added for None values + """ + filters = [] + sequence = 0 + condition = ">" + metadata_name = "year" + value = None + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 0 + + def test_existing_filters_preserved(self, retrieval): + """ + Test that existing filters are preserved. 
+ + Verifies: + - Existing filters in the list are not removed + - New filters are appended to the list + """ + existing_filter = MagicMock() + filters = [existing_filter] + sequence = 0 + condition = "contains" + metadata_name = "author" + value = "test" + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 2 + assert filters[0] == existing_filter + + def test_multiple_filters_accumulated(self, retrieval): + """ + Test multiple calls to accumulate filters. + + Verifies: + - Each call adds a new filter to the list + - All filters are preserved across calls + """ + filters = [] + + # First filter + retrieval.process_metadata_filter_func(0, "contains", "author", "John", filters) + assert len(filters) == 1 + + # Second filter + retrieval.process_metadata_filter_func(1, ">", "year", 2020, filters) + assert len(filters) == 2 + + # Third filter + retrieval.process_metadata_filter_func(2, "is", "category", "tech", filters) + assert len(filters) == 3 + + def test_unknown_condition(self, retrieval): + """ + Test unknown/unsupported condition. + + Verifies: + - Original filters list is returned unchanged + - No filter is added for unknown conditions + """ + filters = [] + sequence = 0 + condition = "unknown_condition" + metadata_name = "author" + value = "test" + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 0 + + def test_empty_string_value_with_contains(self, retrieval): + """ + Test empty string value with 'contains' condition. + + Verifies: + - Filter is added even with empty string + - LIKE expression is created + """ + filters = [] + sequence = 0 + condition = "contains" + metadata_name = "author" + value = "" + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_special_characters_in_value(self, retrieval): + """ + Test special characters in value string. + + Verifies: + - Special characters are handled in value + - LIKE expression is created correctly + """ + filters = [] + sequence = 0 + condition = "contains" + metadata_name = "title" + value = "C++ & Python's features" + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_zero_value_with_numeric_condition(self, retrieval): + """ + Test zero value with numeric comparison condition. + + Verifies: + - Zero is treated as valid value + - Numeric comparison is performed + """ + filters = [] + sequence = 0 + condition = ">" + metadata_name = "price" + value = 0 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_negative_value_with_numeric_condition(self, retrieval): + """ + Test negative value with numeric comparison condition. 
+ + Verifies: + - Negative numbers are handled correctly + - Numeric comparison is performed + """ + filters = [] + sequence = 0 + condition = "<" + metadata_name = "temperature" + value = -10.5 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 + + def test_float_value_with_integer_comparison(self, retrieval): + """ + Test float value with numeric comparison condition. + + Verifies: + - Float values work correctly + - Numeric comparison is performed + """ + filters = [] + sequence = 0 + condition = ">=" + metadata_name = "rating" + value = 4.5 + + result = retrieval.process_metadata_filter_func(sequence, condition, metadata_name, value, filters) + + assert result == filters + assert len(filters) == 1 diff --git a/api/tests/unit_tests/core/rag/retrieval/test_knowledge_retrieval.py b/api/tests/unit_tests/core/rag/retrieval/test_knowledge_retrieval.py new file mode 100644 index 0000000000..5f461d53ae --- /dev/null +++ b/api/tests/unit_tests/core/rag/retrieval/test_knowledge_retrieval.py @@ -0,0 +1,113 @@ +import threading +from unittest.mock import Mock, patch +from uuid import uuid4 + +import pytest +from flask import Flask, current_app + +from core.rag.models.document import Document +from core.rag.retrieval.dataset_retrieval import DatasetRetrieval +from models.dataset import Dataset + + +class TestRetrievalService: + @pytest.fixture + def mock_dataset(self) -> Dataset: + dataset = Mock(spec=Dataset) + dataset.id = str(uuid4()) + dataset.tenant_id = str(uuid4()) + dataset.name = "test_dataset" + dataset.indexing_technique = "high_quality" + dataset.provider = "dify" + return dataset + + def test_multiple_retrieve_reranking_with_app_context(self, mock_dataset): + """ + Repro test for current bug: + reranking runs after `with flask_app.app_context():` exits. + `_multiple_retrieve_thread` catches exceptions and stores them into `thread_exceptions`, + so we must assert from that list (not from an outer try/except). 
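+
+        Once the bug is fixed, reranking should run inside the Flask app
+        context, so `thread_exceptions` stays empty and the final assert passes.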
+ """ + dataset_retrieval = DatasetRetrieval() + flask_app = Flask(__name__) + tenant_id = str(uuid4()) + + # second dataset to ensure dataset_count > 1 reranking branch + secondary_dataset = Mock(spec=Dataset) + secondary_dataset.id = str(uuid4()) + secondary_dataset.provider = "dify" + secondary_dataset.indexing_technique = "high_quality" + + # retriever returns 1 doc into internal list (all_documents_item) + document = Document( + page_content="Context aware doc", + metadata={ + "doc_id": "doc1", + "score": 0.95, + "document_id": str(uuid4()), + "dataset_id": mock_dataset.id, + }, + provider="dify", + ) + + def fake_retriever( + flask_app, dataset_id, query, top_k, all_documents, document_ids_filter, metadata_condition, attachment_ids + ): + all_documents.append(document) + + called = {"init": 0, "invoke": 0} + + class ContextRequiredPostProcessor: + def __init__(self, *args, **kwargs): + called["init"] += 1 + # will raise RuntimeError if no Flask app context exists + _ = current_app.name + + def invoke(self, *args, **kwargs): + called["invoke"] += 1 + _ = current_app.name + return kwargs.get("documents") or args[1] + + # output list from _multiple_retrieve_thread + all_documents: list[Document] = [] + + # IMPORTANT: _multiple_retrieve_thread swallows exceptions and appends them here + thread_exceptions: list[Exception] = [] + + def target(): + with patch.object(dataset_retrieval, "_retriever", side_effect=fake_retriever): + with patch( + "core.rag.retrieval.dataset_retrieval.DataPostProcessor", + ContextRequiredPostProcessor, + ): + dataset_retrieval._multiple_retrieve_thread( + flask_app=flask_app, + available_datasets=[mock_dataset, secondary_dataset], + metadata_condition=None, + metadata_filter_document_ids=None, + all_documents=all_documents, + tenant_id=tenant_id, + reranking_enable=True, + reranking_mode="reranking_model", + reranking_model={ + "reranking_provider_name": "cohere", + "reranking_model_name": "rerank-v2", + }, + weights=None, + top_k=3, + score_threshold=0.0, + query="test query", + attachment_id=None, + dataset_count=2, # force reranking branch + thread_exceptions=thread_exceptions, # ✅ key + ) + + t = threading.Thread(target=target) + t.start() + t.join() + + # Ensure reranking branch was actually executed + assert called["init"] >= 1, "DataPostProcessor was never constructed; reranking branch may not have run." + + # Current buggy code should record an exception (not raise it) + assert not thread_exceptions, thread_exceptions diff --git a/api/tests/unit_tests/core/rag/splitter/test_text_splitter.py b/api/tests/unit_tests/core/rag/splitter/test_text_splitter.py index 7d246ac3cc..943a9e5712 100644 --- a/api/tests/unit_tests/core/rag/splitter/test_text_splitter.py +++ b/api/tests/unit_tests/core/rag/splitter/test_text_splitter.py @@ -901,6 +901,13 @@ class TestFixedRecursiveCharacterTextSplitter: # Verify no empty chunks assert all(len(chunk) > 0 for chunk in result) + def test_double_slash_n(self): + data = "chunk 1\n\nsubchunk 1.\nsubchunk 2.\n\n---\n\nchunk 2\n\nsubchunk 1\nsubchunk 2." 
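+        # The fixed separator is supplied in escaped form; the splitter is
+        # expected to unescape "\\n\\n---\\n\\n" into a literal "\n\n---\n\n"
+        # before splitting, as the expected chunks below imply.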
+ separator = "\\n\\n---\\n\\n" + splitter = FixedRecursiveCharacterTextSplitter(fixed_separator=separator) + chunks = splitter.split_text(data) + assert chunks == ["chunk 1\n\nsubchunk 1.\nsubchunk 2.", "chunk 2\n\nsubchunk 1\nsubchunk 2."] + # ============================================================================ # Test Metadata Preservation diff --git a/api/tests/unit_tests/core/test_provider_configuration.py b/api/tests/unit_tests/core/test_provider_configuration.py index 9060cf7b6c..636fac7a40 100644 --- a/api/tests/unit_tests/core/test_provider_configuration.py +++ b/api/tests/unit_tests/core/test_provider_configuration.py @@ -32,7 +32,6 @@ def mock_provider_entity(): label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"), description=I18nObject(en_US="OpenAI provider", zh_Hans="OpenAI 提供商"), icon_small=I18nObject(en_US="icon.png", zh_Hans="icon.png"), - icon_large=I18nObject(en_US="icon.png", zh_Hans="icon.png"), background="background.png", help=None, supported_model_types=[ModelType.LLM], diff --git a/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py b/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py index 8677325d4e..f33fd0deeb 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py @@ -3,8 +3,15 @@ import json from unittest.mock import MagicMock +from core.variables import IntegerVariable, StringVariable from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel -from core.workflow.graph_engine.entities.commands import AbortCommand, CommandType, GraphEngineCommand +from core.workflow.graph_engine.entities.commands import ( + AbortCommand, + CommandType, + GraphEngineCommand, + UpdateVariablesCommand, + VariableUpdate, +) class TestRedisChannel: @@ -148,6 +155,43 @@ class TestRedisChannel: assert commands[0].command_type == CommandType.ABORT assert isinstance(commands[1], AbortCommand) + def test_fetch_commands_with_update_variables_command(self): + """Test fetching update variables command from Redis.""" + mock_redis = MagicMock() + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] + + update_command = UpdateVariablesCommand( + updates=[ + VariableUpdate( + value=StringVariable(name="foo", value="bar", selector=["node1", "foo"]), + ), + VariableUpdate( + value=IntegerVariable(name="baz", value=123, selector=["node2", "baz"]), + ), + ] + ) + command_json = json.dumps(update_command.model_dump()) + + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [[command_json.encode()], 1] + + channel = RedisChannel(mock_redis, "test:key") + commands = channel.fetch_commands() + + assert len(commands) == 1 + assert isinstance(commands[0], UpdateVariablesCommand) + assert isinstance(commands[0].updates[0].value, StringVariable) + assert list(commands[0].updates[0].value.selector) == ["node1", "foo"] + assert commands[0].updates[0].value.value == "bar" + def test_fetch_commands_skips_invalid_json(self): """Test that invalid JSON commands are skipped.""" mock_redis = MagicMock() diff --git 
a/api/tests/unit_tests/core/workflow/graph_engine/graph_traversal/__init__.py b/api/tests/unit_tests/core/workflow/graph_engine/graph_traversal/__init__.py new file mode 100644 index 0000000000..cf8811dc2b --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/graph_traversal/__init__.py @@ -0,0 +1 @@ +"""Tests for graph traversal components.""" diff --git a/api/tests/unit_tests/core/workflow/graph_engine/graph_traversal/test_skip_propagator.py b/api/tests/unit_tests/core/workflow/graph_engine/graph_traversal/test_skip_propagator.py new file mode 100644 index 0000000000..0019020ede --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/graph_traversal/test_skip_propagator.py @@ -0,0 +1,307 @@ +"""Unit tests for skip propagator.""" + +from unittest.mock import MagicMock, create_autospec + +from core.workflow.graph import Edge, Graph +from core.workflow.graph_engine.graph_state_manager import GraphStateManager +from core.workflow.graph_engine.graph_traversal.skip_propagator import SkipPropagator + + +class TestSkipPropagator: + """Test suite for SkipPropagator.""" + + def test_propagate_skip_from_edge_with_unknown_edges_stops_processing(self) -> None: + """When there are unknown incoming edges, propagation should stop.""" + # Arrange + mock_graph = create_autospec(Graph) + mock_state_manager = create_autospec(GraphStateManager) + + # Create a mock edge + mock_edge = MagicMock(spec=Edge) + mock_edge.id = "edge_1" + mock_edge.head = "node_2" + + # Setup graph edges dict + mock_graph.edges = {"edge_1": mock_edge} + + # Setup incoming edges + incoming_edges = [MagicMock(spec=Edge), MagicMock(spec=Edge)] + mock_graph.get_incoming_edges.return_value = incoming_edges + + # Setup state manager to return has_unknown=True + mock_state_manager.analyze_edge_states.return_value = { + "has_unknown": True, + "has_taken": False, + "all_skipped": False, + } + + propagator = SkipPropagator(mock_graph, mock_state_manager) + + # Act + propagator.propagate_skip_from_edge("edge_1") + + # Assert + mock_graph.get_incoming_edges.assert_called_once_with("node_2") + mock_state_manager.analyze_edge_states.assert_called_once_with(incoming_edges) + # Should not call any other state manager methods + mock_state_manager.enqueue_node.assert_not_called() + mock_state_manager.start_execution.assert_not_called() + mock_state_manager.mark_node_skipped.assert_not_called() + + def test_propagate_skip_from_edge_with_taken_edge_enqueues_node(self) -> None: + """When there is at least one taken edge, node should be enqueued.""" + # Arrange + mock_graph = create_autospec(Graph) + mock_state_manager = create_autospec(GraphStateManager) + + # Create a mock edge + mock_edge = MagicMock(spec=Edge) + mock_edge.id = "edge_1" + mock_edge.head = "node_2" + + mock_graph.edges = {"edge_1": mock_edge} + incoming_edges = [MagicMock(spec=Edge)] + mock_graph.get_incoming_edges.return_value = incoming_edges + + # Setup state manager to return has_taken=True + mock_state_manager.analyze_edge_states.return_value = { + "has_unknown": False, + "has_taken": True, + "all_skipped": False, + } + + propagator = SkipPropagator(mock_graph, mock_state_manager) + + # Act + propagator.propagate_skip_from_edge("edge_1") + + # Assert + mock_state_manager.enqueue_node.assert_called_once_with("node_2") + mock_state_manager.start_execution.assert_called_once_with("node_2") + mock_state_manager.mark_node_skipped.assert_not_called() + + def test_propagate_skip_from_edge_with_all_skipped_propagates_to_node(self) -> None: + """When all incoming 
edges are skipped, should propagate skip to node."""
+        # Arrange
+        mock_graph = create_autospec(Graph)
+        mock_state_manager = create_autospec(GraphStateManager)
+
+        # Create a mock edge
+        mock_edge = MagicMock(spec=Edge)
+        mock_edge.id = "edge_1"
+        mock_edge.head = "node_2"
+
+        mock_graph.edges = {"edge_1": mock_edge}
+        incoming_edges = [MagicMock(spec=Edge)]
+        mock_graph.get_incoming_edges.return_value = incoming_edges
+
+        # Setup state manager to return all_skipped=True
+        mock_state_manager.analyze_edge_states.return_value = {
+            "has_unknown": False,
+            "has_taken": False,
+            "all_skipped": True,
+        }
+
+        propagator = SkipPropagator(mock_graph, mock_state_manager)
+
+        # Act
+        propagator.propagate_skip_from_edge("edge_1")
+
+        # Assert
+        mock_state_manager.mark_node_skipped.assert_called_once_with("node_2")
+        mock_state_manager.enqueue_node.assert_not_called()
+        mock_state_manager.start_execution.assert_not_called()
+
+    def test_propagate_skip_to_node_marks_node_and_outgoing_edges_skipped(self) -> None:
+        """_propagate_skip_to_node should mark node and all outgoing edges as skipped."""
+        # Arrange
+        mock_graph = create_autospec(Graph)
+        mock_state_manager = create_autospec(GraphStateManager)
+
+        # Create outgoing edges
+        edge1 = MagicMock(spec=Edge)
+        edge1.id = "edge_2"
+        edge1.head = "node_downstream_1"  # Set head for propagate_skip_from_edge
+
+        edge2 = MagicMock(spec=Edge)
+        edge2.id = "edge_3"
+        edge2.head = "node_downstream_2"
+
+        # Setup graph edges dict for propagate_skip_from_edge
+        mock_graph.edges = {"edge_2": edge1, "edge_3": edge2}
+        mock_graph.get_outgoing_edges.return_value = [edge1, edge2]
+
+        # Setup get_incoming_edges to return empty list to stop recursion
+        mock_graph.get_incoming_edges.return_value = []
+
+        propagator = SkipPropagator(mock_graph, mock_state_manager)
+
+        # Act: call the private method directly to exercise the propagation logic
+        propagator._propagate_skip_to_node("node_1")
+
+        # Assert
+        mock_state_manager.mark_node_skipped.assert_called_once_with("node_1")
+        mock_state_manager.mark_edge_skipped.assert_any_call("edge_2")
+        mock_state_manager.mark_edge_skipped.assert_any_call("edge_3")
+        assert mock_state_manager.mark_edge_skipped.call_count == 2
+        # Each skipped outgoing edge should in turn trigger propagate_skip_from_edge;
+        # with get_incoming_edges returning [], that recursion terminates immediately.
+        # A spy-based way to verify the recursive calls is sketched below.
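+        # Sketch of that direct check (assuming `patch` is imported from
+        # unittest.mock; patching the instance attribute intercepts the
+        # recursive self-calls while `wraps` preserves the real behavior):
+        #
+        #     with patch.object(
+        #         propagator,
+        #         "propagate_skip_from_edge",
+        #         wraps=propagator.propagate_skip_from_edge,
+        #     ) as spy:
+        #         propagator._propagate_skip_to_node("node_1")
+        #     spy.assert_any_call("edge_2")
+        #     spy.assert_any_call("edge_3")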
+ + def test_skip_branch_paths_marks_unselected_edges_and_propagates(self) -> None: + """skip_branch_paths should mark all unselected edges as skipped and propagate.""" + # Arrange + mock_graph = create_autospec(Graph) + mock_state_manager = create_autospec(GraphStateManager) + + # Create unselected edges + edge1 = MagicMock(spec=Edge) + edge1.id = "edge_1" + edge1.head = "node_downstream_1" + + edge2 = MagicMock(spec=Edge) + edge2.id = "edge_2" + edge2.head = "node_downstream_2" + + unselected_edges = [edge1, edge2] + + # Setup graph edges dict + mock_graph.edges = {"edge_1": edge1, "edge_2": edge2} + # Setup get_incoming_edges to return empty list to stop recursion + mock_graph.get_incoming_edges.return_value = [] + + propagator = SkipPropagator(mock_graph, mock_state_manager) + + # Act + propagator.skip_branch_paths(unselected_edges) + + # Assert + mock_state_manager.mark_edge_skipped.assert_any_call("edge_1") + mock_state_manager.mark_edge_skipped.assert_any_call("edge_2") + assert mock_state_manager.mark_edge_skipped.call_count == 2 + # propagate_skip_from_edge should be called for each edge + # We can't directly verify due to the mock, but the logic is covered + + def test_propagate_skip_from_edge_recursively_propagates_through_graph(self) -> None: + """Skip propagation should recursively propagate through the graph.""" + # Arrange + mock_graph = create_autospec(Graph) + mock_state_manager = create_autospec(GraphStateManager) + + # Create edge chain: edge_1 -> node_2 -> edge_3 -> node_4 + edge1 = MagicMock(spec=Edge) + edge1.id = "edge_1" + edge1.head = "node_2" + + edge3 = MagicMock(spec=Edge) + edge3.id = "edge_3" + edge3.head = "node_4" + + mock_graph.edges = {"edge_1": edge1, "edge_3": edge3} + + # Setup get_incoming_edges to return different values based on node + def get_incoming_edges_side_effect(node_id): + if node_id == "node_2": + return [edge1] + elif node_id == "node_4": + return [edge3] + return [] + + mock_graph.get_incoming_edges.side_effect = get_incoming_edges_side_effect + + # Setup get_outgoing_edges to return different values based on node + def get_outgoing_edges_side_effect(node_id): + if node_id == "node_2": + return [edge3] + elif node_id == "node_4": + return [] # No outgoing edges, stops recursion + return [] + + mock_graph.get_outgoing_edges.side_effect = get_outgoing_edges_side_effect + + # Setup state manager to return all_skipped for both nodes + mock_state_manager.analyze_edge_states.return_value = { + "has_unknown": False, + "has_taken": False, + "all_skipped": True, + } + + propagator = SkipPropagator(mock_graph, mock_state_manager) + + # Act + propagator.propagate_skip_from_edge("edge_1") + + # Assert + # Should mark node_2 as skipped + mock_state_manager.mark_node_skipped.assert_any_call("node_2") + # Should mark edge_3 as skipped + mock_state_manager.mark_edge_skipped.assert_any_call("edge_3") + # Should propagate to node_4 + mock_state_manager.mark_node_skipped.assert_any_call("node_4") + assert mock_state_manager.mark_node_skipped.call_count == 2 + + def test_propagate_skip_from_edge_with_mixed_edge_states_handles_correctly(self) -> None: + """Test with mixed edge states (some unknown, some taken, some skipped).""" + # Arrange + mock_graph = create_autospec(Graph) + mock_state_manager = create_autospec(GraphStateManager) + + mock_edge = MagicMock(spec=Edge) + mock_edge.id = "edge_1" + mock_edge.head = "node_2" + + mock_graph.edges = {"edge_1": mock_edge} + incoming_edges = [MagicMock(spec=Edge), MagicMock(spec=Edge), MagicMock(spec=Edge)] + 
mock_graph.get_incoming_edges.return_value = incoming_edges + + # Test 1: has_unknown=True, has_taken=False, all_skipped=False + mock_state_manager.analyze_edge_states.return_value = { + "has_unknown": True, + "has_taken": False, + "all_skipped": False, + } + + propagator = SkipPropagator(mock_graph, mock_state_manager) + + # Act + propagator.propagate_skip_from_edge("edge_1") + + # Assert - should stop processing + mock_state_manager.enqueue_node.assert_not_called() + mock_state_manager.mark_node_skipped.assert_not_called() + + # Reset mocks for next test + mock_state_manager.reset_mock() + mock_graph.reset_mock() + + # Test 2: has_unknown=False, has_taken=True, all_skipped=False + mock_state_manager.analyze_edge_states.return_value = { + "has_unknown": False, + "has_taken": True, + "all_skipped": False, + } + + # Act + propagator.propagate_skip_from_edge("edge_1") + + # Assert - should enqueue node + mock_state_manager.enqueue_node.assert_called_once_with("node_2") + mock_state_manager.start_execution.assert_called_once_with("node_2") + + # Reset mocks for next test + mock_state_manager.reset_mock() + mock_graph.reset_mock() + + # Test 3: has_unknown=False, has_taken=False, all_skipped=True + mock_state_manager.analyze_edge_states.return_value = { + "has_unknown": False, + "has_taken": False, + "all_skipped": True, + } + + # Act + propagator.propagate_skip_from_edge("edge_1") + + # Assert - should propagate skip + mock_state_manager.mark_node_skipped.assert_called_once_with("node_2") diff --git a/api/tests/unit_tests/core/workflow/graph_engine/layers/__init__.py b/api/tests/unit_tests/core/workflow/graph_engine/layers/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/layers/conftest.py b/api/tests/unit_tests/core/workflow/graph_engine/layers/conftest.py new file mode 100644 index 0000000000..b18a3369e9 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/layers/conftest.py @@ -0,0 +1,101 @@ +""" +Shared fixtures for ObservabilityLayer tests. 
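+
+Provides an in-memory OTel span exporter, a tracer provider wired to it, mock
+workflow nodes (start/LLM/tool), and patches for the instrument feature flag.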
+""" + +from unittest.mock import MagicMock, patch + +import pytest +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import SimpleSpanProcessor +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter +from opentelemetry.trace import set_tracer_provider + +from core.workflow.enums import NodeType + + +@pytest.fixture +def memory_span_exporter(): + """Provide an in-memory span exporter for testing.""" + return InMemorySpanExporter() + + +@pytest.fixture +def tracer_provider_with_memory_exporter(memory_span_exporter): + """Provide a TracerProvider configured with memory exporter.""" + import opentelemetry.trace as trace_api + + trace_api._TRACER_PROVIDER = None + trace_api._TRACER_PROVIDER_SET_ONCE._done = False + + provider = TracerProvider() + processor = SimpleSpanProcessor(memory_span_exporter) + provider.add_span_processor(processor) + set_tracer_provider(provider) + + yield provider + + provider.force_flush() + + +@pytest.fixture +def mock_start_node(): + """Create a mock Start Node.""" + node = MagicMock() + node.id = "test-start-node-id" + node.title = "Start Node" + node.execution_id = "test-start-execution-id" + node.node_type = NodeType.START + return node + + +@pytest.fixture +def mock_llm_node(): + """Create a mock LLM Node.""" + node = MagicMock() + node.id = "test-llm-node-id" + node.title = "LLM Node" + node.execution_id = "test-llm-execution-id" + node.node_type = NodeType.LLM + return node + + +@pytest.fixture +def mock_tool_node(): + """Create a mock Tool Node with tool-specific attributes.""" + from core.tools.entities.tool_entities import ToolProviderType + from core.workflow.nodes.tool.entities import ToolNodeData + + node = MagicMock() + node.id = "test-tool-node-id" + node.title = "Test Tool Node" + node.execution_id = "test-tool-execution-id" + node.node_type = NodeType.TOOL + + tool_data = ToolNodeData( + title="Test Tool Node", + desc=None, + provider_id="test-provider-id", + provider_type=ToolProviderType.BUILT_IN, + provider_name="test-provider", + tool_name="test-tool", + tool_label="Test Tool", + tool_configurations={}, + tool_parameters={}, + ) + node._node_data = tool_data + + return node + + +@pytest.fixture +def mock_is_instrument_flag_enabled_false(): + """Mock is_instrument_flag_enabled to return False.""" + with patch("core.workflow.graph_engine.layers.observability.is_instrument_flag_enabled", return_value=False): + yield + + +@pytest.fixture +def mock_is_instrument_flag_enabled_true(): + """Mock is_instrument_flag_enabled to return True.""" + with patch("core.workflow.graph_engine.layers.observability.is_instrument_flag_enabled", return_value=True): + yield diff --git a/api/tests/unit_tests/core/workflow/graph_engine/layers/test_layer_initialization.py b/api/tests/unit_tests/core/workflow/graph_engine/layers/test_layer_initialization.py new file mode 100644 index 0000000000..d6ba61c50c --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/layers/test_layer_initialization.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +import pytest + +from core.workflow.graph_engine import GraphEngine +from core.workflow.graph_engine.command_channels import InMemoryChannel +from core.workflow.graph_engine.layers.base import ( + GraphEngineLayer, + GraphEngineLayerNotInitializedError, +) +from core.workflow.graph_events import GraphEngineEvent + +from ..test_table_runner import WorkflowRunner + + +class LayerForTest(GraphEngineLayer): + def on_graph_start(self) -> None: + 
pass + + def on_event(self, event: GraphEngineEvent) -> None: + pass + + def on_graph_end(self, error: Exception | None) -> None: + pass + + +def test_layer_runtime_state_raises_when_uninitialized() -> None: + layer = LayerForTest() + + with pytest.raises(GraphEngineLayerNotInitializedError): + _ = layer.graph_runtime_state + + +def test_layer_runtime_state_available_after_engine_layer() -> None: + runner = WorkflowRunner() + fixture_data = runner.load_fixture("simple_passthrough_workflow") + graph, graph_runtime_state = runner.create_graph_from_fixture( + fixture_data, + inputs={"query": "test layer state"}, + ) + engine = GraphEngine( + workflow_id="test_workflow", + graph=graph, + graph_runtime_state=graph_runtime_state, + command_channel=InMemoryChannel(), + ) + + layer = LayerForTest() + engine.layer(layer) + + outputs = layer.graph_runtime_state.outputs + ready_queue_size = layer.graph_runtime_state.ready_queue_size + + assert outputs == {} + assert isinstance(ready_queue_size, int) + assert ready_queue_size >= 0 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/layers/test_observability.py b/api/tests/unit_tests/core/workflow/graph_engine/layers/test_observability.py new file mode 100644 index 0000000000..458cf2cc67 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/layers/test_observability.py @@ -0,0 +1,219 @@ +""" +Tests for ObservabilityLayer. + +Test coverage: +- Initialization and enable/disable logic +- Node span lifecycle (start, end, error handling) +- Parser integration (default and tool-specific) +- Graph lifecycle management +- Disabled mode behavior +""" + +from unittest.mock import patch + +import pytest +from opentelemetry.trace import StatusCode + +from core.workflow.enums import NodeType +from core.workflow.graph_engine.layers.observability import ObservabilityLayer + + +class TestObservabilityLayerInitialization: + """Test ObservabilityLayer initialization logic.""" + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True) + @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false") + def test_initialization_when_otel_enabled(self, tracer_provider_with_memory_exporter): + """Test that layer initializes correctly when OTel is enabled.""" + layer = ObservabilityLayer() + assert not layer._is_disabled + assert layer._tracer is not None + assert NodeType.TOOL in layer._parsers + assert layer._default_parser is not None + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", False) + @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_true") + def test_initialization_when_instrument_flag_enabled(self, tracer_provider_with_memory_exporter): + """Test that layer enables when instrument flag is enabled.""" + layer = ObservabilityLayer() + assert not layer._is_disabled + assert layer._tracer is not None + assert NodeType.TOOL in layer._parsers + assert layer._default_parser is not None + + +class TestObservabilityLayerNodeSpanLifecycle: + """Test node span creation and lifecycle management.""" + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True) + @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false") + def test_node_span_created_and_ended( + self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_llm_node + ): + """Test that span is created on node start and ended on node end.""" + layer = ObservabilityLayer() + layer.on_graph_start() + + layer.on_node_run_start(mock_llm_node) + 
layer.on_node_run_end(mock_llm_node, None) + + spans = memory_span_exporter.get_finished_spans() + assert len(spans) == 1 + assert spans[0].name == mock_llm_node.title + assert spans[0].status.status_code == StatusCode.OK + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True) + @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false") + def test_node_error_recorded_in_span( + self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_llm_node + ): + """Test that node execution errors are recorded in span.""" + layer = ObservabilityLayer() + layer.on_graph_start() + + error = ValueError("Test error") + layer.on_node_run_start(mock_llm_node) + layer.on_node_run_end(mock_llm_node, error) + + spans = memory_span_exporter.get_finished_spans() + assert len(spans) == 1 + assert spans[0].status.status_code == StatusCode.ERROR + assert len(spans[0].events) > 0 + assert any("exception" in event.name.lower() for event in spans[0].events) + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True) + @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false") + def test_node_end_without_start_handled_gracefully( + self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_llm_node + ): + """Test that ending a node without start doesn't crash.""" + layer = ObservabilityLayer() + layer.on_graph_start() + + layer.on_node_run_end(mock_llm_node, None) + + spans = memory_span_exporter.get_finished_spans() + assert len(spans) == 0 + + +class TestObservabilityLayerParserIntegration: + """Test parser integration for different node types.""" + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True) + @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false") + def test_default_parser_used_for_regular_node( + self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_start_node + ): + """Test that default parser is used for non-tool nodes.""" + layer = ObservabilityLayer() + layer.on_graph_start() + + layer.on_node_run_start(mock_start_node) + layer.on_node_run_end(mock_start_node, None) + + spans = memory_span_exporter.get_finished_spans() + assert len(spans) == 1 + attrs = spans[0].attributes + assert attrs["node.id"] == mock_start_node.id + assert attrs["node.execution_id"] == mock_start_node.execution_id + assert attrs["node.type"] == mock_start_node.node_type.value + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True) + @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false") + def test_tool_parser_used_for_tool_node( + self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_tool_node + ): + """Test that tool parser is used for tool nodes.""" + layer = ObservabilityLayer() + layer.on_graph_start() + + layer.on_node_run_start(mock_tool_node) + layer.on_node_run_end(mock_tool_node, None) + + spans = memory_span_exporter.get_finished_spans() + assert len(spans) == 1 + attrs = spans[0].attributes + assert attrs["node.id"] == mock_tool_node.id + assert attrs["tool.provider.id"] == mock_tool_node._node_data.provider_id + assert attrs["tool.provider.type"] == mock_tool_node._node_data.provider_type.value + assert attrs["tool.name"] == mock_tool_node._node_data.tool_name + + +class TestObservabilityLayerGraphLifecycle: + """Test graph lifecycle management.""" + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True) + 
@pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false") + def test_on_graph_start_clears_contexts(self, tracer_provider_with_memory_exporter, mock_llm_node): + """Test that on_graph_start clears node contexts.""" + layer = ObservabilityLayer() + layer.on_graph_start() + + layer.on_node_run_start(mock_llm_node) + assert len(layer._node_contexts) == 1 + + layer.on_graph_start() + assert len(layer._node_contexts) == 0 + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True) + @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false") + def test_on_graph_end_with_no_unfinished_spans( + self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_llm_node + ): + """Test that on_graph_end handles normal completion.""" + layer = ObservabilityLayer() + layer.on_graph_start() + + layer.on_node_run_start(mock_llm_node) + layer.on_node_run_end(mock_llm_node, None) + layer.on_graph_end(None) + + spans = memory_span_exporter.get_finished_spans() + assert len(spans) == 1 + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True) + @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false") + def test_on_graph_end_with_unfinished_spans_logs_warning( + self, tracer_provider_with_memory_exporter, mock_llm_node, caplog + ): + """Test that on_graph_end logs warning for unfinished spans.""" + layer = ObservabilityLayer() + layer.on_graph_start() + + layer.on_node_run_start(mock_llm_node) + assert len(layer._node_contexts) == 1 + + layer.on_graph_end(None) + + assert len(layer._node_contexts) == 0 + assert "node spans were not properly ended" in caplog.text + + +class TestObservabilityLayerDisabledMode: + """Test behavior when layer is disabled.""" + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", False) + @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false") + def test_disabled_mode_skips_node_start(self, memory_span_exporter, mock_start_node): + """Test that disabled layer doesn't create spans on node start.""" + layer = ObservabilityLayer() + assert layer._is_disabled + + layer.on_graph_start() + layer.on_node_run_start(mock_start_node) + layer.on_node_run_end(mock_start_node, None) + + spans = memory_span_exporter.get_finished_spans() + assert len(spans) == 0 + + @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", False) + @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false") + def test_disabled_mode_skips_node_end(self, memory_span_exporter, mock_llm_node): + """Test that disabled layer doesn't process node end.""" + layer = ObservabilityLayer() + assert layer._is_disabled + + layer.on_node_run_end(mock_llm_node, None) + + spans = memory_span_exporter.get_finished_spans() + assert len(spans) == 0 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/orchestration/test_dispatcher.py b/api/tests/unit_tests/core/workflow/graph_engine/orchestration/test_dispatcher.py index c1fc4acd73..fe3ea576c1 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/orchestration/test_dispatcher.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/orchestration/test_dispatcher.py @@ -3,6 +3,7 @@ from __future__ import annotations import queue +import threading from unittest import mock from core.workflow.entities.pause_reason import SchedulingPause @@ -36,6 +37,7 @@ def test_dispatcher_should_consume_remains_events_after_pause(): event_queue=event_queue, event_handler=event_handler, 
execution_coordinator=execution_coordinator, + stop_event=threading.Event(), ) dispatcher._dispatcher_loop() assert event_queue.empty() @@ -96,6 +98,7 @@ def _run_dispatcher_for_event(event) -> int: event_queue=event_queue, event_handler=event_handler, execution_coordinator=coordinator, + stop_event=threading.Event(), ) dispatcher._dispatcher_loop() @@ -181,6 +184,7 @@ def test_dispatcher_drain_event_queue(): event_queue=event_queue, event_handler=event_handler, execution_coordinator=coordinator, + stop_event=threading.Event(), ) dispatcher._dispatcher_loop() diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py b/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py index b074a11be9..d826f7a900 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py @@ -4,12 +4,19 @@ import time from unittest.mock import MagicMock from core.app.entities.app_invoke_entities import InvokeFrom +from core.variables import IntegerVariable, StringVariable from core.workflow.entities.graph_init_params import GraphInitParams from core.workflow.entities.pause_reason import SchedulingPause from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel -from core.workflow.graph_engine.entities.commands import AbortCommand, CommandType, PauseCommand +from core.workflow.graph_engine.entities.commands import ( + AbortCommand, + CommandType, + PauseCommand, + UpdateVariablesCommand, + VariableUpdate, +) from core.workflow.graph_events import GraphRunAbortedEvent, GraphRunPausedEvent, GraphRunStartedEvent from core.workflow.nodes.start.start_node import StartNode from core.workflow.runtime import GraphRuntimeState, VariablePool @@ -180,3 +187,67 @@ def test_pause_command(): graph_execution = engine.graph_runtime_state.graph_execution assert graph_execution.pause_reasons == [SchedulingPause(message="User requested pause")] + + +def test_update_variables_command_updates_pool(): + """Test that GraphEngine updates variable pool via update variables command.""" + + shared_runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + shared_runtime_state.variable_pool.add(("node1", "foo"), "old value") + + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + mock_graph.root_node.id = "start" + + start_node = StartNode( + id="start", + config={"id": "start", "data": {"title": "start", "variables": []}}, + graph_init_params=GraphInitParams( + tenant_id="test_tenant", + app_id="test_app", + workflow_id="test_workflow", + graph_config={}, + user_id="test_user", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ), + graph_runtime_state=shared_runtime_state, + ) + mock_graph.nodes["start"] = start_node + + mock_graph.get_outgoing_edges = MagicMock(return_value=[]) + mock_graph.get_incoming_edges = MagicMock(return_value=[]) + + command_channel = InMemoryChannel() + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=shared_runtime_state, + command_channel=command_channel, + ) + + update_command = UpdateVariablesCommand( + updates=[ + VariableUpdate( + value=StringVariable(name="foo", value="new value", selector=["node1", "foo"]), + ), + VariableUpdate( + value=IntegerVariable(name="bar", value=123, 
selector=["node2", "bar"]), + ), + ] + ) + command_channel.send_command(update_command) + + list(engine.run()) + + updated_existing = shared_runtime_state.variable_pool.get(["node1", "foo"]) + added_new = shared_runtime_state.variable_pool.get(["node2", "bar"]) + + assert updated_existing is not None + assert updated_existing.value == "new value" + assert added_new is not None + assert added_new.value == 123 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_config.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_config.py index b02f90588b..5ceb8dd7f7 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_config.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_config.py @@ -5,6 +5,8 @@ This module provides a flexible configuration system for customizing the behavior of mock nodes during testing. """ +from __future__ import annotations + from collections.abc import Callable from dataclasses import dataclass, field from typing import Any @@ -95,67 +97,67 @@ class MockConfigBuilder: def __init__(self) -> None: self._config = MockConfig() - def with_auto_mock(self, enabled: bool = True) -> "MockConfigBuilder": + def with_auto_mock(self, enabled: bool = True) -> MockConfigBuilder: """Enable or disable auto-mocking.""" self._config.enable_auto_mock = enabled return self - def with_delays(self, enabled: bool = True) -> "MockConfigBuilder": + def with_delays(self, enabled: bool = True) -> MockConfigBuilder: """Enable or disable simulated execution delays.""" self._config.simulate_delays = enabled return self - def with_llm_response(self, response: str) -> "MockConfigBuilder": + def with_llm_response(self, response: str) -> MockConfigBuilder: """Set default LLM response.""" self._config.default_llm_response = response return self - def with_agent_response(self, response: str) -> "MockConfigBuilder": + def with_agent_response(self, response: str) -> MockConfigBuilder: """Set default agent response.""" self._config.default_agent_response = response return self - def with_tool_response(self, response: dict[str, Any]) -> "MockConfigBuilder": + def with_tool_response(self, response: dict[str, Any]) -> MockConfigBuilder: """Set default tool response.""" self._config.default_tool_response = response return self - def with_retrieval_response(self, response: str) -> "MockConfigBuilder": + def with_retrieval_response(self, response: str) -> MockConfigBuilder: """Set default retrieval response.""" self._config.default_retrieval_response = response return self - def with_http_response(self, response: dict[str, Any]) -> "MockConfigBuilder": + def with_http_response(self, response: dict[str, Any]) -> MockConfigBuilder: """Set default HTTP response.""" self._config.default_http_response = response return self - def with_template_transform_response(self, response: str) -> "MockConfigBuilder": + def with_template_transform_response(self, response: str) -> MockConfigBuilder: """Set default template transform response.""" self._config.default_template_transform_response = response return self - def with_code_response(self, response: dict[str, Any]) -> "MockConfigBuilder": + def with_code_response(self, response: dict[str, Any]) -> MockConfigBuilder: """Set default code execution response.""" self._config.default_code_response = response return self - def with_node_output(self, node_id: str, outputs: dict[str, Any]) -> "MockConfigBuilder": + def with_node_output(self, node_id: str, outputs: dict[str, Any]) -> MockConfigBuilder: """Set outputs for a 
specific node.""" self._config.set_node_outputs(node_id, outputs) return self - def with_node_error(self, node_id: str, error: str) -> "MockConfigBuilder": + def with_node_error(self, node_id: str, error: str) -> MockConfigBuilder: """Set error for a specific node.""" self._config.set_node_error(node_id, error) return self - def with_node_config(self, config: NodeMockConfig) -> "MockConfigBuilder": + def with_node_config(self, config: NodeMockConfig) -> MockConfigBuilder: """Add a node-specific configuration.""" self._config.set_node_config(config.node_id, config) return self - def with_default_config(self, node_type: NodeType, config: dict[str, Any]) -> "MockConfigBuilder": + def with_default_config(self, node_type: NodeType, config: dict[str, Any]) -> MockConfigBuilder: """Set default configuration for a node type.""" self._config.set_default_config(node_type, config) return self diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py index eeffdd27fe..6e9a432745 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py @@ -103,13 +103,25 @@ class MockNodeFactory(DifyNodeFactory): # Create mock node instance mock_class = self._mock_node_types[node_type] - mock_instance = mock_class( - id=node_id, - config=node_config, - graph_init_params=self.graph_init_params, - graph_runtime_state=self.graph_runtime_state, - mock_config=self.mock_config, - ) + if node_type == NodeType.CODE: + mock_instance = mock_class( + id=node_id, + config=node_config, + graph_init_params=self.graph_init_params, + graph_runtime_state=self.graph_runtime_state, + mock_config=self.mock_config, + code_executor=self._code_executor, + code_providers=self._code_providers, + code_limits=self._code_limits, + ) + else: + mock_instance = mock_class( + id=node_id, + config=node_config, + graph_init_params=self.graph_init_params, + graph_runtime_state=self.graph_runtime_state, + mock_config=self.mock_config, + ) return mock_instance diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py index fd94a5e833..5937bbfb39 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py @@ -40,12 +40,14 @@ class MockNodeMixin: graph_init_params: "GraphInitParams", graph_runtime_state: "GraphRuntimeState", mock_config: Optional["MockConfig"] = None, + **kwargs: Any, ): super().__init__( id=id, config=config, graph_init_params=graph_init_params, graph_runtime_state=graph_runtime_state, + **kwargs, ) self.mock_config = mock_config diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py index 4fb693a5c2..de08cc3497 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py @@ -5,11 +5,24 @@ This module tests the functionality of MockTemplateTransformNode and MockCodeNod to ensure they work correctly with the TableTestRunner. 
""" +from configs import dify_config from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus +from core.workflow.nodes.code.limits import CodeNodeLimits from tests.unit_tests.core.workflow.graph_engine.test_mock_config import MockConfig, MockConfigBuilder, NodeMockConfig from tests.unit_tests.core.workflow.graph_engine.test_mock_factory import MockNodeFactory from tests.unit_tests.core.workflow.graph_engine.test_mock_nodes import MockCodeNode, MockTemplateTransformNode +DEFAULT_CODE_LIMITS = CodeNodeLimits( + max_string_length=dify_config.CODE_MAX_STRING_LENGTH, + max_number=dify_config.CODE_MAX_NUMBER, + min_number=dify_config.CODE_MIN_NUMBER, + max_precision=dify_config.CODE_MAX_PRECISION, + max_depth=dify_config.CODE_MAX_DEPTH, + max_number_array_length=dify_config.CODE_MAX_NUMBER_ARRAY_LENGTH, + max_string_array_length=dify_config.CODE_MAX_STRING_ARRAY_LENGTH, + max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH, +) + class TestMockTemplateTransformNode: """Test cases for MockTemplateTransformNode.""" @@ -306,6 +319,7 @@ class TestMockCodeNode: graph_init_params=graph_init_params, graph_runtime_state=graph_runtime_state, mock_config=mock_config, + code_limits=DEFAULT_CODE_LIMITS, ) # Run the node @@ -370,6 +384,7 @@ class TestMockCodeNode: graph_init_params=graph_init_params, graph_runtime_state=graph_runtime_state, mock_config=mock_config, + code_limits=DEFAULT_CODE_LIMITS, ) # Run the node @@ -438,6 +453,7 @@ class TestMockCodeNode: graph_init_params=graph_init_params, graph_runtime_state=graph_runtime_state, mock_config=mock_config, + code_limits=DEFAULT_CODE_LIMITS, ) # Run the node diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_response_coordinator.py b/api/tests/unit_tests/core/workflow/graph_engine/test_response_coordinator.py new file mode 100644 index 0000000000..822b6a808f --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_response_coordinator.py @@ -0,0 +1,231 @@ +"""Tests for ResponseStreamCoordinator object field streaming.""" + +from unittest.mock import MagicMock + +from core.workflow.entities.tool_entities import ToolResultStatus +from core.workflow.enums import NodeType +from core.workflow.graph.graph import Graph +from core.workflow.graph_engine.response_coordinator.coordinator import ResponseStreamCoordinator +from core.workflow.graph_engine.response_coordinator.session import ResponseSession +from core.workflow.graph_events import ( + ChunkType, + NodeRunStreamChunkEvent, + ToolCall, + ToolResult, +) +from core.workflow.nodes.base.entities import BaseNodeData +from core.workflow.nodes.base.template import Template, VariableSegment +from core.workflow.runtime import VariablePool + + +class TestResponseCoordinatorObjectStreaming: + """Test streaming of object-type variables with child fields.""" + + def test_object_field_streaming(self): + """Test that when selecting an object variable, all child field streams are forwarded.""" + # Create mock graph and variable pool + graph = MagicMock(spec=Graph) + variable_pool = MagicMock(spec=VariablePool) + + # Mock nodes + llm_node = MagicMock() + llm_node.id = "llm_node" + llm_node.node_type = NodeType.LLM + llm_node.execution_type = MagicMock() + llm_node.blocks_variable_output = MagicMock(return_value=False) + + response_node = MagicMock() + response_node.id = "response_node" + response_node.node_type = NodeType.ANSWER + response_node.execution_type = MagicMock() + response_node.blocks_variable_output = MagicMock(return_value=False) + + # Mock template for 
response node + response_node.node_data = MagicMock(spec=BaseNodeData) + response_node.node_data.answer = "{{#llm_node.generation#}}" + + graph.nodes = { + "llm_node": llm_node, + "response_node": response_node, + } + graph.root_node = llm_node + graph.get_outgoing_edges = MagicMock(return_value=[]) + + # Create coordinator + coordinator = ResponseStreamCoordinator(variable_pool, graph) + + # Track execution + coordinator.track_node_execution("llm_node", "exec_123") + coordinator.track_node_execution("response_node", "exec_456") + + # Simulate streaming events for child fields of generation object + # 1. Content stream + content_event_1 = NodeRunStreamChunkEvent( + id="exec_123", + node_id="llm_node", + node_type=NodeType.LLM, + selector=["llm_node", "generation", "content"], + chunk="Hello", + is_final=False, + chunk_type=ChunkType.TEXT, + ) + content_event_2 = NodeRunStreamChunkEvent( + id="exec_123", + node_id="llm_node", + node_type=NodeType.LLM, + selector=["llm_node", "generation", "content"], + chunk=" world", + is_final=True, + chunk_type=ChunkType.TEXT, + ) + + # 2. Tool call stream + tool_call_event = NodeRunStreamChunkEvent( + id="exec_123", + node_id="llm_node", + node_type=NodeType.LLM, + selector=["llm_node", "generation", "tool_calls"], + chunk='{"query": "test"}', + is_final=True, + chunk_type=ChunkType.TOOL_CALL, + tool_call=ToolCall( + id="call_123", + name="search", + arguments='{"query": "test"}', + ), + ) + + # 3. Tool result stream + tool_result_event = NodeRunStreamChunkEvent( + id="exec_123", + node_id="llm_node", + node_type=NodeType.LLM, + selector=["llm_node", "generation", "tool_results"], + chunk="Found 10 results", + is_final=True, + chunk_type=ChunkType.TOOL_RESULT, + tool_result=ToolResult( + id="call_123", + name="search", + output="Found 10 results", + files=[], + status=ToolResultStatus.SUCCESS, + ), + ) + + # Intercept these events + coordinator.intercept_event(content_event_1) + coordinator.intercept_event(tool_call_event) + coordinator.intercept_event(tool_result_event) + coordinator.intercept_event(content_event_2) + + # Verify that all child streams are buffered + assert ("llm_node", "generation", "content") in coordinator._stream_buffers + assert ("llm_node", "generation", "tool_calls") in coordinator._stream_buffers + assert ("llm_node", "generation", "tool_results") in coordinator._stream_buffers + + # Verify payloads are preserved in buffered events + buffered_call = coordinator._stream_buffers[("llm_node", "generation", "tool_calls")][0] + assert buffered_call.tool_call is not None + assert buffered_call.tool_call.id == "call_123" + buffered_result = coordinator._stream_buffers[("llm_node", "generation", "tool_results")][0] + assert buffered_result.tool_result is not None + assert buffered_result.tool_result.status == "success" + + # Verify we can find child streams + child_streams = coordinator._find_child_streams(["llm_node", "generation"]) + assert len(child_streams) == 3 + assert ("llm_node", "generation", "content") in child_streams + assert ("llm_node", "generation", "tool_calls") in child_streams + assert ("llm_node", "generation", "tool_results") in child_streams + + def test_find_child_streams(self): + """Test the _find_child_streams method.""" + graph = MagicMock(spec=Graph) + variable_pool = MagicMock(spec=VariablePool) + + coordinator = ResponseStreamCoordinator(variable_pool, graph) + + # Add some mock streams + coordinator._stream_buffers = { + ("node1", "generation", "content"): [], + ("node1", "generation", "tool_calls"): [], + 
("node1", "generation", "thought"): [], + ("node1", "text"): [], # Not a child of generation + ("node2", "generation", "content"): [], # Different node + } + + # Find children of node1.generation + children = coordinator._find_child_streams(["node1", "generation"]) + + assert len(children) == 3 + assert ("node1", "generation", "content") in children + assert ("node1", "generation", "tool_calls") in children + assert ("node1", "generation", "thought") in children + assert ("node1", "text") not in children + assert ("node2", "generation", "content") not in children + + def test_find_child_streams_with_closed_streams(self): + """Test that _find_child_streams also considers closed streams.""" + graph = MagicMock(spec=Graph) + variable_pool = MagicMock(spec=VariablePool) + + coordinator = ResponseStreamCoordinator(variable_pool, graph) + + # Add some streams - some buffered, some closed + coordinator._stream_buffers = { + ("node1", "generation", "content"): [], + } + coordinator._closed_streams = { + ("node1", "generation", "tool_calls"), + ("node1", "generation", "thought"), + } + + # Should find all children regardless of whether they're in buffers or closed + children = coordinator._find_child_streams(["node1", "generation"]) + + assert len(children) == 3 + assert ("node1", "generation", "content") in children + assert ("node1", "generation", "tool_calls") in children + assert ("node1", "generation", "thought") in children + + def test_special_selector_rewrites_to_active_response_node(self): + """Ensure special selectors attribute streams to the active response node.""" + graph = MagicMock(spec=Graph) + variable_pool = MagicMock(spec=VariablePool) + + response_node = MagicMock() + response_node.id = "response_node" + response_node.node_type = NodeType.ANSWER + graph.nodes = {"response_node": response_node} + graph.root_node = response_node + + coordinator = ResponseStreamCoordinator(variable_pool, graph) + coordinator.track_node_execution("response_node", "exec_resp") + + coordinator._active_session = ResponseSession( + node_id="response_node", + template=Template(segments=[VariableSegment(selector=["sys", "foo"])]), + ) + + event = NodeRunStreamChunkEvent( + id="stream_1", + node_id="llm_node", + node_type=NodeType.LLM, + selector=["sys", "foo"], + chunk="hi", + is_final=True, + chunk_type=ChunkType.TEXT, + ) + + coordinator._stream_buffers[("sys", "foo")] = [event] + coordinator._stream_positions[("sys", "foo")] = 0 + coordinator._closed_streams.add(("sys", "foo")) + + events, is_complete = coordinator._process_variable_segment(VariableSegment(selector=["sys", "foo"])) + + assert is_complete + assert len(events) == 1 + rewritten = events[0] + assert rewritten.node_id == "response_node" + assert rewritten.id == "exec_resp" diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_stop_event.py b/api/tests/unit_tests/core/workflow/graph_engine/test_stop_event.py new file mode 100644 index 0000000000..ea8d3a977f --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_stop_event.py @@ -0,0 +1,539 @@ +""" +Unit tests for stop_event functionality in GraphEngine. + +Tests the unified stop_event management by GraphEngine and its propagation +to WorkerPool, Worker, Dispatcher, and Nodes. 
+""" + +import threading +import time +from unittest.mock import MagicMock, Mock, patch + +from core.app.entities.app_invoke_entities import InvokeFrom +from core.workflow.entities.graph_init_params import GraphInitParams +from core.workflow.graph import Graph +from core.workflow.graph_engine import GraphEngine +from core.workflow.graph_engine.command_channels import InMemoryChannel +from core.workflow.graph_events import ( + GraphRunStartedEvent, + GraphRunSucceededEvent, + NodeRunStartedEvent, +) +from core.workflow.nodes.answer.answer_node import AnswerNode +from core.workflow.nodes.start.start_node import StartNode +from core.workflow.runtime import GraphRuntimeState, VariablePool +from models.enums import UserFrom + + +class TestStopEventPropagation: + """Test suite for stop_event propagation through GraphEngine components.""" + + def test_graph_engine_creates_stop_event(self): + """Test that GraphEngine creates a stop_event on initialization.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=runtime_state, + command_channel=InMemoryChannel(), + ) + + # Verify stop_event was created + assert engine._stop_event is not None + assert isinstance(engine._stop_event, threading.Event) + + # Verify it was set in graph_runtime_state + assert runtime_state.stop_event is not None + assert runtime_state.stop_event is engine._stop_event + + def test_stop_event_cleared_on_start(self): + """Test that stop_event is cleared when execution starts.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + mock_graph.root_node.id = "start" # Set proper id + + start_node = StartNode( + id="start", + config={"id": "start", "data": {"title": "start", "variables": []}}, + graph_init_params=GraphInitParams( + tenant_id="test_tenant", + app_id="test_app", + workflow_id="test_workflow", + graph_config={}, + user_id="test_user", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ), + graph_runtime_state=runtime_state, + ) + mock_graph.nodes["start"] = start_node + mock_graph.get_outgoing_edges = MagicMock(return_value=[]) + mock_graph.get_incoming_edges = MagicMock(return_value=[]) + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=runtime_state, + command_channel=InMemoryChannel(), + ) + + # Set the stop_event before running + engine._stop_event.set() + assert engine._stop_event.is_set() + + # Run the engine (should clear the stop_event) + events = list(engine.run()) + + # After running, stop_event should be set again (by _stop_execution) + # But during start it was cleared + assert any(isinstance(e, GraphRunStartedEvent) for e in events) + assert any(isinstance(e, GraphRunSucceededEvent) for e in events) + + def test_stop_event_set_on_stop(self): + """Test that stop_event is set when execution stops.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + mock_graph.root_node.id = "start" # Set proper id + + start_node = StartNode( + 
id="start", + config={"id": "start", "data": {"title": "start", "variables": []}}, + graph_init_params=GraphInitParams( + tenant_id="test_tenant", + app_id="test_app", + workflow_id="test_workflow", + graph_config={}, + user_id="test_user", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ), + graph_runtime_state=runtime_state, + ) + mock_graph.nodes["start"] = start_node + mock_graph.get_outgoing_edges = MagicMock(return_value=[]) + mock_graph.get_incoming_edges = MagicMock(return_value=[]) + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=runtime_state, + command_channel=InMemoryChannel(), + ) + + # Initially not set + assert not engine._stop_event.is_set() + + # Run the engine + list(engine.run()) + + # After execution completes, stop_event should be set + assert engine._stop_event.is_set() + + def test_stop_event_passed_to_worker_pool(self): + """Test that stop_event is passed to WorkerPool.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=runtime_state, + command_channel=InMemoryChannel(), + ) + + # Verify WorkerPool has the stop_event + assert engine._worker_pool._stop_event is not None + assert engine._worker_pool._stop_event is engine._stop_event + + def test_stop_event_passed_to_dispatcher(self): + """Test that stop_event is passed to Dispatcher.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=runtime_state, + command_channel=InMemoryChannel(), + ) + + # Verify Dispatcher has the stop_event + assert engine._dispatcher._stop_event is not None + assert engine._dispatcher._stop_event is engine._stop_event + + +class TestNodeStopCheck: + """Test suite for Node._should_stop() functionality.""" + + def test_node_should_stop_checks_runtime_state(self): + """Test that Node._should_stop() checks GraphRuntimeState.stop_event.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + + answer_node = AnswerNode( + id="answer", + config={"id": "answer", "data": {"title": "answer", "answer": "{{#start.result#}}"}}, + graph_init_params=GraphInitParams( + tenant_id="test_tenant", + app_id="test_app", + workflow_id="test_workflow", + graph_config={}, + user_id="test_user", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ), + graph_runtime_state=runtime_state, + ) + + # Initially stop_event is not set + assert not answer_node._should_stop() + + # Set the stop_event + runtime_state.stop_event.set() + + # Now _should_stop should return True + assert answer_node._should_stop() + + def test_node_run_checks_stop_event_between_yields(self): + """Test that Node.run() checks stop_event between yielding events.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + + # Create a simple node + answer_node = AnswerNode( + id="answer", + config={"id": "answer", "data": {"title": "answer", "answer": "hello"}}, + graph_init_params=GraphInitParams( + tenant_id="test_tenant", + 
app_id="test_app", + workflow_id="test_workflow", + graph_config={}, + user_id="test_user", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ), + graph_runtime_state=runtime_state, + ) + + # Set stop_event BEFORE running the node + runtime_state.stop_event.set() + + # Run the node - should yield start event then detect stop + # The node should check stop_event before processing + assert answer_node._should_stop(), "stop_event should be set" + + # Run and collect events + events = list(answer_node.run()) + + # Since stop_event is set at the start, we should get: + # 1. NodeRunStartedEvent (always yielded first) + # 2. Either NodeRunFailedEvent (if detected early) or NodeRunSucceededEvent (if too fast) + assert len(events) >= 2 + assert isinstance(events[0], NodeRunStartedEvent) + + # Note: AnswerNode is very simple and might complete before stop check + # The important thing is that _should_stop() returns True when stop_event is set + assert answer_node._should_stop() + + +class TestStopEventIntegration: + """Integration tests for stop_event in workflow execution.""" + + def test_simple_workflow_respects_stop_event(self): + """Test that a simple workflow respects stop_event.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + mock_graph.root_node.id = "start" + + # Create start and answer nodes + start_node = StartNode( + id="start", + config={"id": "start", "data": {"title": "start", "variables": []}}, + graph_init_params=GraphInitParams( + tenant_id="test_tenant", + app_id="test_app", + workflow_id="test_workflow", + graph_config={}, + user_id="test_user", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ), + graph_runtime_state=runtime_state, + ) + + answer_node = AnswerNode( + id="answer", + config={"id": "answer", "data": {"title": "answer", "answer": "hello"}}, + graph_init_params=GraphInitParams( + tenant_id="test_tenant", + app_id="test_app", + workflow_id="test_workflow", + graph_config={}, + user_id="test_user", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ), + graph_runtime_state=runtime_state, + ) + + mock_graph.nodes["start"] = start_node + mock_graph.nodes["answer"] = answer_node + mock_graph.get_outgoing_edges = MagicMock(return_value=[]) + mock_graph.get_incoming_edges = MagicMock(return_value=[]) + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=runtime_state, + command_channel=InMemoryChannel(), + ) + + # Set stop_event before running + runtime_state.stop_event.set() + + # Run the engine + events = list(engine.run()) + + # Should get started event but not succeeded (due to stop) + assert any(isinstance(e, GraphRunStartedEvent) for e in events) + # The workflow should still complete (start node runs quickly) + # but answer node might be cancelled depending on timing + + def test_stop_event_with_concurrent_nodes(self): + """Test stop_event behavior with multiple concurrent nodes.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + + # Create multiple nodes + for i in range(3): + answer_node = AnswerNode( + id=f"answer_{i}", + config={"id": f"answer_{i}", "data": {"title": 
f"answer_{i}", "answer": f"test{i}"}}, + graph_init_params=GraphInitParams( + tenant_id="test_tenant", + app_id="test_app", + workflow_id="test_workflow", + graph_config={}, + user_id="test_user", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ), + graph_runtime_state=runtime_state, + ) + mock_graph.nodes[f"answer_{i}"] = answer_node + + mock_graph.get_outgoing_edges = MagicMock(return_value=[]) + mock_graph.get_incoming_edges = MagicMock(return_value=[]) + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=runtime_state, + command_channel=InMemoryChannel(), + ) + + # All nodes should share the same stop_event + for node in mock_graph.nodes.values(): + assert node.graph_runtime_state.stop_event is runtime_state.stop_event + assert node.graph_runtime_state.stop_event is engine._stop_event + + +class TestStopEventTimeoutBehavior: + """Test stop_event behavior with join timeouts.""" + + @patch("core.workflow.graph_engine.orchestration.dispatcher.threading.Thread") + def test_dispatcher_uses_shorter_timeout(self, mock_thread_cls: MagicMock): + """Test that Dispatcher uses 2s timeout instead of 10s.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=runtime_state, + command_channel=InMemoryChannel(), + ) + + dispatcher = engine._dispatcher + dispatcher.start() # This will create and start the mocked thread + + mock_thread_instance = mock_thread_cls.return_value + mock_thread_instance.is_alive.return_value = True + + dispatcher.stop() + + mock_thread_instance.join.assert_called_once_with(timeout=2.0) + + @patch("core.workflow.graph_engine.worker_management.worker_pool.Worker") + def test_worker_pool_uses_shorter_timeout(self, mock_worker_cls: MagicMock): + """Test that WorkerPool uses 2s timeout instead of 10s.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=runtime_state, + command_channel=InMemoryChannel(), + ) + + worker_pool = engine._worker_pool + worker_pool.start(initial_count=1) # Start with one worker + + mock_worker_instance = mock_worker_cls.return_value + mock_worker_instance.is_alive.return_value = True + + worker_pool.stop() + + mock_worker_instance.join.assert_called_once_with(timeout=2.0) + + +class TestStopEventResumeBehavior: + """Test stop_event behavior during workflow resume.""" + + def test_stop_event_cleared_on_resume(self): + """Test that stop_event is cleared when resuming a paused workflow.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + mock_graph.root_node.id = "start" # Set proper id + + start_node = StartNode( + id="start", + config={"id": "start", "data": {"title": "start", "variables": []}}, + graph_init_params=GraphInitParams( + tenant_id="test_tenant", + app_id="test_app", + workflow_id="test_workflow", + graph_config={}, + user_id="test_user", + user_from=UserFrom.ACCOUNT, + 
invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ), + graph_runtime_state=runtime_state, + ) + mock_graph.nodes["start"] = start_node + mock_graph.get_outgoing_edges = MagicMock(return_value=[]) + mock_graph.get_incoming_edges = MagicMock(return_value=[]) + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=runtime_state, + command_channel=InMemoryChannel(), + ) + + # Simulate a previous execution that set stop_event + engine._stop_event.set() + assert engine._stop_event.is_set() + + # Run the engine (should clear stop_event in _start_execution) + events = list(engine.run()) + + # Execution should complete successfully + assert any(isinstance(e, GraphRunStartedEvent) for e in events) + assert any(isinstance(e, GraphRunSucceededEvent) for e in events) + + +class TestWorkerStopBehavior: + """Test Worker behavior with shared stop_event.""" + + def test_worker_uses_shared_stop_event(self): + """Test that Worker uses shared stop_event from GraphEngine.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=runtime_state, + command_channel=InMemoryChannel(), + ) + + # Get the worker pool and check workers + worker_pool = engine._worker_pool + + # Start the worker pool to create workers + worker_pool.start() + + # Check that at least one worker was created + assert len(worker_pool._workers) > 0 + + # Verify workers use the shared stop_event + for worker in worker_pool._workers: + assert worker._stop_event is engine._stop_event + + # Clean up + worker_pool.stop() + + def test_worker_stop_is_noop(self): + """Test that Worker.stop() is now a no-op.""" + runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + + # Create a mock worker + from core.workflow.graph_engine.ready_queue import InMemoryReadyQueue + from core.workflow.graph_engine.worker import Worker + + ready_queue = InMemoryReadyQueue() + event_queue = MagicMock() + + # Create a proper mock graph with real dict + mock_graph = Mock(spec=Graph) + mock_graph.nodes = {} # Use real dict + + stop_event = threading.Event() + + worker = Worker( + ready_queue=ready_queue, + event_queue=event_queue, + graph=mock_graph, + layers=[], + stop_event=stop_event, + ) + + # Calling stop() should do nothing (no-op) + # and should NOT set the stop_event + worker.stop() + assert not stop_event.is_set() diff --git a/api/tests/unit_tests/core/workflow/node_events/test_stream_chunk_events.py b/api/tests/unit_tests/core/workflow/node_events/test_stream_chunk_events.py new file mode 100644 index 0000000000..951149e933 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/node_events/test_stream_chunk_events.py @@ -0,0 +1,328 @@ +"""Tests for StreamChunkEvent and its subclasses.""" + +from core.workflow.entities import ToolCall, ToolResult, ToolResultStatus +from core.workflow.node_events import ( + ChunkType, + StreamChunkEvent, + ThoughtChunkEvent, + ToolCallChunkEvent, + ToolResultChunkEvent, +) + + +class TestChunkType: + """Tests for ChunkType enum.""" + + def test_chunk_type_values(self): + """Test that ChunkType has expected values.""" + assert ChunkType.TEXT == "text" + assert ChunkType.TOOL_CALL == "tool_call" + assert ChunkType.TOOL_RESULT == "tool_result" + assert ChunkType.THOUGHT == "thought" + + def 
test_chunk_type_is_str_enum(self): + """Test that ChunkType values are strings.""" + for chunk_type in ChunkType: + assert isinstance(chunk_type.value, str) + + +class TestStreamChunkEvent: + """Tests for base StreamChunkEvent.""" + + def test_create_with_required_fields(self): + """Test creating StreamChunkEvent with required fields.""" + event = StreamChunkEvent( + selector=["node1", "text"], + chunk="Hello", + ) + + assert event.selector == ["node1", "text"] + assert event.chunk == "Hello" + assert event.is_final is False + assert event.chunk_type == ChunkType.TEXT + + def test_create_with_all_fields(self): + """Test creating StreamChunkEvent with all fields.""" + event = StreamChunkEvent( + selector=["node1", "output"], + chunk="World", + is_final=True, + chunk_type=ChunkType.TEXT, + ) + + assert event.selector == ["node1", "output"] + assert event.chunk == "World" + assert event.is_final is True + assert event.chunk_type == ChunkType.TEXT + + def test_default_chunk_type_is_text(self): + """Test that default chunk_type is TEXT.""" + event = StreamChunkEvent( + selector=["node1", "text"], + chunk="test", + ) + + assert event.chunk_type == ChunkType.TEXT + + def test_serialization(self): + """Test that event can be serialized to dict.""" + event = StreamChunkEvent( + selector=["node1", "text"], + chunk="Hello", + is_final=True, + ) + + data = event.model_dump() + + assert data["selector"] == ["node1", "text"] + assert data["chunk"] == "Hello" + assert data["is_final"] is True + assert data["chunk_type"] == "text" + + +class TestToolCallChunkEvent: + """Tests for ToolCallChunkEvent.""" + + def test_create_with_required_fields(self): + """Test creating ToolCallChunkEvent with required fields.""" + event = ToolCallChunkEvent( + selector=["node1", "tool_calls"], + chunk='{"city": "Beijing"}', + tool_call=ToolCall(id="call_123", name="weather", arguments=None), + ) + + assert event.selector == ["node1", "tool_calls"] + assert event.chunk == '{"city": "Beijing"}' + assert event.tool_call.id == "call_123" + assert event.tool_call.name == "weather" + assert event.chunk_type == ChunkType.TOOL_CALL + + def test_chunk_type_is_tool_call(self): + """Test that chunk_type is always TOOL_CALL.""" + event = ToolCallChunkEvent( + selector=["node1", "tool_calls"], + chunk="", + tool_call=ToolCall(id="call_123", name="test_tool", arguments=None), + ) + + assert event.chunk_type == ChunkType.TOOL_CALL + + def test_tool_arguments_field(self): + """Test tool_arguments field.""" + event = ToolCallChunkEvent( + selector=["node1", "tool_calls"], + chunk='{"param": "value"}', + tool_call=ToolCall( + id="call_123", + name="test_tool", + arguments='{"param": "value"}', + ), + ) + + assert event.tool_call.arguments == '{"param": "value"}' + + def test_serialization(self): + """Test that event can be serialized to dict.""" + event = ToolCallChunkEvent( + selector=["node1", "tool_calls"], + chunk='{"city": "Beijing"}', + tool_call=ToolCall( + id="call_123", + name="weather", + arguments='{"city": "Beijing"}', + ), + is_final=True, + ) + + data = event.model_dump() + + assert data["chunk_type"] == "tool_call" + assert data["tool_call"]["id"] == "call_123" + assert data["tool_call"]["name"] == "weather" + assert data["tool_call"]["arguments"] == '{"city": "Beijing"}' + assert data["is_final"] is True + + +class TestToolResultChunkEvent: + """Tests for ToolResultChunkEvent.""" + + def test_create_with_required_fields(self): + """Test creating ToolResultChunkEvent with required fields.""" + event = ToolResultChunkEvent( + 
selector=["node1", "tool_results"], + chunk="Weather: Sunny, 25°C", + tool_result=ToolResult(id="call_123", name="weather", output="Weather: Sunny, 25°C"), + ) + + assert event.selector == ["node1", "tool_results"] + assert event.chunk == "Weather: Sunny, 25°C" + assert event.tool_result.id == "call_123" + assert event.tool_result.name == "weather" + assert event.chunk_type == ChunkType.TOOL_RESULT + + def test_chunk_type_is_tool_result(self): + """Test that chunk_type is always TOOL_RESULT.""" + event = ToolResultChunkEvent( + selector=["node1", "tool_results"], + chunk="result", + tool_result=ToolResult(id="call_123", name="test_tool"), + ) + + assert event.chunk_type == ChunkType.TOOL_RESULT + + def test_tool_files_default_empty(self): + """Test that tool_files defaults to empty list.""" + event = ToolResultChunkEvent( + selector=["node1", "tool_results"], + chunk="result", + tool_result=ToolResult(id="call_123", name="test_tool"), + ) + + assert event.tool_result.files == [] + + def test_tool_files_with_values(self): + """Test tool_files with file IDs.""" + event = ToolResultChunkEvent( + selector=["node1", "tool_results"], + chunk="result", + tool_result=ToolResult( + id="call_123", + name="test_tool", + files=["file_1", "file_2"], + ), + ) + + assert event.tool_result.files == ["file_1", "file_2"] + + def test_tool_error_output(self): + """Test error output captured in tool_result.""" + event = ToolResultChunkEvent( + selector=["node1", "tool_results"], + chunk="", + tool_result=ToolResult( + id="call_123", + name="test_tool", + output="Tool execution failed", + status=ToolResultStatus.ERROR, + ), + ) + + assert event.tool_result.output == "Tool execution failed" + assert event.tool_result.status == ToolResultStatus.ERROR + + def test_serialization(self): + """Test that event can be serialized to dict.""" + event = ToolResultChunkEvent( + selector=["node1", "tool_results"], + chunk="Weather: Sunny", + tool_result=ToolResult( + id="call_123", + name="weather", + output="Weather: Sunny", + files=["file_1"], + status=ToolResultStatus.SUCCESS, + ), + is_final=True, + ) + + data = event.model_dump() + + assert data["chunk_type"] == "tool_result" + assert data["tool_result"]["id"] == "call_123" + assert data["tool_result"]["name"] == "weather" + assert data["tool_result"]["files"] == ["file_1"] + assert data["is_final"] is True + + +class TestThoughtChunkEvent: + """Tests for ThoughtChunkEvent.""" + + def test_create_with_required_fields(self): + """Test creating ThoughtChunkEvent with required fields.""" + event = ThoughtChunkEvent( + selector=["node1", "thought"], + chunk="I need to query the weather...", + ) + + assert event.selector == ["node1", "thought"] + assert event.chunk == "I need to query the weather..." + assert event.chunk_type == ChunkType.THOUGHT + + def test_chunk_type_is_thought(self): + """Test that chunk_type is always THOUGHT.""" + event = ThoughtChunkEvent( + selector=["node1", "thought"], + chunk="thinking...", + ) + + assert event.chunk_type == ChunkType.THOUGHT + + def test_serialization(self): + """Test that event can be serialized to dict.""" + event = ThoughtChunkEvent( + selector=["node1", "thought"], + chunk="I need to analyze this...", + is_final=False, + ) + + data = event.model_dump() + + assert data["chunk_type"] == "thought" + assert data["chunk"] == "I need to analyze this..." 
+ assert data["is_final"] is False + + +class TestEventInheritance: + """Tests for event inheritance relationships.""" + + def test_tool_call_is_stream_chunk(self): + """Test that ToolCallChunkEvent is a StreamChunkEvent.""" + event = ToolCallChunkEvent( + selector=["node1", "tool_calls"], + chunk="", + tool_call=ToolCall(id="call_123", name="test", arguments=None), + ) + + assert isinstance(event, StreamChunkEvent) + + def test_tool_result_is_stream_chunk(self): + """Test that ToolResultChunkEvent is a StreamChunkEvent.""" + event = ToolResultChunkEvent( + selector=["node1", "tool_results"], + chunk="result", + tool_result=ToolResult(id="call_123", name="test"), + ) + + assert isinstance(event, StreamChunkEvent) + + def test_thought_is_stream_chunk(self): + """Test that ThoughtChunkEvent is a StreamChunkEvent.""" + event = ThoughtChunkEvent( + selector=["node1", "thought"], + chunk="thinking...", + ) + + assert isinstance(event, StreamChunkEvent) + + def test_all_events_have_common_fields(self): + """Test that all events have common StreamChunkEvent fields.""" + events = [ + StreamChunkEvent(selector=["n", "t"], chunk="a"), + ToolCallChunkEvent( + selector=["n", "t"], + chunk="b", + tool_call=ToolCall(id="1", name="t", arguments=None), + ), + ToolResultChunkEvent( + selector=["n", "t"], + chunk="c", + tool_result=ToolResult(id="1", name="t"), + ), + ThoughtChunkEvent(selector=["n", "t"], chunk="d"), + ] + + for event in events: + assert hasattr(event, "selector") + assert hasattr(event, "chunk") + assert hasattr(event, "is_final") + assert hasattr(event, "chunk_type") diff --git a/api/tests/unit_tests/core/workflow/nodes/code/code_node_spec.py b/api/tests/unit_tests/core/workflow/nodes/code/code_node_spec.py index 596e72ddd0..2262d25a14 100644 --- a/api/tests/unit_tests/core/workflow/nodes/code/code_node_spec.py +++ b/api/tests/unit_tests/core/workflow/nodes/code/code_node_spec.py @@ -1,3 +1,4 @@ +from configs import dify_config from core.helper.code_executor.code_executor import CodeLanguage from core.variables.types import SegmentType from core.workflow.nodes.code.code_node import CodeNode @@ -7,6 +8,18 @@ from core.workflow.nodes.code.exc import ( DepthLimitError, OutputValidationError, ) +from core.workflow.nodes.code.limits import CodeNodeLimits + +CodeNode._limits = CodeNodeLimits( + max_string_length=dify_config.CODE_MAX_STRING_LENGTH, + max_number=dify_config.CODE_MAX_NUMBER, + min_number=dify_config.CODE_MIN_NUMBER, + max_precision=dify_config.CODE_MAX_PRECISION, + max_depth=dify_config.CODE_MAX_DEPTH, + max_number_array_length=dify_config.CODE_MAX_NUMBER_ARRAY_LENGTH, + max_string_array_length=dify_config.CODE_MAX_STRING_ARRAY_LENGTH, + max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH, +) class TestCodeNodeExceptions: diff --git a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py index f040a92b6f..27df938102 100644 --- a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py +++ b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py @@ -1,3 +1,5 @@ +import pytest + from core.workflow.nodes.http_request import ( BodyData, HttpRequestNodeAuthorization, @@ -5,6 +7,7 @@ from core.workflow.nodes.http_request import ( HttpRequestNodeData, ) from core.workflow.nodes.http_request.entities import HttpRequestNodeTimeout +from core.workflow.nodes.http_request.exc import AuthorizationConfigError 
from core.workflow.nodes.http_request.executor import Executor from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable @@ -348,3 +351,127 @@ def test_init_params(): executor = create_executor("key1:value1\n\nkey2:value2\n\n") executor._init_params() assert executor.params == [("key1", "value1"), ("key2", "value2")] + + +def test_empty_api_key_raises_error_bearer(): + """Test that empty API key raises AuthorizationConfigError for bearer auth.""" + variable_pool = VariablePool(system_variables=SystemVariable.empty()) + node_data = HttpRequestNodeData( + title="test", + method="get", + url="http://example.com", + headers="", + params="", + authorization=HttpRequestNodeAuthorization( + type="api-key", + config={"type": "bearer", "api_key": ""}, + ), + ) + timeout = HttpRequestNodeTimeout(connect=10, read=30, write=30) + + with pytest.raises(AuthorizationConfigError, match="API key is required"): + Executor( + node_data=node_data, + timeout=timeout, + variable_pool=variable_pool, + ) + + +def test_empty_api_key_raises_error_basic(): + """Test that empty API key raises AuthorizationConfigError for basic auth.""" + variable_pool = VariablePool(system_variables=SystemVariable.empty()) + node_data = HttpRequestNodeData( + title="test", + method="get", + url="http://example.com", + headers="", + params="", + authorization=HttpRequestNodeAuthorization( + type="api-key", + config={"type": "basic", "api_key": ""}, + ), + ) + timeout = HttpRequestNodeTimeout(connect=10, read=30, write=30) + + with pytest.raises(AuthorizationConfigError, match="API key is required"): + Executor( + node_data=node_data, + timeout=timeout, + variable_pool=variable_pool, + ) + + +def test_empty_api_key_raises_error_custom(): + """Test that empty API key raises AuthorizationConfigError for custom auth.""" + variable_pool = VariablePool(system_variables=SystemVariable.empty()) + node_data = HttpRequestNodeData( + title="test", + method="get", + url="http://example.com", + headers="", + params="", + authorization=HttpRequestNodeAuthorization( + type="api-key", + config={"type": "custom", "api_key": "", "header": "X-Custom-Auth"}, + ), + ) + timeout = HttpRequestNodeTimeout(connect=10, read=30, write=30) + + with pytest.raises(AuthorizationConfigError, match="API key is required"): + Executor( + node_data=node_data, + timeout=timeout, + variable_pool=variable_pool, + ) + + +def test_whitespace_only_api_key_raises_error(): + """Test that whitespace-only API key raises AuthorizationConfigError.""" + variable_pool = VariablePool(system_variables=SystemVariable.empty()) + node_data = HttpRequestNodeData( + title="test", + method="get", + url="http://example.com", + headers="", + params="", + authorization=HttpRequestNodeAuthorization( + type="api-key", + config={"type": "bearer", "api_key": " "}, + ), + ) + timeout = HttpRequestNodeTimeout(connect=10, read=30, write=30) + + with pytest.raises(AuthorizationConfigError, match="API key is required"): + Executor( + node_data=node_data, + timeout=timeout, + variable_pool=variable_pool, + ) + + +def test_valid_api_key_works(): + """Test that valid API key works correctly for bearer auth.""" + variable_pool = VariablePool(system_variables=SystemVariable.empty()) + node_data = HttpRequestNodeData( + title="test", + method="get", + url="http://example.com", + headers="", + params="", + authorization=HttpRequestNodeAuthorization( + type="api-key", + config={"type": "bearer", "api_key": "valid-api-key-123"}, + ), + ) + timeout = 
HttpRequestNodeTimeout(connect=10, read=30, write=30) + + executor = Executor( + node_data=node_data, + timeout=timeout, + variable_pool=variable_pool, + ) + + # Should not raise an error + headers = executor._assembling_headers() + assert "Authorization" in headers + assert headers["Authorization"] == "Bearer valid-api-key-123" diff --git a/api/tests/unit_tests/core/workflow/nodes/llm/test_file_saver.py b/api/tests/unit_tests/core/workflow/nodes/llm/test_file_saver.py index e8f257bf2f..1e224d56a5 100644 --- a/api/tests/unit_tests/core/workflow/nodes/llm/test_file_saver.py +++ b/api/tests/unit_tests/core/workflow/nodes/llm/test_file_saver.py @@ -78,7 +78,7 @@ class TestFileSaverImpl: file_binary=_PNG_DATA, mimetype=mime_type, ) - mocked_sign_file.assert_called_once_with(mock_tool_file.id, ".png") + mocked_sign_file.assert_called_once_with(tool_file_id=mock_tool_file.id, extension=".png", for_external=True) def test_save_remote_url_request_failed(self, monkeypatch: pytest.MonkeyPatch): _TEST_URL = "https://example.com/image.png" diff --git a/api/tests/unit_tests/core/workflow/nodes/template_transform/template_transform_node_spec.py b/api/tests/unit_tests/core/workflow/nodes/template_transform/template_transform_node_spec.py index 1a67d5c3e3..66d6c3c56b 100644 --- a/api/tests/unit_tests/core/workflow/nodes/template_transform/template_transform_node_spec.py +++ b/api/tests/unit_tests/core/workflow/nodes/template_transform/template_transform_node_spec.py @@ -5,8 +5,8 @@ from core.workflow.graph_engine.entities.graph import Graph from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState -from core.helper.code_executor.code_executor import CodeExecutionError from core.workflow.enums import ErrorStrategy, NodeType, WorkflowNodeExecutionStatus +from core.workflow.nodes.template_transform.template_renderer import TemplateRenderError from core.workflow.nodes.template_transform.template_transform_node import TemplateTransformNode from models.workflow import WorkflowType @@ -127,7 +127,9 @@ class TestTemplateTransformNode: """Test version class method.""" assert TemplateTransformNode.version() == "1" - @patch("core.workflow.nodes.template_transform.template_transform_node.CodeExecutor.execute_workflow_code_template") + @patch( + "core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template" + ) def test_run_simple_template( self, mock_execute, basic_node_data, mock_graph, mock_graph_runtime_state, graph_init_params ): @@ -145,7 +147,7 @@ class TestTemplateTransformNode: mock_graph_runtime_state.variable_pool.get.side_effect = lambda selector: variable_map.get(tuple(selector)) # Setup mock executor - mock_execute.return_value = {"result": "Hello Alice, you are 30 years old!"} + mock_execute.return_value = "Hello Alice, you are 30 years old!" 
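+        # render_template returns the rendered string directly, not a {"result": ...} dict as the old executor did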
node = TemplateTransformNode( id="test_node", @@ -162,7 +164,9 @@ class TestTemplateTransformNode: assert result.inputs["name"] == "Alice" assert result.inputs["age"] == 30 - @patch("core.workflow.nodes.template_transform.template_transform_node.CodeExecutor.execute_workflow_code_template") + @patch( + "core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template" + ) def test_run_with_none_values(self, mock_execute, mock_graph, mock_graph_runtime_state, graph_init_params): """Test _run with None variable values.""" node_data = { @@ -172,7 +176,7 @@ class TestTemplateTransformNode: } mock_graph_runtime_state.variable_pool.get.return_value = None - mock_execute.return_value = {"result": "Value: "} + mock_execute.return_value = "Value: " node = TemplateTransformNode( id="test_node", @@ -187,13 +191,15 @@ class TestTemplateTransformNode: assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED assert result.inputs["value"] is None - @patch("core.workflow.nodes.template_transform.template_transform_node.CodeExecutor.execute_workflow_code_template") + @patch( + "core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template" + ) def test_run_with_code_execution_error( self, mock_execute, basic_node_data, mock_graph, mock_graph_runtime_state, graph_init_params ): """Test _run when code execution fails.""" mock_graph_runtime_state.variable_pool.get.return_value = MagicMock() - mock_execute.side_effect = CodeExecutionError("Template syntax error") + mock_execute.side_effect = TemplateRenderError("Template syntax error") node = TemplateTransformNode( id="test_node", @@ -208,14 +214,16 @@ class TestTemplateTransformNode: assert result.status == WorkflowNodeExecutionStatus.FAILED assert "Template syntax error" in result.error - @patch("core.workflow.nodes.template_transform.template_transform_node.CodeExecutor.execute_workflow_code_template") + @patch( + "core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template" + ) @patch("core.workflow.nodes.template_transform.template_transform_node.MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH", 10) def test_run_output_length_exceeds_limit( self, mock_execute, basic_node_data, mock_graph, mock_graph_runtime_state, graph_init_params ): """Test _run when output exceeds maximum length.""" mock_graph_runtime_state.variable_pool.get.return_value = MagicMock() - mock_execute.return_value = {"result": "This is a very long output that exceeds the limit"} + mock_execute.return_value = "This is a very long output that exceeds the limit" node = TemplateTransformNode( id="test_node", @@ -230,7 +238,9 @@ class TestTemplateTransformNode: assert result.status == WorkflowNodeExecutionStatus.FAILED assert "Output length exceeds" in result.error - @patch("core.workflow.nodes.template_transform.template_transform_node.CodeExecutor.execute_workflow_code_template") + @patch( + "core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template" + ) def test_run_with_complex_jinja2_template( self, mock_execute, mock_graph, mock_graph_runtime_state, graph_init_params ): @@ -257,7 +267,7 @@ class TestTemplateTransformNode: ("sys", "show_total"): mock_show_total, } mock_graph_runtime_state.variable_pool.get.side_effect = lambda selector: variable_map.get(tuple(selector)) - mock_execute.return_value = {"result": "apple, banana, orange (Total: 3)"} + mock_execute.return_value = "apple, 
banana, orange (Total: 3)" node = TemplateTransformNode( id="test_node", @@ -292,7 +302,9 @@ class TestTemplateTransformNode: assert mapping["node_123.var1"] == ["sys", "input1"] assert mapping["node_123.var2"] == ["sys", "input2"] - @patch("core.workflow.nodes.template_transform.template_transform_node.CodeExecutor.execute_workflow_code_template") + @patch( + "core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template" + ) def test_run_with_empty_variables(self, mock_execute, mock_graph, mock_graph_runtime_state, graph_init_params): """Test _run with no variables (static template).""" node_data = { @@ -301,7 +313,7 @@ class TestTemplateTransformNode: "template": "This is a static message.", } - mock_execute.return_value = {"result": "This is a static message."} + mock_execute.return_value = "This is a static message." node = TemplateTransformNode( id="test_node", @@ -317,7 +329,9 @@ class TestTemplateTransformNode: assert result.outputs["output"] == "This is a static message." assert result.inputs == {} - @patch("core.workflow.nodes.template_transform.template_transform_node.CodeExecutor.execute_workflow_code_template") + @patch( + "core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template" + ) def test_run_with_numeric_values(self, mock_execute, mock_graph, mock_graph_runtime_state, graph_init_params): """Test _run with numeric variable values.""" node_data = { @@ -339,7 +353,7 @@ class TestTemplateTransformNode: ("sys", "quantity"): mock_quantity, } mock_graph_runtime_state.variable_pool.get.side_effect = lambda selector: variable_map.get(tuple(selector)) - mock_execute.return_value = {"result": "Total: $31.5"} + mock_execute.return_value = "Total: $31.5" node = TemplateTransformNode( id="test_node", @@ -354,7 +368,9 @@ class TestTemplateTransformNode: assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED assert result.outputs["output"] == "Total: $31.5" - @patch("core.workflow.nodes.template_transform.template_transform_node.CodeExecutor.execute_workflow_code_template") + @patch( + "core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template" + ) def test_run_with_dict_values(self, mock_execute, mock_graph, mock_graph_runtime_state, graph_init_params): """Test _run with dictionary variable values.""" node_data = { @@ -367,7 +383,7 @@ class TestTemplateTransformNode: mock_user.to_object.return_value = {"name": "John Doe", "email": "john@example.com"} mock_graph_runtime_state.variable_pool.get.return_value = mock_user - mock_execute.return_value = {"result": "Name: John Doe, Email: john@example.com"} + mock_execute.return_value = "Name: John Doe, Email: john@example.com" node = TemplateTransformNode( id="test_node", @@ -383,7 +399,9 @@ class TestTemplateTransformNode: assert "John Doe" in result.outputs["output"] assert "john@example.com" in result.outputs["output"] - @patch("core.workflow.nodes.template_transform.template_transform_node.CodeExecutor.execute_workflow_code_template") + @patch( + "core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template" + ) def test_run_with_list_values(self, mock_execute, mock_graph, mock_graph_runtime_state, graph_init_params): """Test _run with list variable values.""" node_data = { @@ -396,7 +414,7 @@ class TestTemplateTransformNode: mock_tags.to_object.return_value = ["python", "ai", "workflow"] 
 mock_graph_runtime_state.variable_pool.get.return_value = mock_tags
-        mock_execute.return_value = {"result": "Tags: #python #ai #workflow "}
+        mock_execute.return_value = "Tags: #python #ai #workflow "
 
         node = TemplateTransformNode(
             id="test_node",
diff --git a/api/tests/unit_tests/core/workflow/nodes/test_llm_node_streaming.py b/api/tests/unit_tests/core/workflow/nodes/test_llm_node_streaming.py
new file mode 100644
index 0000000000..9d793f804f
--- /dev/null
+++ b/api/tests/unit_tests/core/workflow/nodes/test_llm_node_streaming.py
@@ -0,0 +1,148 @@
+import types
+from collections.abc import Generator
+from typing import Any
+
+import pytest
+
+from core.model_runtime.entities.llm_entities import LLMUsage
+from core.workflow.entities import ToolCallResult
+from core.workflow.entities.tool_entities import ToolResultStatus
+from core.workflow.node_events import ModelInvokeCompletedEvent, NodeEventBase
+from core.workflow.nodes.llm.node import LLMNode
+
+
+class _StubModelInstance:
+    """Minimal stub to satisfy _stream_llm_events signature."""
+
+    provider_model_bundle = None
+
+
+def _drain(generator: Generator[NodeEventBase, None, Any]):
+    events: list = []
+    try:
+        while True:
+            events.append(next(generator))
+    except StopIteration as exc:
+        return events, exc.value
+
+
+@pytest.fixture(autouse=True)
+def patch_deduct_llm_quota(monkeypatch):
+    # Avoid touching real quota logic during unit tests
+    monkeypatch.setattr("core.workflow.nodes.llm.node.llm_utils.deduct_llm_quota", lambda **_: None)
+
+
+def _make_llm_node(reasoning_format: str) -> LLMNode:
+    node = LLMNode.__new__(LLMNode)
+    object.__setattr__(node, "_node_data", types.SimpleNamespace(reasoning_format=reasoning_format, tools=[]))
+    object.__setattr__(node, "tenant_id", "tenant")
+    return node
+
+
+def test_stream_llm_events_extracts_reasoning_for_tagged():
+    node = _make_llm_node(reasoning_format="tagged")
+    tagged_text = "<think>Thought</think>Answer"
+    usage = LLMUsage.empty_usage()
+
+    def generator():
+        yield ModelInvokeCompletedEvent(
+            text=tagged_text,
+            usage=usage,
+            finish_reason="stop",
+            reasoning_content="",
+            structured_output=None,
+        )
+
+    events, returned = _drain(
+        node._stream_llm_events(generator(), model_instance=types.SimpleNamespace(provider_model_bundle=None))
+    )
+
+    assert events == []
+    clean_text, reasoning_content, gen_reasoning, gen_clean, ret_usage, finish_reason, structured, gen_data = returned
+    assert clean_text == tagged_text  # original tagged text preserved for output
+    assert reasoning_content == ""  # tagged mode leaves reasoning inline, so none is returned separately
+    assert gen_clean == "Answer"  # stripped content for generation
+    assert gen_reasoning == "Thought"  # reasoning extracted from the <think> tag
+    assert ret_usage == usage
+    assert finish_reason == "stop"
+    assert structured is None
+    assert gen_data is None
+
+    # generation building should include reasoning and sequence
+    generation_content = gen_clean or clean_text
+    sequence = [
+        {"type": "reasoning", "index": 0},
+        {"type": "content", "start": 0, "end": len(generation_content)},
+    ]
+    assert sequence == [
+        {"type": "reasoning", "index": 0},
+        {"type": "content", "start": 0, "end": len("Answer")},
+    ]
+
+
+def test_stream_llm_events_no_reasoning_results_in_empty_sequence():
+    node = _make_llm_node(reasoning_format="tagged")
+    plain_text = "Hello world"
+    usage = LLMUsage.empty_usage()
+
+    def generator():
+        yield ModelInvokeCompletedEvent(
+            text=plain_text,
+            usage=usage,
+            finish_reason=None,
+            reasoning_content="",
+            structured_output=None,
+        )
+
+    events, returned = _drain(
+        
node._stream_llm_events(generator(), model_instance=types.SimpleNamespace(provider_model_bundle=None)) + ) + + assert events == [] + _, _, gen_reasoning, gen_clean, *_ = returned + generation_content = gen_clean or plain_text + assert gen_reasoning == "" + assert generation_content == plain_text + # Empty reasoning should imply empty sequence in generation construction + sequence = [] + assert sequence == [] + + +def test_serialize_tool_call_strips_files_to_ids(): + file_cls = pytest.importorskip("core.file").File + file_type = pytest.importorskip("core.file.enums").FileType + transfer_method = pytest.importorskip("core.file.enums").FileTransferMethod + + file_with_id = file_cls( + id="f1", + tenant_id="t", + type=file_type.IMAGE, + transfer_method=transfer_method.REMOTE_URL, + remote_url="http://example.com/f1", + storage_key="k1", + ) + file_with_related = file_cls( + id=None, + tenant_id="t", + type=file_type.IMAGE, + transfer_method=transfer_method.REMOTE_URL, + related_id="rel2", + remote_url="http://example.com/f2", + storage_key="k2", + ) + tool_call = ToolCallResult( + id="tc", + name="do", + arguments='{"a":1}', + output="ok", + files=[file_with_id, file_with_related], + status=ToolResultStatus.SUCCESS, + ) + + serialized = LLMNode._serialize_tool_call(tool_call) + + assert serialized["files"] == ["f1", "rel2"] + assert serialized["id"] == "tc" + assert serialized["name"] == "do" + assert serialized["arguments"] == '{"a":1}' + assert serialized["output"] == "ok" diff --git a/api/tests/unit_tests/core/workflow/nodes/test_start_node_json_object.py b/api/tests/unit_tests/core/workflow/nodes/test_start_node_json_object.py index 83799c9508..16b432bae6 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_start_node_json_object.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_start_node_json_object.py @@ -1,3 +1,4 @@ +import json import time import pytest @@ -46,14 +47,18 @@ def make_start_node(user_inputs, variables): def test_json_object_valid_schema(): - schema = { - "type": "object", - "properties": { - "age": {"type": "number"}, - "name": {"type": "string"}, - }, - "required": ["age"], - } + schema = json.dumps( + { + "type": "object", + "properties": { + "age": {"type": "number"}, + "name": {"type": "string"}, + }, + "required": ["age"], + } + ) + + schema = json.loads(schema) variables = [ VariableEntity( @@ -74,52 +79,50 @@ def test_json_object_valid_schema(): def test_json_object_invalid_json_string(): + schema = json.dumps( + { + "type": "object", + "properties": { + "age": {"type": "number"}, + "name": {"type": "string"}, + }, + "required": ["age", "name"], + } + ) + + schema = json.loads(schema) variables = [ VariableEntity( variable="profile", label="profile", type=VariableEntityType.JSON_OBJECT, required=True, + json_schema=schema, ) ] - # Missing closing brace makes this invalid JSON + # Providing a string instead of an object should raise a type error user_inputs = {"profile": '{"age": 20, "name": "Tom"'} node = make_start_node(user_inputs, variables) - with pytest.raises(ValueError, match="profile must be a JSON object"): - node._run() - - -@pytest.mark.parametrize("value", ["[1, 2, 3]", "123"]) -def test_json_object_valid_json_but_not_object(value): - variables = [ - VariableEntity( - variable="profile", - label="profile", - type=VariableEntityType.JSON_OBJECT, - required=True, - ) - ] - - user_inputs = {"profile": value} - - node = make_start_node(user_inputs, variables) - - with pytest.raises(ValueError, match="profile must be a JSON object"): + with 
pytest.raises(ValueError, match="JSON object for 'profile' must be an object"): node._run() def test_json_object_does_not_match_schema(): - schema = { - "type": "object", - "properties": { - "age": {"type": "number"}, - "name": {"type": "string"}, - }, - "required": ["age", "name"], - } + schema = json.dumps( + { + "type": "object", + "properties": { + "age": {"type": "number"}, + "name": {"type": "string"}, + }, + "required": ["age", "name"], + } + ) + + schema = json.loads(schema) variables = [ VariableEntity( @@ -141,14 +144,18 @@ def test_json_object_does_not_match_schema(): def test_json_object_missing_required_schema_field(): - schema = { - "type": "object", - "properties": { - "age": {"type": "number"}, - "name": {"type": "string"}, - }, - "required": ["age", "name"], - } + schema = json.dumps( + { + "type": "object", + "properties": { + "age": {"type": "number"}, + "name": {"type": "string"}, + }, + "required": ["age", "name"], + } + ) + + schema = json.loads(schema) variables = [ VariableEntity( @@ -214,7 +221,7 @@ def test_json_object_optional_variable_not_provided(): variable="profile", label="profile", type=VariableEntityType.JSON_OBJECT, - required=False, + required=True, ) ] @@ -223,5 +230,5 @@ def test_json_object_optional_variable_not_provided(): node = make_start_node(user_inputs, variables) # Current implementation raises a validation error even when the variable is optional - with pytest.raises(ValueError, match="profile must be a JSON object"): + with pytest.raises(ValueError, match="profile is required in input form"): node._run() diff --git a/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py b/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py index 09b8191870..06927cddcf 100644 --- a/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys import types from collections.abc import Generator @@ -21,7 +23,7 @@ if TYPE_CHECKING: # pragma: no cover - imported for type checking only @pytest.fixture -def tool_node(monkeypatch) -> "ToolNode": +def tool_node(monkeypatch) -> ToolNode: module_name = "core.ops.ops_trace_manager" if module_name not in sys.modules: ops_stub = types.ModuleType(module_name) @@ -85,7 +87,7 @@ def _collect_events(generator: Generator) -> tuple[list[Any], LLMUsage]: return events, stop.value -def _run_transform(tool_node: "ToolNode", message: ToolInvokeMessage) -> tuple[list[Any], LLMUsage]: +def _run_transform(tool_node: ToolNode, message: ToolInvokeMessage) -> tuple[list[Any], LLMUsage]: def _identity_transform(messages, *_args, **_kwargs): return messages @@ -103,7 +105,7 @@ def _run_transform(tool_node: "ToolNode", message: ToolInvokeMessage) -> tuple[l return _collect_events(generator) -def test_link_messages_with_file_populate_files_output(tool_node: "ToolNode"): +def test_link_messages_with_file_populate_files_output(tool_node: ToolNode): file_obj = File( tenant_id="tenant-id", type=FileType.DOCUMENT, @@ -139,7 +141,7 @@ def test_link_messages_with_file_populate_files_output(tool_node: "ToolNode"): assert files_segment.value == [file_obj] -def test_plain_link_messages_remain_links(tool_node: "ToolNode"): +def test_plain_link_messages_remain_links(tool_node: ToolNode): message = ToolInvokeMessage( type=ToolInvokeMessage.MessageType.LINK, message=ToolInvokeMessage.TextMessage(text="https://dify.ai"), diff --git 
a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py index c62fc4d8fe..1df75380af 100644 --- a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py +++ b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py @@ -1,14 +1,14 @@ import time import uuid -from unittest import mock from uuid import uuid4 from core.app.entities.app_invoke_entities import InvokeFrom from core.variables import ArrayStringVariable, StringVariable -from core.workflow.conversation_variable_updater import ConversationVariableUpdater from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph +from core.workflow.graph_events.node import NodeRunSucceededEvent from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.nodes.variable_assigner.common import helpers as common_helpers from core.workflow.nodes.variable_assigner.v1 import VariableAssignerNode from core.workflow.nodes.variable_assigner.v1.node_data import WriteMode from core.workflow.runtime import GraphRuntimeState, VariablePool @@ -86,9 +86,6 @@ def test_overwrite_string_variable(): ) graph = Graph.init(graph_config=graph_config, node_factory=node_factory) - mock_conv_var_updater = mock.Mock(spec=ConversationVariableUpdater) - mock_conv_var_updater_factory = mock.Mock(return_value=mock_conv_var_updater) - node_config = { "id": "node_id", "data": { @@ -104,20 +101,14 @@ def test_overwrite_string_variable(): graph_init_params=init_params, graph_runtime_state=graph_runtime_state, config=node_config, - conv_var_updater_factory=mock_conv_var_updater_factory, ) - list(node.run()) - expected_var = StringVariable( - id=conversation_variable.id, - name=conversation_variable.name, - description=conversation_variable.description, - selector=conversation_variable.selector, - value_type=conversation_variable.value_type, - value=input_variable.value, - ) - mock_conv_var_updater.update.assert_called_once_with(conversation_id=conversation_id, variable=expected_var) - mock_conv_var_updater.flush.assert_called_once() + events = list(node.run()) + succeeded_event = next(event for event in events if isinstance(event, NodeRunSucceededEvent)) + updated_variables = common_helpers.get_updated_variables(succeeded_event.node_run_result.process_data) + assert updated_variables is not None + assert updated_variables[0].name == conversation_variable.name + assert updated_variables[0].new_value == input_variable.value got = variable_pool.get(["conversation", conversation_variable.name]) assert got is not None @@ -191,9 +182,6 @@ def test_append_variable_to_array(): ) graph = Graph.init(graph_config=graph_config, node_factory=node_factory) - mock_conv_var_updater = mock.Mock(spec=ConversationVariableUpdater) - mock_conv_var_updater_factory = mock.Mock(return_value=mock_conv_var_updater) - node_config = { "id": "node_id", "data": { @@ -209,22 +197,14 @@ def test_append_variable_to_array(): graph_init_params=init_params, graph_runtime_state=graph_runtime_state, config=node_config, - conv_var_updater_factory=mock_conv_var_updater_factory, ) - list(node.run()) - expected_value = list(conversation_variable.value) - expected_value.append(input_variable.value) - expected_var = ArrayStringVariable( - id=conversation_variable.id, - name=conversation_variable.name, - description=conversation_variable.description, - 
selector=conversation_variable.selector, - value_type=conversation_variable.value_type, - value=expected_value, - ) - mock_conv_var_updater.update.assert_called_once_with(conversation_id=conversation_id, variable=expected_var) - mock_conv_var_updater.flush.assert_called_once() + events = list(node.run()) + succeeded_event = next(event for event in events if isinstance(event, NodeRunSucceededEvent)) + updated_variables = common_helpers.get_updated_variables(succeeded_event.node_run_result.process_data) + assert updated_variables is not None + assert updated_variables[0].name == conversation_variable.name + assert updated_variables[0].new_value == ["the first value", "the second value"] got = variable_pool.get(["conversation", conversation_variable.name]) assert got is not None @@ -287,9 +267,6 @@ def test_clear_array(): ) graph = Graph.init(graph_config=graph_config, node_factory=node_factory) - mock_conv_var_updater = mock.Mock(spec=ConversationVariableUpdater) - mock_conv_var_updater_factory = mock.Mock(return_value=mock_conv_var_updater) - node_config = { "id": "node_id", "data": { @@ -305,20 +282,14 @@ def test_clear_array(): graph_init_params=init_params, graph_runtime_state=graph_runtime_state, config=node_config, - conv_var_updater_factory=mock_conv_var_updater_factory, ) - list(node.run()) - expected_var = ArrayStringVariable( - id=conversation_variable.id, - name=conversation_variable.name, - description=conversation_variable.description, - selector=conversation_variable.selector, - value_type=conversation_variable.value_type, - value=[], - ) - mock_conv_var_updater.update.assert_called_once_with(conversation_id=conversation_id, variable=expected_var) - mock_conv_var_updater.flush.assert_called_once() + events = list(node.run()) + succeeded_event = next(event for event in events if isinstance(event, NodeRunSucceededEvent)) + updated_variables = common_helpers.get_updated_variables(succeeded_event.node_run_result.process_data) + assert updated_variables is not None + assert updated_variables[0].name == conversation_variable.name + assert updated_variables[0].new_value == [] got = variable_pool.get(["conversation", conversation_variable.name]) assert got is not None diff --git a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py index caa36734ad..353d56fe25 100644 --- a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py +++ b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py @@ -390,3 +390,42 @@ def test_remove_last_from_empty_array(): got = variable_pool.get(["conversation", conversation_variable.name]) assert got is not None assert got.to_object() == [] + + +def test_node_factory_creates_variable_assigner_node(): + graph_config = { + "edges": [], + "nodes": [ + { + "data": {"type": "assigner", "version": "2", "title": "Variable Assigner", "items": []}, + "id": "assigner", + }, + ], + } + + init_params = GraphInitParams( + tenant_id="1", + app_id="1", + workflow_id="1", + graph_config=graph_config, + user_id="1", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ) + variable_pool = VariablePool( + system_variables=SystemVariable(conversation_id="conversation_id"), + user_inputs={}, + environment_variables=[], + conversation_variables=[], + ) + graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()) 
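+    # The factory should map node type "assigner", version "2", to the v2 VariableAssignerNode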
+ + node_factory = DifyNodeFactory( + graph_init_params=init_params, + graph_runtime_state=graph_runtime_state, + ) + + node = node_factory.create_node(graph_config["nodes"][0]) + + assert isinstance(node, VariableAssignerNode) diff --git a/api/tests/unit_tests/extensions/test_celery_ssl.py b/api/tests/unit_tests/extensions/test_celery_ssl.py index fc7a090ef9..d3a4d69f07 100644 --- a/api/tests/unit_tests/extensions/test_celery_ssl.py +++ b/api/tests/unit_tests/extensions/test_celery_ssl.py @@ -8,11 +8,12 @@ class TestCelerySSLConfiguration: """Test suite for Celery SSL configuration.""" def test_get_celery_ssl_options_when_ssl_disabled(self): - """Test SSL options when REDIS_USE_SSL is False.""" - mock_config = MagicMock() - mock_config.REDIS_USE_SSL = False + """Test SSL options when BROKER_USE_SSL is False.""" + from configs import DifyConfig - with patch("extensions.ext_celery.dify_config", mock_config): + dify_config = DifyConfig(CELERY_BROKER_URL="redis://localhost:6379/0") + + with patch("extensions.ext_celery.dify_config", dify_config): from extensions.ext_celery import _get_celery_ssl_options result = _get_celery_ssl_options() @@ -21,7 +22,6 @@ class TestCelerySSLConfiguration: def test_get_celery_ssl_options_when_broker_not_redis(self): """Test SSL options when broker is not Redis.""" mock_config = MagicMock() - mock_config.REDIS_USE_SSL = True mock_config.CELERY_BROKER_URL = "amqp://localhost:5672" with patch("extensions.ext_celery.dify_config", mock_config): @@ -33,7 +33,6 @@ class TestCelerySSLConfiguration: def test_get_celery_ssl_options_with_cert_none(self): """Test SSL options with CERT_NONE requirement.""" mock_config = MagicMock() - mock_config.REDIS_USE_SSL = True mock_config.CELERY_BROKER_URL = "redis://localhost:6379/0" mock_config.REDIS_SSL_CERT_REQS = "CERT_NONE" mock_config.REDIS_SSL_CA_CERTS = None @@ -53,7 +52,6 @@ class TestCelerySSLConfiguration: def test_get_celery_ssl_options_with_cert_required(self): """Test SSL options with CERT_REQUIRED and certificates.""" mock_config = MagicMock() - mock_config.REDIS_USE_SSL = True mock_config.CELERY_BROKER_URL = "rediss://localhost:6380/0" mock_config.REDIS_SSL_CERT_REQS = "CERT_REQUIRED" mock_config.REDIS_SSL_CA_CERTS = "/path/to/ca.crt" @@ -73,7 +71,6 @@ class TestCelerySSLConfiguration: def test_get_celery_ssl_options_with_cert_optional(self): """Test SSL options with CERT_OPTIONAL requirement.""" mock_config = MagicMock() - mock_config.REDIS_USE_SSL = True mock_config.CELERY_BROKER_URL = "redis://localhost:6379/0" mock_config.REDIS_SSL_CERT_REQS = "CERT_OPTIONAL" mock_config.REDIS_SSL_CA_CERTS = "/path/to/ca.crt" @@ -91,7 +88,6 @@ class TestCelerySSLConfiguration: def test_get_celery_ssl_options_with_invalid_cert_reqs(self): """Test SSL options with invalid cert requirement defaults to CERT_NONE.""" mock_config = MagicMock() - mock_config.REDIS_USE_SSL = True mock_config.CELERY_BROKER_URL = "redis://localhost:6379/0" mock_config.REDIS_SSL_CERT_REQS = "INVALID_VALUE" mock_config.REDIS_SSL_CA_CERTS = None @@ -108,7 +104,6 @@ class TestCelerySSLConfiguration: def test_celery_init_applies_ssl_to_broker_and_backend(self): """Test that SSL options are applied to both broker and backend when using Redis.""" mock_config = MagicMock() - mock_config.REDIS_USE_SSL = True mock_config.CELERY_BROKER_URL = "redis://localhost:6379/0" mock_config.CELERY_BACKEND = "redis" mock_config.CELERY_RESULT_BACKEND = "redis://localhost:6379/0" diff --git a/api/tests/unit_tests/fields/test_file_fields.py 
b/api/tests/unit_tests/fields/test_file_fields.py new file mode 100644 index 0000000000..8be8df16f4 --- /dev/null +++ b/api/tests/unit_tests/fields/test_file_fields.py @@ -0,0 +1,78 @@ +from __future__ import annotations + +from datetime import datetime +from types import SimpleNamespace + +from fields.file_fields import FileResponse, FileWithSignedUrl, RemoteFileInfo, UploadConfig + + +def test_file_response_serializes_datetime() -> None: + created_at = datetime(2024, 1, 1, 12, 0, 0) + file_obj = SimpleNamespace( + id="file-1", + name="example.txt", + size=1024, + extension="txt", + mime_type="text/plain", + created_by="user-1", + created_at=created_at, + preview_url="https://preview", + source_url="https://source", + original_url="https://origin", + user_id="user-1", + tenant_id="tenant-1", + conversation_id="conv-1", + file_key="key-1", + ) + + serialized = FileResponse.model_validate(file_obj, from_attributes=True).model_dump(mode="json") + + assert serialized["id"] == "file-1" + assert serialized["created_at"] == int(created_at.timestamp()) + assert serialized["preview_url"] == "https://preview" + assert serialized["source_url"] == "https://source" + assert serialized["original_url"] == "https://origin" + assert serialized["user_id"] == "user-1" + assert serialized["tenant_id"] == "tenant-1" + assert serialized["conversation_id"] == "conv-1" + assert serialized["file_key"] == "key-1" + + +def test_file_with_signed_url_builds_payload() -> None: + payload = FileWithSignedUrl( + id="file-2", + name="remote.pdf", + size=2048, + extension="pdf", + url="https://signed", + mime_type="application/pdf", + created_by="user-2", + created_at=datetime(2024, 1, 2, 0, 0, 0), + ) + + dumped = payload.model_dump(mode="json") + + assert dumped["url"] == "https://signed" + assert dumped["created_at"] == int(datetime(2024, 1, 2, 0, 0, 0).timestamp()) + + +def test_remote_file_info_and_upload_config() -> None: + info = RemoteFileInfo(file_type="text/plain", file_length=123) + assert info.model_dump(mode="json") == {"file_type": "text/plain", "file_length": 123} + + config = UploadConfig( + file_size_limit=1, + batch_count_limit=2, + file_upload_limit=3, + image_file_size_limit=4, + video_file_size_limit=5, + audio_file_size_limit=6, + workflow_file_upload_limit=7, + image_file_batch_limit=8, + single_chunk_attachment_limit=9, + attachment_image_file_size_limit=10, + ) + + dumped = config.model_dump(mode="json") + assert dumped["file_upload_limit"] == 3 + assert dumped["attachment_image_file_size_limit"] == 10 diff --git a/api/tests/unit_tests/libs/test_archive_storage.py b/api/tests/unit_tests/libs/test_archive_storage.py new file mode 100644 index 0000000000..697760e33a --- /dev/null +++ b/api/tests/unit_tests/libs/test_archive_storage.py @@ -0,0 +1,272 @@ +import base64 +import hashlib +from datetime import datetime +from unittest.mock import ANY, MagicMock + +import pytest +from botocore.exceptions import ClientError + +from libs import archive_storage as storage_module +from libs.archive_storage import ( + ArchiveStorage, + ArchiveStorageError, + ArchiveStorageNotConfiguredError, +) + +BUCKET_NAME = "archive-bucket" + + +def _configure_storage(monkeypatch, **overrides): + defaults = { + "ARCHIVE_STORAGE_ENABLED": True, + "ARCHIVE_STORAGE_ENDPOINT": "https://storage.example.com", + "ARCHIVE_STORAGE_ARCHIVE_BUCKET": BUCKET_NAME, + "ARCHIVE_STORAGE_ACCESS_KEY": "access", + "ARCHIVE_STORAGE_SECRET_KEY": "secret", + "ARCHIVE_STORAGE_REGION": "auto", + } + defaults.update(overrides) + for key, value in 
defaults.items(): + monkeypatch.setattr(storage_module.dify_config, key, value, raising=False) + + +def _client_error(code: str) -> ClientError: + return ClientError({"Error": {"Code": code}}, "Operation") + + +def _mock_client(monkeypatch): + client = MagicMock() + client.head_bucket.return_value = None + boto_client = MagicMock(return_value=client) + monkeypatch.setattr(storage_module.boto3, "client", boto_client) + return client, boto_client + + +def test_init_disabled(monkeypatch): + _configure_storage(monkeypatch, ARCHIVE_STORAGE_ENABLED=False) + with pytest.raises(ArchiveStorageNotConfiguredError, match="not enabled"): + ArchiveStorage(bucket=BUCKET_NAME) + + +def test_init_missing_config(monkeypatch): + _configure_storage(monkeypatch, ARCHIVE_STORAGE_ENDPOINT=None) + with pytest.raises(ArchiveStorageNotConfiguredError, match="incomplete"): + ArchiveStorage(bucket=BUCKET_NAME) + + +def test_init_bucket_not_found(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + client.head_bucket.side_effect = _client_error("404") + + with pytest.raises(ArchiveStorageNotConfiguredError, match="does not exist"): + ArchiveStorage(bucket=BUCKET_NAME) + + +def test_init_bucket_access_denied(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + client.head_bucket.side_effect = _client_error("403") + + with pytest.raises(ArchiveStorageNotConfiguredError, match="Access denied"): + ArchiveStorage(bucket=BUCKET_NAME) + + +def test_init_bucket_other_error(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + client.head_bucket.side_effect = _client_error("500") + + with pytest.raises(ArchiveStorageError, match="Failed to access archive bucket"): + ArchiveStorage(bucket=BUCKET_NAME) + + +def test_init_sets_client(monkeypatch): + _configure_storage(monkeypatch) + client, boto_client = _mock_client(monkeypatch) + + storage = ArchiveStorage(bucket=BUCKET_NAME) + + boto_client.assert_called_once_with( + "s3", + endpoint_url="https://storage.example.com", + aws_access_key_id="access", + aws_secret_access_key="secret", + region_name="auto", + config=ANY, + ) + assert storage.client is client + assert storage.bucket == BUCKET_NAME + + +def test_put_object_returns_checksum(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + storage = ArchiveStorage(bucket=BUCKET_NAME) + + data = b"hello" + checksum = storage.put_object("key", data) + + expected_md5 = hashlib.md5(data).hexdigest() + expected_content_md5 = base64.b64encode(hashlib.md5(data).digest()).decode() + client.put_object.assert_called_once_with( + Bucket="archive-bucket", + Key="key", + Body=data, + ContentMD5=expected_content_md5, + ) + assert checksum == expected_md5 + + +def test_put_object_raises_on_error(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + storage = ArchiveStorage(bucket=BUCKET_NAME) + client.put_object.side_effect = _client_error("500") + + with pytest.raises(ArchiveStorageError, match="Failed to upload object"): + storage.put_object("key", b"data") + + +def test_get_object_returns_bytes(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + body = MagicMock() + body.read.return_value = b"payload" + client.get_object.return_value = {"Body": body} + storage = ArchiveStorage(bucket=BUCKET_NAME) + + assert storage.get_object("key") == b"payload" + + +def test_get_object_missing(monkeypatch): + _configure_storage(monkeypatch) 
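+    # S3 signals a missing key with a "NoSuchKey" client error; the wrapper is
+    # expected to translate it into a plain FileNotFoundError.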
+ client, _ = _mock_client(monkeypatch) + client.get_object.side_effect = _client_error("NoSuchKey") + storage = ArchiveStorage(bucket=BUCKET_NAME) + + with pytest.raises(FileNotFoundError, match="Archive object not found"): + storage.get_object("missing") + + +def test_get_object_stream(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + body = MagicMock() + body.iter_chunks.return_value = [b"a", b"b"] + client.get_object.return_value = {"Body": body} + storage = ArchiveStorage(bucket=BUCKET_NAME) + + assert list(storage.get_object_stream("key")) == [b"a", b"b"] + + +def test_get_object_stream_missing(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + client.get_object.side_effect = _client_error("NoSuchKey") + storage = ArchiveStorage(bucket=BUCKET_NAME) + + with pytest.raises(FileNotFoundError, match="Archive object not found"): + list(storage.get_object_stream("missing")) + + +def test_object_exists(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + storage = ArchiveStorage(bucket=BUCKET_NAME) + + assert storage.object_exists("key") is True + client.head_object.side_effect = _client_error("404") + assert storage.object_exists("missing") is False + + +def test_delete_object_error(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + client.delete_object.side_effect = _client_error("500") + storage = ArchiveStorage(bucket=BUCKET_NAME) + + with pytest.raises(ArchiveStorageError, match="Failed to delete object"): + storage.delete_object("key") + + +def test_list_objects(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + paginator = MagicMock() + paginator.paginate.return_value = [ + {"Contents": [{"Key": "a"}, {"Key": "b"}]}, + {"Contents": [{"Key": "c"}]}, + ] + client.get_paginator.return_value = paginator + storage = ArchiveStorage(bucket=BUCKET_NAME) + + assert storage.list_objects("prefix") == ["a", "b", "c"] + paginator.paginate.assert_called_once_with(Bucket="archive-bucket", Prefix="prefix") + + +def test_list_objects_error(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + paginator = MagicMock() + paginator.paginate.side_effect = _client_error("500") + client.get_paginator.return_value = paginator + storage = ArchiveStorage(bucket=BUCKET_NAME) + + with pytest.raises(ArchiveStorageError, match="Failed to list objects"): + storage.list_objects("prefix") + + +def test_generate_presigned_url(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + client.generate_presigned_url.return_value = "http://signed-url" + storage = ArchiveStorage(bucket=BUCKET_NAME) + + url = storage.generate_presigned_url("key", expires_in=123) + + client.generate_presigned_url.assert_called_once_with( + ClientMethod="get_object", + Params={"Bucket": "archive-bucket", "Key": "key"}, + ExpiresIn=123, + ) + assert url == "http://signed-url" + + +def test_generate_presigned_url_error(monkeypatch): + _configure_storage(monkeypatch) + client, _ = _mock_client(monkeypatch) + client.generate_presigned_url.side_effect = _client_error("500") + storage = ArchiveStorage(bucket=BUCKET_NAME) + + with pytest.raises(ArchiveStorageError, match="Failed to generate pre-signed URL"): + storage.generate_presigned_url("key") + + +def test_serialization_roundtrip(): + records = [ + { + "id": "1", + "created_at": datetime(2024, 1, 1, 12, 0, 0), + "payload": {"nested": 
"value"}, + "items": [{"name": "a"}], + }, + {"id": "2", "value": 123}, + ] + + data = ArchiveStorage.serialize_to_jsonl_gz(records) + decoded = ArchiveStorage.deserialize_from_jsonl_gz(data) + + assert decoded[0]["id"] == "1" + assert decoded[0]["payload"]["nested"] == "value" + assert decoded[0]["items"][0]["name"] == "a" + assert "2024-01-01T12:00:00" in decoded[0]["created_at"] + assert decoded[1]["value"] == 123 + + +def test_content_md5_matches_checksum(): + data = b"checksum" + expected = base64.b64encode(hashlib.md5(data).digest()).decode() + + assert ArchiveStorage._content_md5(data) == expected + assert ArchiveStorage.compute_checksum(data) == hashlib.md5(data).hexdigest() diff --git a/api/tests/unit_tests/libs/test_encryption.py b/api/tests/unit_tests/libs/test_encryption.py new file mode 100644 index 0000000000..bf013c4bae --- /dev/null +++ b/api/tests/unit_tests/libs/test_encryption.py @@ -0,0 +1,150 @@ +""" +Unit tests for field encoding/decoding utilities. + +These tests verify Base64 encoding/decoding functionality and +proper error handling and fallback behavior. +""" + +import base64 + +from libs.encryption import FieldEncryption + + +class TestDecodeField: + """Test cases for field decoding functionality.""" + + def test_decode_valid_base64(self): + """Test decoding a valid Base64 encoded string.""" + plaintext = "password123" + encoded = base64.b64encode(plaintext.encode("utf-8")).decode() + + result = FieldEncryption.decrypt_field(encoded) + assert result == plaintext + + def test_decode_non_base64_returns_none(self): + """Test that non-base64 input returns None.""" + non_base64 = "plain-password-!@#" + result = FieldEncryption.decrypt_field(non_base64) + # Should return None (decoding failed) + assert result is None + + def test_decode_unicode_text(self): + """Test decoding Base64 encoded Unicode text.""" + plaintext = "密码Test123" + encoded = base64.b64encode(plaintext.encode("utf-8")).decode() + + result = FieldEncryption.decrypt_field(encoded) + assert result == plaintext + + def test_decode_empty_string(self): + """Test decoding an empty string returns empty string.""" + result = FieldEncryption.decrypt_field("") + # Empty string base64 decodes to empty string + assert result == "" + + def test_decode_special_characters(self): + """Test decoding with special characters.""" + plaintext = "P@ssw0rd!#$%^&*()" + encoded = base64.b64encode(plaintext.encode("utf-8")).decode() + + result = FieldEncryption.decrypt_field(encoded) + assert result == plaintext + + +class TestDecodePassword: + """Test cases for password decoding.""" + + def test_decode_password_base64(self): + """Test decoding a Base64 encoded password.""" + password = "SecureP@ssw0rd!" 
+ encoded = base64.b64encode(password.encode("utf-8")).decode() + + result = FieldEncryption.decrypt_password(encoded) + assert result == password + + def test_decode_password_invalid_returns_none(self): + """Test that invalid base64 passwords return None.""" + invalid = "PlainPassword!@#" + result = FieldEncryption.decrypt_password(invalid) + # Should return None (decoding failed) + assert result is None + + +class TestDecodeVerificationCode: + """Test cases for verification code decoding.""" + + def test_decode_code_base64(self): + """Test decoding a Base64 encoded verification code.""" + code = "789012" + encoded = base64.b64encode(code.encode("utf-8")).decode() + + result = FieldEncryption.decrypt_verification_code(encoded) + assert result == code + + def test_decode_code_invalid_returns_none(self): + """Test that invalid base64 codes return None.""" + invalid = "123456" # Plain 6-digit code, not base64 + result = FieldEncryption.decrypt_verification_code(invalid) + # Should return None (decoding failed) + assert result is None + + +class TestRoundTripEncodingDecoding: + """ + Integration tests for complete encoding-decoding cycle. + These tests simulate the full frontend-to-backend flow using Base64. + """ + + def test_roundtrip_password(self): + """Test encoding and decoding a password.""" + original_password = "SecureP@ssw0rd!" + + # Simulate frontend encoding (Base64) + encoded = base64.b64encode(original_password.encode("utf-8")).decode() + + # Backend decoding + decoded = FieldEncryption.decrypt_password(encoded) + + assert decoded == original_password + + def test_roundtrip_verification_code(self): + """Test encoding and decoding a verification code.""" + original_code = "123456" + + # Simulate frontend encoding + encoded = base64.b64encode(original_code.encode("utf-8")).decode() + + # Backend decoding + decoded = FieldEncryption.decrypt_verification_code(encoded) + + assert decoded == original_code + + def test_roundtrip_unicode_password(self): + """Test encoding and decoding password with Unicode characters.""" + original_password = "密码Test123!@#" + + # Frontend encoding + encoded = base64.b64encode(original_password.encode("utf-8")).decode() + + # Backend decoding + decoded = FieldEncryption.decrypt_password(encoded) + + assert decoded == original_password + + def test_roundtrip_long_password(self): + """Test encoding and decoding a long password.""" + original_password = "ThisIsAVeryLongPasswordWithLotsOfCharacters123!@#$%^&*()" + + encoded = base64.b64encode(original_password.encode("utf-8")).decode() + decoded = FieldEncryption.decrypt_password(encoded) + + assert decoded == original_password + + def test_roundtrip_with_whitespace(self): + """Test encoding and decoding with whitespace.""" + original_password = "pass word with spaces" + + encoded = base64.b64encode(original_password.encode("utf-8")).decode() + decoded = FieldEncryption.decrypt_field(encoded) + + assert decoded == original_password diff --git a/api/tests/unit_tests/libs/test_external_api.py b/api/tests/unit_tests/libs/test_external_api.py index 9aa157a651..5135970bcc 100644 --- a/api/tests/unit_tests/libs/test_external_api.py +++ b/api/tests/unit_tests/libs/test_external_api.py @@ -99,29 +99,20 @@ def test_external_api_json_message_and_bad_request_rewrite(): assert res.get_json()["message"] == "Invalid JSON payload received or JSON payload is empty." 
-def test_external_api_param_mapping_and_quota_and_exc_info_none(): - # Force exc_info() to return (None,None,None) only during request - import libs.external_api as ext +def test_external_api_param_mapping_and_quota(): + app = _create_api_app() + client = app.test_client() - orig_exc_info = ext.sys.exc_info - try: - ext.sys.exc_info = lambda: (None, None, None) + # Param errors mapping payload path + res = client.get("/api/param-errors") + assert res.status_code == 400 + data = res.get_json() + assert data["code"] == "invalid_param" + assert data["params"] == "field" - app = _create_api_app() - client = app.test_client() - - # Param errors mapping payload path - res = client.get("/api/param-errors") - assert res.status_code == 400 - data = res.get_json() - assert data["code"] == "invalid_param" - assert data["params"] == "field" - - # Quota path — depending on Flask-RESTX internals it may be handled - res = client.get("/api/quota") - assert res.status_code in (400, 429) - finally: - ext.sys.exc_info = orig_exc_info # type: ignore[assignment] + # Quota path — depending on Flask-RESTX internals it may be handled + res = client.get("/api/quota") + assert res.status_code in (400, 429) def test_unauthorized_and_force_logout_clears_cookies(): diff --git a/api/tests/unit_tests/libs/test_helper.py b/api/tests/unit_tests/libs/test_helper.py index 85789bfa7e..de74eff82f 100644 --- a/api/tests/unit_tests/libs/test_helper.py +++ b/api/tests/unit_tests/libs/test_helper.py @@ -1,6 +1,6 @@ import pytest -from libs.helper import extract_tenant_id +from libs.helper import escape_like_pattern, extract_tenant_id from models.account import Account from models.model import EndUser @@ -63,3 +63,51 @@ class TestExtractTenantId: with pytest.raises(ValueError, match="Invalid user type.*Expected Account or EndUser"): extract_tenant_id(dict_user) + + +class TestEscapeLikePattern: + """Test cases for the escape_like_pattern utility function.""" + + def test_escape_percent_character(self): + """Test escaping percent character.""" + result = escape_like_pattern("50% discount") + assert result == "50\\% discount" + + def test_escape_underscore_character(self): + """Test escaping underscore character.""" + result = escape_like_pattern("test_data") + assert result == "test\\_data" + + def test_escape_backslash_character(self): + """Test escaping backslash character.""" + result = escape_like_pattern("path\\to\\file") + assert result == "path\\\\to\\\\file" + + def test_escape_combined_special_characters(self): + """Test escaping multiple special characters together.""" + result = escape_like_pattern("file_50%\\path") + assert result == "file\\_50\\%\\\\path" + + def test_escape_empty_string(self): + """Test escaping empty string returns empty string.""" + result = escape_like_pattern("") + assert result == "" + + def test_escape_none_handling(self): + """Test escaping None returns None (falsy check handles it).""" + # The function checks `if not pattern`, so None is falsy and returns as-is + result = escape_like_pattern(None) + assert result is None + + def test_escape_normal_string_no_change(self): + """Test that normal strings without special characters are unchanged.""" + result = escape_like_pattern("normal text") + assert result == "normal text" + + def test_escape_order_matters(self): + """Test that backslash is escaped first to prevent double escaping.""" + # If we escape % first, then escape \, we might get wrong results + # This test ensures the order is correct: \ first, then % and _ + result = 
escape_like_pattern("test\\%_value") + # Should be: test\\\%\_value + assert result == "test\\\\\\%\\_value" diff --git a/api/tests/unit_tests/models/test_app_models.py b/api/tests/unit_tests/models/test_app_models.py index e35788660d..8be2eea121 100644 --- a/api/tests/unit_tests/models/test_app_models.py +++ b/api/tests/unit_tests/models/test_app_models.py @@ -114,7 +114,7 @@ class TestAppModelValidation: def test_icon_type_validation(self): """Test icon type enum values.""" # Assert - assert {t.value for t in IconType} == {"image", "emoji"} + assert {t.value for t in IconType} == {"image", "emoji", "link"} def test_app_desc_or_prompt_with_description(self): """Test desc_or_prompt property when description exists.""" diff --git a/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py b/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py index 303f0493bd..a0fed1aa14 100644 --- a/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py +++ b/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py @@ -1,4 +1,4 @@ -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest from qcloud_cos import CosConfig @@ -18,3 +18,72 @@ class TestTencentCos(BaseStorageTest): with patch.object(CosConfig, "__init__", return_value=None): self.storage = TencentCosStorage() self.storage.bucket_name = get_example_bucket() + + +class TestTencentCosConfiguration: + """Tests for TencentCosStorage initialization with different configurations.""" + + def test_init_with_custom_domain(self): + """Test initialization with custom domain configured.""" + # Mock dify_config to return custom domain configuration + mock_dify_config = MagicMock() + mock_dify_config.TENCENT_COS_CUSTOM_DOMAIN = "cos.example.com" + mock_dify_config.TENCENT_COS_SECRET_ID = "test-secret-id" + mock_dify_config.TENCENT_COS_SECRET_KEY = "test-secret-key" + mock_dify_config.TENCENT_COS_SCHEME = "https" + + # Mock CosConfig and CosS3Client + mock_config_instance = MagicMock() + mock_client = MagicMock() + + with ( + patch("extensions.storage.tencent_cos_storage.dify_config", mock_dify_config), + patch( + "extensions.storage.tencent_cos_storage.CosConfig", return_value=mock_config_instance + ) as mock_cos_config, + patch("extensions.storage.tencent_cos_storage.CosS3Client", return_value=mock_client), + ): + TencentCosStorage() + + # Verify CosConfig was called with Domain parameter (not Region) + mock_cos_config.assert_called_once() + call_kwargs = mock_cos_config.call_args[1] + assert "Domain" in call_kwargs + assert call_kwargs["Domain"] == "cos.example.com" + assert "Region" not in call_kwargs + assert call_kwargs["SecretId"] == "test-secret-id" + assert call_kwargs["SecretKey"] == "test-secret-key" + assert call_kwargs["Scheme"] == "https" + + def test_init_with_region(self): + """Test initialization with region configured (no custom domain).""" + # Mock dify_config to return region configuration + mock_dify_config = MagicMock() + mock_dify_config.TENCENT_COS_CUSTOM_DOMAIN = None + mock_dify_config.TENCENT_COS_REGION = "ap-guangzhou" + mock_dify_config.TENCENT_COS_SECRET_ID = "test-secret-id" + mock_dify_config.TENCENT_COS_SECRET_KEY = "test-secret-key" + mock_dify_config.TENCENT_COS_SCHEME = "https" + + # Mock CosConfig and CosS3Client + mock_config_instance = MagicMock() + mock_client = MagicMock() + + with ( + patch("extensions.storage.tencent_cos_storage.dify_config", mock_dify_config), + patch( + "extensions.storage.tencent_cos_storage.CosConfig", return_value=mock_config_instance + ) as mock_cos_config, 
+ patch("extensions.storage.tencent_cos_storage.CosS3Client", return_value=mock_client), + ): + TencentCosStorage() + + # Verify CosConfig was called with Region parameter (not Domain) + mock_cos_config.assert_called_once() + call_kwargs = mock_cos_config.call_args[1] + assert "Region" in call_kwargs + assert call_kwargs["Region"] == "ap-guangzhou" + assert "Domain" not in call_kwargs + assert call_kwargs["SecretId"] == "test-secret-id" + assert call_kwargs["SecretKey"] == "test-secret-key" + assert call_kwargs["Scheme"] == "https" diff --git a/api/tests/unit_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py b/api/tests/unit_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py index 0c34676252..ef3ac29519 100644 --- a/api/tests/unit_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py +++ b/api/tests/unit_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py @@ -4,6 +4,7 @@ from datetime import UTC, datetime from unittest.mock import Mock, patch import pytest +from sqlalchemy.dialects import postgresql from sqlalchemy.orm import Session, sessionmaker from core.workflow.enums import WorkflowExecutionStatus @@ -104,6 +105,42 @@ class TestDifyAPISQLAlchemyWorkflowRunRepository: return pause +class TestGetRunsBatchByTimeRange(TestDifyAPISQLAlchemyWorkflowRunRepository): + def test_get_runs_batch_by_time_range_filters_terminal_statuses( + self, repository: DifyAPISQLAlchemyWorkflowRunRepository, mock_session: Mock + ): + scalar_result = Mock() + scalar_result.all.return_value = [] + mock_session.scalars.return_value = scalar_result + + repository.get_runs_batch_by_time_range( + start_after=None, + end_before=datetime(2024, 1, 1), + last_seen=None, + batch_size=50, + ) + + stmt = mock_session.scalars.call_args[0][0] + compiled_sql = str( + stmt.compile( + dialect=postgresql.dialect(), + compile_kwargs={"literal_binds": True}, + ) + ) + + assert "workflow_runs.status" in compiled_sql + for status in ( + WorkflowExecutionStatus.SUCCEEDED, + WorkflowExecutionStatus.FAILED, + WorkflowExecutionStatus.STOPPED, + WorkflowExecutionStatus.PARTIAL_SUCCEEDED, + ): + assert f"'{status.value}'" in compiled_sql + + assert "'running'" not in compiled_sql + assert "'paused'" not in compiled_sql + + class TestCreateWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository): """Test create_workflow_pause method.""" @@ -181,6 +218,34 @@ class TestCreateWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository): ) +class TestDeleteRunsWithRelated(TestDifyAPISQLAlchemyWorkflowRunRepository): + def test_uses_trigger_log_repository(self, repository: DifyAPISQLAlchemyWorkflowRunRepository, mock_session: Mock): + node_ids_result = Mock() + node_ids_result.all.return_value = [] + pause_ids_result = Mock() + pause_ids_result.all.return_value = [] + mock_session.scalars.side_effect = [node_ids_result, pause_ids_result] + + # app_logs delete, runs delete + mock_session.execute.side_effect = [Mock(rowcount=0), Mock(rowcount=1)] + + fake_trigger_repo = Mock() + fake_trigger_repo.delete_by_run_ids.return_value = 3 + + run = Mock(id="run-1", tenant_id="t1", app_id="a1", workflow_id="w1", triggered_from="tf") + counts = repository.delete_runs_with_related( + [run], + delete_node_executions=lambda session, runs: (2, 1), + delete_trigger_logs=lambda session, run_ids: fake_trigger_repo.delete_by_run_ids(run_ids), + ) + + fake_trigger_repo.delete_by_run_ids.assert_called_once_with(["run-1"]) + assert counts["node_executions"] == 2 + assert counts["offloads"] == 1 + assert 
counts["trigger_logs"] == 3 + assert counts["runs"] == 1 + + class TestResumeWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository): """Test resume_workflow_pause method.""" diff --git a/api/tests/unit_tests/repositories/test_sqlalchemy_workflow_trigger_log_repository.py b/api/tests/unit_tests/repositories/test_sqlalchemy_workflow_trigger_log_repository.py new file mode 100644 index 0000000000..d409618211 --- /dev/null +++ b/api/tests/unit_tests/repositories/test_sqlalchemy_workflow_trigger_log_repository.py @@ -0,0 +1,31 @@ +from unittest.mock import Mock + +from sqlalchemy.dialects import postgresql +from sqlalchemy.orm import Session + +from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository + + +def test_delete_by_run_ids_executes_delete(): + session = Mock(spec=Session) + session.execute.return_value = Mock(rowcount=2) + repo = SQLAlchemyWorkflowTriggerLogRepository(session) + + deleted = repo.delete_by_run_ids(["run-1", "run-2"]) + + stmt = session.execute.call_args[0][0] + compiled_sql = str(stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})) + assert "workflow_trigger_logs" in compiled_sql + assert "'run-1'" in compiled_sql + assert "'run-2'" in compiled_sql + assert deleted == 2 + + +def test_delete_by_run_ids_empty_short_circuits(): + session = Mock(spec=Session) + repo = SQLAlchemyWorkflowTriggerLogRepository(session) + + deleted = repo.delete_by_run_ids([]) + + session.execute.assert_not_called() + assert deleted == 0 diff --git a/api/tests/unit_tests/services/auth/test_firecrawl_auth.py b/api/tests/unit_tests/services/auth/test_firecrawl_auth.py index b5ee55706d..ab50d6a92c 100644 --- a/api/tests/unit_tests/services/auth/test_firecrawl_auth.py +++ b/api/tests/unit_tests/services/auth/test_firecrawl_auth.py @@ -1,3 +1,4 @@ +import json from unittest.mock import MagicMock, patch import httpx @@ -110,9 +111,11 @@ class TestFirecrawlAuth: @pytest.mark.parametrize( ("status_code", "response_text", "has_json_error", "expected_error_contains"), [ - (403, '{"error": "Forbidden"}', True, "Failed to authorize. Status code: 403. Error: Forbidden"), - (404, "", True, "Unexpected error occurred while trying to authorize. Status code: 404"), - (401, "Not JSON", True, "Expecting value"), # JSON decode error + (403, '{"error": "Forbidden"}', False, "Failed to authorize. Status code: 403. Error: Forbidden"), + # empty body falls back to generic message + (404, "", True, "Failed to authorize. Status code: 404. Error: Unknown error occurred"), + # non-JSON body is surfaced directly + (401, "Not JSON", True, "Failed to authorize. Status code: 401. 
Error: Not JSON"), ], ) @patch("services.auth.firecrawl.firecrawl.httpx.post") @@ -124,12 +127,14 @@ class TestFirecrawlAuth: mock_response.status_code = status_code mock_response.text = response_text if has_json_error: - mock_response.json.side_effect = Exception("Not JSON") + mock_response.json.side_effect = json.JSONDecodeError("Not JSON", "", 0) + else: + mock_response.json.return_value = {"error": "Forbidden"} mock_post.return_value = mock_response with pytest.raises(Exception) as exc_info: auth_instance.validate_credentials() - assert expected_error_contains in str(exc_info.value) + assert str(exc_info.value) == expected_error_contains @pytest.mark.parametrize( ("exception_type", "exception_message"), @@ -164,20 +169,21 @@ class TestFirecrawlAuth: @patch("services.auth.firecrawl.firecrawl.httpx.post") def test_should_use_custom_base_url_in_validation(self, mock_post): - """Test that custom base URL is used in validation""" + """Test that custom base URL is used in validation and normalized""" mock_response = MagicMock() mock_response.status_code = 200 mock_post.return_value = mock_response - credentials = { - "auth_type": "bearer", - "config": {"api_key": "test_api_key_123", "base_url": "https://custom.firecrawl.dev"}, - } - auth = FirecrawlAuth(credentials) - result = auth.validate_credentials() + for base in ("https://custom.firecrawl.dev", "https://custom.firecrawl.dev/"): + credentials = { + "auth_type": "bearer", + "config": {"api_key": "test_api_key_123", "base_url": base}, + } + auth = FirecrawlAuth(credentials) + result = auth.validate_credentials() - assert result is True - assert mock_post.call_args[0][0] == "https://custom.firecrawl.dev/v1/crawl" + assert result is True + assert mock_post.call_args[0][0] == "https://custom.firecrawl.dev/v1/crawl" @patch("services.auth.firecrawl.firecrawl.httpx.post") def test_should_handle_timeout_with_retry_suggestion(self, mock_post, auth_instance): diff --git a/api/tests/unit_tests/services/test_account_service.py b/api/tests/unit_tests/services/test_account_service.py index 627a04bcd0..e35ba74c56 100644 --- a/api/tests/unit_tests/services/test_account_service.py +++ b/api/tests/unit_tests/services/test_account_service.py @@ -619,8 +619,13 @@ class TestTenantService: mock_tenant_instance.name = "Test User's Workspace" mock_tenant_class.return_value = mock_tenant_instance - # Execute test - TenantService.create_owner_tenant_if_not_exist(mock_account) + # Mock the db import in CreditPoolService to avoid database connection + with patch("services.credit_pool_service.db") as mock_credit_pool_db: + mock_credit_pool_db.session.add = MagicMock() + mock_credit_pool_db.session.commit = MagicMock() + + # Execute test + TenantService.create_owner_tenant_if_not_exist(mock_account) # Verify tenant was created with correct parameters mock_db_dependencies["db"].session.add.assert_called() diff --git a/api/tests/unit_tests/services/test_app_dsl_service_import_yaml_url.py b/api/tests/unit_tests/services/test_app_dsl_service_import_yaml_url.py new file mode 100644 index 0000000000..41c1d0ea2a --- /dev/null +++ b/api/tests/unit_tests/services/test_app_dsl_service_import_yaml_url.py @@ -0,0 +1,71 @@ +from unittest.mock import MagicMock + +import httpx + +from models import Account +from services import app_dsl_service +from services.app_dsl_service import AppDslService, ImportMode, ImportStatus + + +def _build_response(url: str, status_code: int, content: bytes = b"") -> httpx.Response: + request = httpx.Request("GET", url) + return 
httpx.Response(status_code=status_code, request=request, content=content) + + +def _pending_yaml_content(version: str = "99.0.0") -> bytes: + return (f'version: "{version}"\nkind: app\napp:\n name: Loop Test\n mode: workflow\n').encode() + + +def _account_mock() -> MagicMock: + account = MagicMock(spec=Account) + account.current_tenant_id = "tenant-1" + return account + + +def test_import_app_yaml_url_user_attachments_keeps_original_url(monkeypatch): + yaml_url = "https://github.com/user-attachments/files/24290802/loop-test.yml" + raw_url = "https://raw.githubusercontent.com/user-attachments/files/24290802/loop-test.yml" + yaml_bytes = _pending_yaml_content() + + def fake_get(url: str, **kwargs): + if url == raw_url: + return _build_response(url, status_code=404) + assert url == yaml_url + return _build_response(url, status_code=200, content=yaml_bytes) + + monkeypatch.setattr(app_dsl_service.ssrf_proxy, "get", fake_get) + + service = AppDslService(MagicMock()) + result = service.import_app( + account=_account_mock(), + import_mode=ImportMode.YAML_URL, + yaml_url=yaml_url, + ) + + assert result.status == ImportStatus.PENDING + assert result.imported_dsl_version == "99.0.0" + + +def test_import_app_yaml_url_github_blob_rewrites_to_raw(monkeypatch): + yaml_url = "https://github.com/acme/repo/blob/main/app.yml" + raw_url = "https://raw.githubusercontent.com/acme/repo/main/app.yml" + yaml_bytes = _pending_yaml_content() + + requested_urls: list[str] = [] + + def fake_get(url: str, **kwargs): + requested_urls.append(url) + assert url == raw_url + return _build_response(url, status_code=200, content=yaml_bytes) + + monkeypatch.setattr(app_dsl_service.ssrf_proxy, "get", fake_get) + + service = AppDslService(MagicMock()) + result = service.import_app( + account=_account_mock(), + import_mode=ImportMode.YAML_URL, + yaml_url=yaml_url, + ) + + assert result.status == ImportStatus.PENDING + assert requested_urls == [raw_url] diff --git a/api/tests/unit_tests/services/test_billing_service.py b/api/tests/unit_tests/services/test_billing_service.py index 915aee3fa7..d00743278e 100644 --- a/api/tests/unit_tests/services/test_billing_service.py +++ b/api/tests/unit_tests/services/test_billing_service.py @@ -1156,6 +1156,235 @@ class TestBillingServiceEdgeCases: assert "Only team owner or team admin can perform this action" in str(exc_info.value) +class TestBillingServiceSubscriptionOperations: + """Unit tests for subscription operations in BillingService. 
+ + Tests cover: + - Bulk plan retrieval with chunking + - Expired subscription cleanup whitelist retrieval + """ + + @pytest.fixture + def mock_send_request(self): + """Mock _send_request method.""" + with patch.object(BillingService, "_send_request") as mock: + yield mock + + def test_get_plan_bulk_with_empty_list(self, mock_send_request): + """Test bulk plan retrieval with empty tenant list.""" + # Arrange + tenant_ids = [] + + # Act + result = BillingService.get_plan_bulk(tenant_ids) + + # Assert + assert result == {} + mock_send_request.assert_not_called() + + def test_get_plan_bulk_with_chunking(self, mock_send_request): + """Test bulk plan retrieval with more than 200 tenants (chunking logic).""" + # Arrange - 250 tenants to test chunking (chunk_size = 200) + tenant_ids = [f"tenant-{i}" for i in range(250)] + + # First chunk: tenants 0-199 + first_chunk_response = { + "data": {f"tenant-{i}": {"plan": "sandbox", "expiration_date": 1735689600} for i in range(200)} + } + + # Second chunk: tenants 200-249 + second_chunk_response = { + "data": {f"tenant-{i}": {"plan": "professional", "expiration_date": 1767225600} for i in range(200, 250)} + } + + mock_send_request.side_effect = [first_chunk_response, second_chunk_response] + + # Act + result = BillingService.get_plan_bulk(tenant_ids) + + # Assert + assert len(result) == 250 + assert result["tenant-0"]["plan"] == "sandbox" + assert result["tenant-199"]["plan"] == "sandbox" + assert result["tenant-200"]["plan"] == "professional" + assert result["tenant-249"]["plan"] == "professional" + assert mock_send_request.call_count == 2 + + # Verify first chunk call + first_call = mock_send_request.call_args_list[0] + assert first_call[0][0] == "POST" + assert first_call[0][1] == "/subscription/plan/batch" + assert len(first_call[1]["json"]["tenant_ids"]) == 200 + + # Verify second chunk call + second_call = mock_send_request.call_args_list[1] + assert len(second_call[1]["json"]["tenant_ids"]) == 50 + + def test_get_plan_bulk_with_partial_batch_failure(self, mock_send_request): + """Test bulk plan retrieval when one batch fails but others succeed.""" + # Arrange - 250 tenants, second batch will fail + tenant_ids = [f"tenant-{i}" for i in range(250)] + + # First chunk succeeds + first_chunk_response = { + "data": {f"tenant-{i}": {"plan": "sandbox", "expiration_date": 1735689600} for i in range(200)} + } + + # Second chunk fails - need to create a mock that raises when called + def side_effect_func(*args, **kwargs): + if mock_send_request.call_count == 1: + return first_chunk_response + else: + raise ValueError("API error") + + mock_send_request.side_effect = side_effect_func + + # Act + result = BillingService.get_plan_bulk(tenant_ids) + + # Assert - should only have data from first batch + assert len(result) == 200 + assert result["tenant-0"]["plan"] == "sandbox" + assert result["tenant-199"]["plan"] == "sandbox" + assert "tenant-200" not in result + assert mock_send_request.call_count == 2 + + def test_get_plan_bulk_with_all_batches_failing(self, mock_send_request): + """Test bulk plan retrieval when all batches fail.""" + # Arrange + tenant_ids = [f"tenant-{i}" for i in range(250)] + + # All chunks fail + def side_effect_func(*args, **kwargs): + raise ValueError("API error") + + mock_send_request.side_effect = side_effect_func + + # Act + result = BillingService.get_plan_bulk(tenant_ids) + + # Assert - should return empty dict + assert result == {} + assert mock_send_request.call_count == 2 + + def test_get_plan_bulk_with_exactly_200_tenants(self, 
mock_send_request): + """Test bulk plan retrieval with exactly 200 tenants (boundary condition).""" + # Arrange + tenant_ids = [f"tenant-{i}" for i in range(200)] + mock_send_request.return_value = { + "data": {f"tenant-{i}": {"plan": "sandbox", "expiration_date": 1735689600} for i in range(200)} + } + + # Act + result = BillingService.get_plan_bulk(tenant_ids) + + # Assert + assert len(result) == 200 + assert mock_send_request.call_count == 1 + + def test_get_plan_bulk_with_empty_data_response(self, mock_send_request): + """Test bulk plan retrieval with empty data in response.""" + # Arrange + tenant_ids = ["tenant-1", "tenant-2"] + mock_send_request.return_value = {"data": {}} + + # Act + result = BillingService.get_plan_bulk(tenant_ids) + + # Assert + assert result == {} + + def test_get_plan_bulk_with_invalid_tenant_plan_skipped(self, mock_send_request): + """Test bulk plan retrieval when one tenant has invalid plan data (should skip that tenant).""" + # Arrange + tenant_ids = ["tenant-valid-1", "tenant-invalid", "tenant-valid-2"] + + # Response with one invalid tenant plan (missing expiration_date) and two valid ones + mock_send_request.return_value = { + "data": { + "tenant-valid-1": {"plan": "sandbox", "expiration_date": 1735689600}, + "tenant-invalid": {"plan": "professional"}, # Missing expiration_date field + "tenant-valid-2": {"plan": "team", "expiration_date": 1767225600}, + } + } + + # Act + with patch("services.billing_service.logger") as mock_logger: + result = BillingService.get_plan_bulk(tenant_ids) + + # Assert - should only contain valid tenants + assert len(result) == 2 + assert "tenant-valid-1" in result + assert "tenant-valid-2" in result + assert "tenant-invalid" not in result + + # Verify valid tenants have correct data + assert result["tenant-valid-1"]["plan"] == "sandbox" + assert result["tenant-valid-1"]["expiration_date"] == 1735689600 + assert result["tenant-valid-2"]["plan"] == "team" + assert result["tenant-valid-2"]["expiration_date"] == 1767225600 + + # Verify exception was logged for the invalid tenant + mock_logger.exception.assert_called_once() + log_call_args = mock_logger.exception.call_args[0] + assert "get_plan_bulk: failed to validate subscription plan for tenant" in log_call_args[0] + assert "tenant-invalid" in log_call_args[1] + + def test_get_expired_subscription_cleanup_whitelist_success(self, mock_send_request): + """Test successful retrieval of expired subscription cleanup whitelist.""" + # Arrange + api_response = [ + { + "created_at": "2025-10-16T01:56:17", + "tenant_id": "36bd55ec-2ea9-4d75-a9ea-1f26aeb4ffe6", + "contact": "example@dify.ai", + "id": "36bd55ec-2ea9-4d75-a9ea-1f26aeb4ffe5", + "expired_at": "2026-01-01T01:56:17", + "updated_at": "2025-10-16T01:56:17", + }, + { + "created_at": "2025-10-16T02:00:00", + "tenant_id": "tenant-2", + "contact": "test@example.com", + "id": "whitelist-id-2", + "expired_at": "2026-02-01T00:00:00", + "updated_at": "2025-10-16T02:00:00", + }, + { + "created_at": "2025-10-16T03:00:00", + "tenant_id": "tenant-3", + "contact": "another@example.com", + "id": "whitelist-id-3", + "expired_at": "2026-03-01T00:00:00", + "updated_at": "2025-10-16T03:00:00", + }, + ] + mock_send_request.return_value = {"data": api_response} + + # Act + result = BillingService.get_expired_subscription_cleanup_whitelist() + + # Assert - should return only tenant_ids + assert result == ["36bd55ec-2ea9-4d75-a9ea-1f26aeb4ffe6", "tenant-2", "tenant-3"] + assert len(result) == 3 + assert result[0] == "36bd55ec-2ea9-4d75-a9ea-1f26aeb4ffe6" 
+ assert result[1] == "tenant-2" + assert result[2] == "tenant-3" + mock_send_request.assert_called_once_with("GET", "/subscription/cleanup/whitelist") + + def test_get_expired_subscription_cleanup_whitelist_empty_list(self, mock_send_request): + """Test retrieval of empty cleanup whitelist.""" + # Arrange + mock_send_request.return_value = {"data": []} + + # Act + result = BillingService.get_expired_subscription_cleanup_whitelist() + + # Assert + assert result == [] + assert len(result) == 0 + + class TestBillingServiceIntegrationScenarios: """Integration-style tests simulating real-world usage scenarios. diff --git a/api/tests/unit_tests/services/test_clear_free_plan_expired_workflow_run_logs.py b/api/tests/unit_tests/services/test_clear_free_plan_expired_workflow_run_logs.py new file mode 100644 index 0000000000..a1685fcfb0 --- /dev/null +++ b/api/tests/unit_tests/services/test_clear_free_plan_expired_workflow_run_logs.py @@ -0,0 +1,194 @@ +import datetime +from typing import Any + +import pytest + +from services import clear_free_plan_expired_workflow_run_logs as cleanup_module +from services.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup + + +class FakeRun: + def __init__( + self, + run_id: str, + tenant_id: str, + created_at: datetime.datetime, + app_id: str = "app-1", + workflow_id: str = "wf-1", + triggered_from: str = "workflow-run", + ) -> None: + self.id = run_id + self.tenant_id = tenant_id + self.app_id = app_id + self.workflow_id = workflow_id + self.triggered_from = triggered_from + self.created_at = created_at + + +class FakeRepo: + def __init__(self, batches: list[list[FakeRun]], delete_result: dict[str, int] | None = None) -> None: + self.batches = batches + self.call_idx = 0 + self.deleted: list[list[str]] = [] + self.delete_result = delete_result or { + "runs": 0, + "node_executions": 0, + "offloads": 0, + "app_logs": 0, + "trigger_logs": 0, + "pauses": 0, + "pause_reasons": 0, + } + + def get_runs_batch_by_time_range( + self, + start_after: datetime.datetime | None, + end_before: datetime.datetime, + last_seen: tuple[datetime.datetime, str] | None, + batch_size: int, + ) -> list[FakeRun]: + if self.call_idx >= len(self.batches): + return [] + batch = self.batches[self.call_idx] + self.call_idx += 1 + return batch + + def delete_runs_with_related( + self, runs: list[FakeRun], delete_node_executions=None, delete_trigger_logs=None + ) -> dict[str, int]: + self.deleted.append([run.id for run in runs]) + result = self.delete_result.copy() + result["runs"] = len(runs) + return result + + +def create_cleanup(monkeypatch: pytest.MonkeyPatch, repo: FakeRepo, **kwargs: Any) -> WorkflowRunCleanup: + return WorkflowRunCleanup(workflow_run_repo=repo, **kwargs) + + +def test_filter_free_tenants_billing_disabled(monkeypatch: pytest.MonkeyPatch) -> None: + cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", False) + + def fail_bulk(_: list[str]) -> dict[str, dict[str, Any]]: + raise RuntimeError("should not call") + + monkeypatch.setattr(cleanup_module.BillingService, "get_info_bulk", staticmethod(fail_bulk)) + + tenants = {"t1", "t2"} + free = cleanup._filter_free_tenants(tenants) + + assert free == tenants + + +def test_filter_free_tenants_bulk_mixed(monkeypatch: pytest.MonkeyPatch) -> None: + cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True) + 
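+    # Cached plans take precedence over the bulk lookup: t_paid stays paid even
+    # though the mocked get_info_bulk reports "sandbox" for every tenant.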
cleanup.billing_cache["t_free"] = cleanup_module.CloudPlan.SANDBOX + cleanup.billing_cache["t_paid"] = cleanup_module.CloudPlan.TEAM + monkeypatch.setattr( + cleanup_module.BillingService, + "get_info_bulk", + staticmethod(lambda tenant_ids: dict.fromkeys(tenant_ids, "sandbox")), + ) + + free = cleanup._filter_free_tenants({"t_free", "t_paid", "t_missing"}) + + assert free == {"t_free", "t_missing"} + + +def test_filter_free_tenants_bulk_failure(monkeypatch: pytest.MonkeyPatch) -> None: + cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True) + monkeypatch.setattr( + cleanup_module.BillingService, + "get_info_bulk", + staticmethod(lambda tenant_ids: (_ for _ in ()).throw(RuntimeError("boom"))), + ) + + free = cleanup._filter_free_tenants({"t1", "t2"}) + + assert free == set() + + +def test_run_deletes_only_free_tenants(monkeypatch: pytest.MonkeyPatch) -> None: + cutoff = datetime.datetime.now() + repo = FakeRepo( + batches=[ + [ + FakeRun("run-free", "t_free", cutoff), + FakeRun("run-paid", "t_paid", cutoff), + ] + ] + ) + cleanup = create_cleanup(monkeypatch, repo=repo, days=30, batch_size=10) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True) + cleanup.billing_cache["t_free"] = cleanup_module.CloudPlan.SANDBOX + cleanup.billing_cache["t_paid"] = cleanup_module.CloudPlan.TEAM + monkeypatch.setattr( + cleanup_module.BillingService, + "get_info_bulk", + staticmethod(lambda tenant_ids: dict.fromkeys(tenant_ids, "sandbox")), + ) + + cleanup.run() + + assert repo.deleted == [["run-free"]] + + +def test_run_skips_when_no_free_tenants(monkeypatch: pytest.MonkeyPatch) -> None: + cutoff = datetime.datetime.now() + repo = FakeRepo(batches=[[FakeRun("run-paid", "t_paid", cutoff)]]) + cleanup = create_cleanup(monkeypatch, repo=repo, days=30, batch_size=10) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True) + monkeypatch.setattr( + cleanup_module.BillingService, + "get_info_bulk", + staticmethod(lambda tenant_ids: dict.fromkeys(tenant_ids, "team")), + ) + + cleanup.run() + + assert repo.deleted == [] + + +def test_run_exits_on_empty_batch(monkeypatch: pytest.MonkeyPatch) -> None: + cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10) + + cleanup.run() + + +def test_between_sets_window_bounds(monkeypatch: pytest.MonkeyPatch) -> None: + start_after = datetime.datetime(2024, 5, 1, 0, 0, 0) + end_before = datetime.datetime(2024, 6, 1, 0, 0, 0) + cleanup = create_cleanup( + monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_after=start_after, end_before=end_before + ) + + assert cleanup.window_start == start_after + assert cleanup.window_end == end_before + + +def test_between_requires_both_boundaries(monkeypatch: pytest.MonkeyPatch) -> None: + with pytest.raises(ValueError): + create_cleanup( + monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_after=datetime.datetime.now(), end_before=None + ) + with pytest.raises(ValueError): + create_cleanup( + monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_after=None, end_before=datetime.datetime.now() + ) + + +def test_between_requires_end_after_start(monkeypatch: pytest.MonkeyPatch) -> None: + start_after = datetime.datetime(2024, 6, 1, 0, 0, 0) + end_before = datetime.datetime(2024, 5, 1, 0, 0, 0) + with pytest.raises(ValueError): + create_cleanup( + monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_after=start_after, 
end_before=end_before + ) diff --git a/api/tests/unit_tests/services/test_dataset_service_get_segments.py b/api/tests/unit_tests/services/test_dataset_service_get_segments.py new file mode 100644 index 0000000000..360c8a3c7d --- /dev/null +++ b/api/tests/unit_tests/services/test_dataset_service_get_segments.py @@ -0,0 +1,472 @@ +""" +Unit tests for SegmentService.get_segments method. + +Tests the retrieval of document segments with pagination and filtering: +- Basic pagination (page, limit) +- Status filtering +- Keyword search +- Ordering by position and id (to avoid duplicate data) +""" + +from unittest.mock import Mock, create_autospec, patch + +import pytest + +from models.dataset import DocumentSegment + + +class SegmentServiceTestDataFactory: + """ + Factory class for creating test data and mock objects for segment tests. + """ + + @staticmethod + def create_segment_mock( + segment_id: str = "segment-123", + document_id: str = "doc-123", + tenant_id: str = "tenant-123", + dataset_id: str = "dataset-123", + position: int = 1, + content: str = "Test content", + status: str = "completed", + **kwargs, + ) -> Mock: + """ + Create a mock document segment. + + Args: + segment_id: Unique identifier for the segment + document_id: Parent document ID + tenant_id: Tenant ID the segment belongs to + dataset_id: Parent dataset ID + position: Position within the document + content: Segment text content + status: Indexing status + **kwargs: Additional attributes + + Returns: + Mock: DocumentSegment mock object + """ + segment = create_autospec(DocumentSegment, instance=True) + segment.id = segment_id + segment.document_id = document_id + segment.tenant_id = tenant_id + segment.dataset_id = dataset_id + segment.position = position + segment.content = content + segment.status = status + for key, value in kwargs.items(): + setattr(segment, key, value) + return segment + + +class TestSegmentServiceGetSegments: + """ + Comprehensive unit tests for SegmentService.get_segments method. + + Tests cover: + - Basic pagination functionality + - Status list filtering + - Keyword search filtering + - Ordering (position + id for uniqueness) + - Empty results + - Combined filters + """ + + @pytest.fixture + def mock_segment_service_dependencies(self): + """ + Common mock setup for segment service dependencies. + + Patches: + - db: Database operations and pagination + - select: SQLAlchemy query builder + """ + with ( + patch("services.dataset_service.db") as mock_db, + patch("services.dataset_service.select") as mock_select, + ): + yield { + "db": mock_db, + "select": mock_select, + } + + def test_get_segments_basic_pagination(self, mock_segment_service_dependencies): + """ + Test basic pagination functionality. 
+ + Verifies: + - Query is built with document_id and tenant_id filters + - Pagination uses correct page and limit parameters + - Returns segments and total count + """ + # Arrange + document_id = "doc-123" + tenant_id = "tenant-123" + page = 1 + limit = 20 + + # Create mock segments + segment1 = SegmentServiceTestDataFactory.create_segment_mock( + segment_id="seg-1", position=1, content="First segment" + ) + segment2 = SegmentServiceTestDataFactory.create_segment_mock( + segment_id="seg-2", position=2, content="Second segment" + ) + + # Mock pagination result + mock_paginated = Mock() + mock_paginated.items = [segment1, segment2] + mock_paginated.total = 2 + + mock_segment_service_dependencies["db"].paginate.return_value = mock_paginated + + # Mock select builder + mock_query = Mock() + mock_segment_service_dependencies["select"].return_value = mock_query + mock_query.where.return_value = mock_query + mock_query.order_by.return_value = mock_query + + # Act + from services.dataset_service import SegmentService + + items, total = SegmentService.get_segments(document_id=document_id, tenant_id=tenant_id, page=page, limit=limit) + + # Assert + assert len(items) == 2 + assert total == 2 + assert items[0].id == "seg-1" + assert items[1].id == "seg-2" + mock_segment_service_dependencies["db"].paginate.assert_called_once() + call_kwargs = mock_segment_service_dependencies["db"].paginate.call_args[1] + assert call_kwargs["page"] == page + assert call_kwargs["per_page"] == limit + assert call_kwargs["max_per_page"] == 100 + assert call_kwargs["error_out"] is False + + def test_get_segments_with_status_filter(self, mock_segment_service_dependencies): + """ + Test filtering by status list. + + Verifies: + - Status list filter is applied to query + - Only segments with matching status are returned + """ + # Arrange + document_id = "doc-123" + tenant_id = "tenant-123" + status_list = ["completed", "indexing"] + + segment1 = SegmentServiceTestDataFactory.create_segment_mock(segment_id="seg-1", status="completed") + segment2 = SegmentServiceTestDataFactory.create_segment_mock(segment_id="seg-2", status="indexing") + + mock_paginated = Mock() + mock_paginated.items = [segment1, segment2] + mock_paginated.total = 2 + + mock_segment_service_dependencies["db"].paginate.return_value = mock_paginated + + mock_query = Mock() + mock_segment_service_dependencies["select"].return_value = mock_query + mock_query.where.return_value = mock_query + mock_query.order_by.return_value = mock_query + + # Act + from services.dataset_service import SegmentService + + items, total = SegmentService.get_segments( + document_id=document_id, tenant_id=tenant_id, status_list=status_list + ) + + # Assert + assert len(items) == 2 + assert total == 2 + # Verify where was called multiple times (base filters + status filter) + assert mock_query.where.call_count >= 2 + + def test_get_segments_with_empty_status_list(self, mock_segment_service_dependencies): + """ + Test with empty status list. 
+ + Verifies: + - Empty status list is handled correctly + - No status filter is applied to avoid WHERE false condition + """ + # Arrange + document_id = "doc-123" + tenant_id = "tenant-123" + status_list = [] + + segment = SegmentServiceTestDataFactory.create_segment_mock(segment_id="seg-1") + + mock_paginated = Mock() + mock_paginated.items = [segment] + mock_paginated.total = 1 + + mock_segment_service_dependencies["db"].paginate.return_value = mock_paginated + + mock_query = Mock() + mock_segment_service_dependencies["select"].return_value = mock_query + mock_query.where.return_value = mock_query + mock_query.order_by.return_value = mock_query + + # Act + from services.dataset_service import SegmentService + + items, total = SegmentService.get_segments( + document_id=document_id, tenant_id=tenant_id, status_list=status_list + ) + + # Assert + assert len(items) == 1 + assert total == 1 + # Should only be called once (base filters, no status filter) + assert mock_query.where.call_count == 1 + + def test_get_segments_with_keyword_search(self, mock_segment_service_dependencies): + """ + Test keyword search functionality. + + Verifies: + - Keyword filter uses ilike for case-insensitive search + - Search pattern includes wildcards (%keyword%) + """ + # Arrange + document_id = "doc-123" + tenant_id = "tenant-123" + keyword = "search term" + + segment = SegmentServiceTestDataFactory.create_segment_mock( + segment_id="seg-1", content="This contains search term" + ) + + mock_paginated = Mock() + mock_paginated.items = [segment] + mock_paginated.total = 1 + + mock_segment_service_dependencies["db"].paginate.return_value = mock_paginated + + mock_query = Mock() + mock_segment_service_dependencies["select"].return_value = mock_query + mock_query.where.return_value = mock_query + mock_query.order_by.return_value = mock_query + + # Act + from services.dataset_service import SegmentService + + items, total = SegmentService.get_segments(document_id=document_id, tenant_id=tenant_id, keyword=keyword) + + # Assert + assert len(items) == 1 + assert total == 1 + # Verify where was called for base filters + keyword filter + assert mock_query.where.call_count == 2 + + def test_get_segments_ordering_by_position_and_id(self, mock_segment_service_dependencies): + """ + Test ordering by position and id. 
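# Editor's note: a small worked example (illustrative values only) of the
# cross-page duplicate hazard the id tiebreaker prevents. With positions
# alone, rows sharing a position have no defined relative order, so page
# boundaries can repeat or skip them between queries.
rows = [("seg-2", 1), ("seg-1", 1), ("seg-3", 2)]  # (id, position), insertion order

stable = sorted(rows, key=lambda r: (r[1], r[0]))  # ORDER BY position ASC, id ASC
page_size = 2
page_1, page_2 = stable[:page_size], stable[page_size:]

assert page_1 == [("seg-1", 1), ("seg-2", 1)]
assert set(page_1).isdisjoint(page_2)  # deterministic order => no duplicates across pages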
+ + Verifies: + - Results are ordered by position ASC + - Results are secondarily ordered by id ASC to ensure uniqueness + - This prevents duplicate data across pages when positions are not unique + """ + # Arrange + document_id = "doc-123" + tenant_id = "tenant-123" + + # Create segments with same position but different ids + segment1 = SegmentServiceTestDataFactory.create_segment_mock( + segment_id="seg-1", position=1, content="Content 1" + ) + segment2 = SegmentServiceTestDataFactory.create_segment_mock( + segment_id="seg-2", position=1, content="Content 2" + ) + segment3 = SegmentServiceTestDataFactory.create_segment_mock( + segment_id="seg-3", position=2, content="Content 3" + ) + + mock_paginated = Mock() + mock_paginated.items = [segment1, segment2, segment3] + mock_paginated.total = 3 + + mock_segment_service_dependencies["db"].paginate.return_value = mock_paginated + + mock_query = Mock() + mock_segment_service_dependencies["select"].return_value = mock_query + mock_query.where.return_value = mock_query + mock_query.order_by.return_value = mock_query + + # Act + from services.dataset_service import SegmentService + + items, total = SegmentService.get_segments(document_id=document_id, tenant_id=tenant_id) + + # Assert + assert len(items) == 3 + assert total == 3 + mock_query.order_by.assert_called_once() + + def test_get_segments_empty_results(self, mock_segment_service_dependencies): + """ + Test when no segments match the criteria. + + Verifies: + - Empty list is returned for items + - Total count is 0 + """ + # Arrange + document_id = "non-existent-doc" + tenant_id = "tenant-123" + + mock_paginated = Mock() + mock_paginated.items = [] + mock_paginated.total = 0 + + mock_segment_service_dependencies["db"].paginate.return_value = mock_paginated + + mock_query = Mock() + mock_segment_service_dependencies["select"].return_value = mock_query + mock_query.where.return_value = mock_query + mock_query.order_by.return_value = mock_query + + # Act + from services.dataset_service import SegmentService + + items, total = SegmentService.get_segments(document_id=document_id, tenant_id=tenant_id) + + # Assert + assert items == [] + assert total == 0 + + def test_get_segments_combined_filters(self, mock_segment_service_dependencies): + """ + Test with multiple filters combined. 
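# Editor's note: the counting technique used throughout these tests, shown in
# isolation — every where() returns the same mock, so the query-builder chain
# collapses into a tally of how many filter groups were applied.
from unittest.mock import Mock

query = Mock()
query.where.return_value = query

query = query.where("document_id/tenant_id")  # base filters (always applied)
query = query.where("status IN (...)")
query = query.where("content ILIKE %kw%")

assert query.where.call_count == 3  # base + status + keyword, as asserted below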
+ + Verifies: + - All filters work together correctly + - Status list and keyword search both applied + """ + # Arrange + document_id = "doc-123" + tenant_id = "tenant-123" + status_list = ["completed"] + keyword = "important" + page = 2 + limit = 10 + + segment = SegmentServiceTestDataFactory.create_segment_mock( + segment_id="seg-1", + status="completed", + content="This is important information", + ) + + mock_paginated = Mock() + mock_paginated.items = [segment] + mock_paginated.total = 1 + + mock_segment_service_dependencies["db"].paginate.return_value = mock_paginated + + mock_query = Mock() + mock_segment_service_dependencies["select"].return_value = mock_query + mock_query.where.return_value = mock_query + mock_query.order_by.return_value = mock_query + + # Act + from services.dataset_service import SegmentService + + items, total = SegmentService.get_segments( + document_id=document_id, + tenant_id=tenant_id, + status_list=status_list, + keyword=keyword, + page=page, + limit=limit, + ) + + # Assert + assert len(items) == 1 + assert total == 1 + # Verify filters: base + status + keyword + assert mock_query.where.call_count == 3 + # Verify pagination parameters + call_kwargs = mock_segment_service_dependencies["db"].paginate.call_args[1] + assert call_kwargs["page"] == page + assert call_kwargs["per_page"] == limit + + def test_get_segments_with_none_status_list(self, mock_segment_service_dependencies): + """ + Test with None status list. + + Verifies: + - None status list is handled correctly + - No status filter is applied + """ + # Arrange + document_id = "doc-123" + tenant_id = "tenant-123" + + segment = SegmentServiceTestDataFactory.create_segment_mock(segment_id="seg-1") + + mock_paginated = Mock() + mock_paginated.items = [segment] + mock_paginated.total = 1 + + mock_segment_service_dependencies["db"].paginate.return_value = mock_paginated + + mock_query = Mock() + mock_segment_service_dependencies["select"].return_value = mock_query + mock_query.where.return_value = mock_query + mock_query.order_by.return_value = mock_query + + # Act + from services.dataset_service import SegmentService + + items, total = SegmentService.get_segments( + document_id=document_id, + tenant_id=tenant_id, + status_list=None, + ) + + # Assert + assert len(items) == 1 + assert total == 1 + # Should only be called once (base filters only, no status filter) + assert mock_query.where.call_count == 1 + + def test_get_segments_pagination_max_per_page_limit(self, mock_segment_service_dependencies): + """ + Test that max_per_page is correctly set to 100. 
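# Editor's note: a hypothetical illustration of what max_per_page buys —
# Flask-SQLAlchemy's paginate caps per_page at max_per_page, so an oversized
# limit (200 here) still yields at most 100 rows per page.
requested_limit = 200
MAX_PER_PAGE = 100

effective_per_page = min(requested_limit, MAX_PER_PAGE)
assert effective_per_page == 100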
+ + Verifies: + - max_per_page parameter is set to 100 + - This prevents excessive page sizes + """ + # Arrange + document_id = "doc-123" + tenant_id = "tenant-123" + limit = 200 # Request more than max_per_page + + mock_paginated = Mock() + mock_paginated.items = [] + mock_paginated.total = 0 + + mock_segment_service_dependencies["db"].paginate.return_value = mock_paginated + + mock_query = Mock() + mock_segment_service_dependencies["select"].return_value = mock_query + mock_query.where.return_value = mock_query + mock_query.order_by.return_value = mock_query + + # Act + from services.dataset_service import SegmentService + + SegmentService.get_segments( + document_id=document_id, + tenant_id=tenant_id, + limit=limit, + ) + + # Assert + call_kwargs = mock_segment_service_dependencies["db"].paginate.call_args[1] + assert call_kwargs["max_per_page"] == 100 diff --git a/api/tests/unit_tests/services/test_model_provider_service_sanitization.py b/api/tests/unit_tests/services/test_model_provider_service_sanitization.py index 9a107da1c7..e2360b116d 100644 --- a/api/tests/unit_tests/services/test_model_provider_service_sanitization.py +++ b/api/tests/unit_tests/services/test_model_provider_service_sanitization.py @@ -27,7 +27,6 @@ def service_with_fake_configurations(): description=None, icon_small=None, icon_small_dark=None, - icon_large=None, background=None, help=None, supported_model_types=[ModelType.LLM], diff --git a/api/tests/unit_tests/services/test_variable_truncator.py b/api/tests/unit_tests/services/test_variable_truncator.py index cf6fb25c1c..ec819ae57a 100644 --- a/api/tests/unit_tests/services/test_variable_truncator.py +++ b/api/tests/unit_tests/services/test_variable_truncator.py @@ -518,6 +518,55 @@ class TestEdgeCases: assert isinstance(result.result, StringSegment) +class TestTruncateJsonPrimitives: + """Test _truncate_json_primitives method with different data types.""" + + @pytest.fixture + def truncator(self): + return VariableTruncator() + + def test_truncate_json_primitives_file_type(self, truncator, file): + """Test that File objects are handled correctly in _truncate_json_primitives.""" + # Test File object is returned as-is without truncation + result = truncator._truncate_json_primitives(file, 1000) + + assert result.value == file + assert result.truncated is False + # Size should be calculated correctly + expected_size = VariableTruncator.calculate_json_size(file) + assert result.value_size == expected_size + + def test_truncate_json_primitives_file_type_small_budget(self, truncator, file): + """Test that File objects are returned as-is even with small budget.""" + # Even with a small size budget, File objects should not be truncated + result = truncator._truncate_json_primitives(file, 10) + + assert result.value == file + assert result.truncated is False + + def test_truncate_json_primitives_file_type_in_array(self, truncator, file): + """Test File objects in arrays are handled correctly.""" + array_with_files = [file, file] + result = truncator._truncate_json_primitives(array_with_files, 1000) + + assert isinstance(result.value, list) + assert len(result.value) == 2 + assert result.value[0] == file + assert result.value[1] == file + assert result.truncated is False + + def test_truncate_json_primitives_file_type_in_object(self, truncator, file): + """Test File objects in objects are handled correctly.""" + obj_with_files = {"file1": file, "file2": file} + result = truncator._truncate_json_primitives(obj_with_files, 1000) + + assert isinstance(result.value, dict) + 
assert len(result.value) == 2 + assert result.value["file1"] == file + assert result.value["file2"] == file + assert result.truncated is False + + class TestIntegrationScenarios: """Test realistic integration scenarios.""" diff --git a/api/tests/unit_tests/services/test_webhook_service.py b/api/tests/unit_tests/services/test_webhook_service.py index 920b1e91b6..d788657589 100644 --- a/api/tests/unit_tests/services/test_webhook_service.py +++ b/api/tests/unit_tests/services/test_webhook_service.py @@ -110,6 +110,70 @@ class TestWebhookServiceUnit: assert webhook_data["method"] == "POST" assert webhook_data["body"]["raw"] == "raw text content" + def test_extract_octet_stream_body_uses_detected_mime(self): + """Octet-stream uploads should rely on detected MIME type.""" + app = Flask(__name__) + binary_content = b"plain text data" + + with app.test_request_context( + "/webhook", method="POST", headers={"Content-Type": "application/octet-stream"}, data=binary_content + ): + webhook_trigger = MagicMock() + mock_file = MagicMock() + mock_file.to_dict.return_value = {"file": "data"} + + with ( + patch.object(WebhookService, "_detect_binary_mimetype", return_value="text/plain") as mock_detect, + patch.object(WebhookService, "_create_file_from_binary") as mock_create, + ): + mock_create.return_value = mock_file + body, files = WebhookService._extract_octet_stream_body(webhook_trigger) + + assert body["raw"] == {"file": "data"} + assert files == {} + mock_detect.assert_called_once_with(binary_content) + mock_create.assert_called_once() + args = mock_create.call_args[0] + assert args[0] == binary_content + assert args[1] == "text/plain" + assert args[2] is webhook_trigger + + def test_detect_binary_mimetype_uses_magic(self, monkeypatch): + """python-magic output should be used when available.""" + fake_magic = MagicMock() + fake_magic.from_buffer.return_value = "image/png" + monkeypatch.setattr("services.trigger.webhook_service.magic", fake_magic) + + result = WebhookService._detect_binary_mimetype(b"binary data") + + assert result == "image/png" + fake_magic.from_buffer.assert_called_once() + + def test_detect_binary_mimetype_fallback_without_magic(self, monkeypatch): + """Fallback MIME type should be used when python-magic is unavailable.""" + monkeypatch.setattr("services.trigger.webhook_service.magic", None) + + result = WebhookService._detect_binary_mimetype(b"binary data") + + assert result == "application/octet-stream" + + def test_detect_binary_mimetype_handles_magic_exception(self, monkeypatch): + """Fallback MIME type should be used when python-magic raises an exception.""" + try: + import magic as real_magic + except ImportError: + pytest.skip("python-magic is not installed") + + fake_magic = MagicMock() + fake_magic.from_buffer.side_effect = real_magic.MagicException("magic error") + monkeypatch.setattr("services.trigger.webhook_service.magic", fake_magic) + + with patch("services.trigger.webhook_service.logger") as mock_logger: + result = WebhookService._detect_binary_mimetype(b"binary data") + + assert result == "application/octet-stream" + mock_logger.debug.assert_called_once() + def test_extract_webhook_data_invalid_json(self): """Test webhook data extraction with invalid JSON.""" app = Flask(__name__) diff --git a/api/tests/unit_tests/tasks/test_document_indexing_sync_task.py b/api/tests/unit_tests/tasks/test_document_indexing_sync_task.py new file mode 100644 index 0000000000..374abe0368 --- /dev/null +++ b/api/tests/unit_tests/tasks/test_document_indexing_sync_task.py @@ -0,0 +1,520 
@@ +""" +Unit tests for document indexing sync task. + +This module tests the document indexing sync task functionality including: +- Syncing Notion documents when updated +- Validating document and data source existence +- Credential validation and retrieval +- Cleaning old segments before re-indexing +- Error handling and edge cases +""" + +import uuid +from unittest.mock import MagicMock, Mock, patch + +import pytest + +from core.indexing_runner import DocumentIsPausedError, IndexingRunner +from models.dataset import Dataset, Document, DocumentSegment +from tasks.document_indexing_sync_task import document_indexing_sync_task + +# ============================================================================ +# Fixtures +# ============================================================================ + + +@pytest.fixture +def tenant_id(): + """Generate a unique tenant ID for testing.""" + return str(uuid.uuid4()) + + +@pytest.fixture +def dataset_id(): + """Generate a unique dataset ID for testing.""" + return str(uuid.uuid4()) + + +@pytest.fixture +def document_id(): + """Generate a unique document ID for testing.""" + return str(uuid.uuid4()) + + +@pytest.fixture +def notion_workspace_id(): + """Generate a Notion workspace ID for testing.""" + return str(uuid.uuid4()) + + +@pytest.fixture +def notion_page_id(): + """Generate a Notion page ID for testing.""" + return str(uuid.uuid4()) + + +@pytest.fixture +def credential_id(): + """Generate a credential ID for testing.""" + return str(uuid.uuid4()) + + +@pytest.fixture +def mock_dataset(dataset_id, tenant_id): + """Create a mock Dataset object.""" + dataset = Mock(spec=Dataset) + dataset.id = dataset_id + dataset.tenant_id = tenant_id + dataset.indexing_technique = "high_quality" + dataset.embedding_model_provider = "openai" + dataset.embedding_model = "text-embedding-ada-002" + return dataset + + +@pytest.fixture +def mock_document(document_id, dataset_id, tenant_id, notion_workspace_id, notion_page_id, credential_id): + """Create a mock Document object with Notion data source.""" + doc = Mock(spec=Document) + doc.id = document_id + doc.dataset_id = dataset_id + doc.tenant_id = tenant_id + doc.data_source_type = "notion_import" + doc.indexing_status = "completed" + doc.error = None + doc.stopped_at = None + doc.processing_started_at = None + doc.doc_form = "text_model" + doc.data_source_info_dict = { + "notion_workspace_id": notion_workspace_id, + "notion_page_id": notion_page_id, + "type": "page", + "last_edited_time": "2024-01-01T00:00:00Z", + "credential_id": credential_id, + } + return doc + + +@pytest.fixture +def mock_document_segments(document_id): + """Create mock DocumentSegment objects.""" + segments = [] + for i in range(3): + segment = Mock(spec=DocumentSegment) + segment.id = str(uuid.uuid4()) + segment.document_id = document_id + segment.index_node_id = f"node-{document_id}-{i}" + segments.append(segment) + return segments + + +@pytest.fixture +def mock_db_session(): + """Mock database session.""" + with patch("tasks.document_indexing_sync_task.db.session") as mock_session: + mock_query = MagicMock() + mock_session.query.return_value = mock_query + mock_query.where.return_value = mock_query + mock_session.scalars.return_value = MagicMock() + yield mock_session + + +@pytest.fixture +def mock_datasource_provider_service(): + """Mock DatasourceProviderService.""" + with patch("tasks.document_indexing_sync_task.DatasourceProviderService") as mock_service_class: + mock_service = MagicMock() + 
mock_service.get_datasource_credentials.return_value = {"integration_secret": "test_token"} + mock_service_class.return_value = mock_service + yield mock_service + + +@pytest.fixture +def mock_notion_extractor(): + """Mock NotionExtractor.""" + with patch("tasks.document_indexing_sync_task.NotionExtractor") as mock_extractor_class: + mock_extractor = MagicMock() + mock_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z" # Updated time + mock_extractor_class.return_value = mock_extractor + yield mock_extractor + + +@pytest.fixture +def mock_index_processor_factory(): + """Mock IndexProcessorFactory.""" + with patch("tasks.document_indexing_sync_task.IndexProcessorFactory") as mock_factory: + mock_processor = MagicMock() + mock_processor.clean = Mock() + mock_factory.return_value.init_index_processor.return_value = mock_processor + yield mock_factory + + +@pytest.fixture +def mock_indexing_runner(): + """Mock IndexingRunner.""" + with patch("tasks.document_indexing_sync_task.IndexingRunner") as mock_runner_class: + mock_runner = MagicMock(spec=IndexingRunner) + mock_runner.run = Mock() + mock_runner_class.return_value = mock_runner + yield mock_runner + + +# ============================================================================ +# Tests for document_indexing_sync_task +# ============================================================================ + + +class TestDocumentIndexingSyncTask: + """Tests for the document_indexing_sync_task function.""" + + def test_document_not_found(self, mock_db_session, dataset_id, document_id): + """Test that task handles document not found gracefully.""" + # Arrange + mock_db_session.query.return_value.where.return_value.first.return_value = None + + # Act + document_indexing_sync_task(dataset_id, document_id) + + # Assert + mock_db_session.close.assert_called_once() + + def test_missing_notion_workspace_id(self, mock_db_session, mock_document, dataset_id, document_id): + """Test that task raises error when notion_workspace_id is missing.""" + # Arrange + mock_document.data_source_info_dict = {"notion_page_id": "page123", "type": "page"} + mock_db_session.query.return_value.where.return_value.first.return_value = mock_document + + # Act & Assert + with pytest.raises(ValueError, match="no notion page found"): + document_indexing_sync_task(dataset_id, document_id) + + def test_missing_notion_page_id(self, mock_db_session, mock_document, dataset_id, document_id): + """Test that task raises error when notion_page_id is missing.""" + # Arrange + mock_document.data_source_info_dict = {"notion_workspace_id": "ws123", "type": "page"} + mock_db_session.query.return_value.where.return_value.first.return_value = mock_document + + # Act & Assert + with pytest.raises(ValueError, match="no notion page found"): + document_indexing_sync_task(dataset_id, document_id) + + def test_empty_data_source_info(self, mock_db_session, mock_document, dataset_id, document_id): + """Test that task raises error when data_source_info is empty.""" + # Arrange + mock_document.data_source_info_dict = None + mock_db_session.query.return_value.where.return_value.first.return_value = mock_document + + # Act & Assert + with pytest.raises(ValueError, match="no notion page found"): + document_indexing_sync_task(dataset_id, document_id) + + def test_credential_not_found( + self, + mock_db_session, + mock_datasource_provider_service, + mock_document, + dataset_id, + document_id, + ): + """Test that task handles missing credentials by updating document status.""" + # 
Arrange + mock_db_session.query.return_value.where.return_value.first.return_value = mock_document + mock_datasource_provider_service.get_datasource_credentials.return_value = None + + # Act + document_indexing_sync_task(dataset_id, document_id) + + # Assert + assert mock_document.indexing_status == "error" + assert "Datasource credential not found" in mock_document.error + assert mock_document.stopped_at is not None + mock_db_session.commit.assert_called() + mock_db_session.close.assert_called() + + def test_page_not_updated( + self, + mock_db_session, + mock_datasource_provider_service, + mock_notion_extractor, + mock_document, + dataset_id, + document_id, + ): + """Test that task does nothing when page has not been updated.""" + # Arrange + mock_db_session.query.return_value.where.return_value.first.return_value = mock_document + # Return same time as stored in document + mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z" + + # Act + document_indexing_sync_task(dataset_id, document_id) + + # Assert + # Document status should remain unchanged + assert mock_document.indexing_status == "completed" + # No session operations should be performed beyond the initial query + mock_db_session.close.assert_not_called() + + def test_successful_sync_when_page_updated( + self, + mock_db_session, + mock_datasource_provider_service, + mock_notion_extractor, + mock_index_processor_factory, + mock_indexing_runner, + mock_dataset, + mock_document, + mock_document_segments, + dataset_id, + document_id, + ): + """Test successful sync flow when Notion page has been updated.""" + # Arrange + mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset] + mock_db_session.scalars.return_value.all.return_value = mock_document_segments + # NotionExtractor returns updated time + mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z" + + # Act + document_indexing_sync_task(dataset_id, document_id) + + # Assert + # Verify document status was updated to parsing + assert mock_document.indexing_status == "parsing" + assert mock_document.processing_started_at is not None + + # Verify segments were cleaned + mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value + mock_processor.clean.assert_called_once() + + # Verify segments were deleted from database + for segment in mock_document_segments: + mock_db_session.delete.assert_any_call(segment) + + # Verify indexing runner was called + mock_indexing_runner.run.assert_called_once_with([mock_document]) + + # Verify session operations + assert mock_db_session.commit.called + mock_db_session.close.assert_called_once() + + def test_dataset_not_found_during_cleaning( + self, + mock_db_session, + mock_datasource_provider_service, + mock_notion_extractor, + mock_document, + dataset_id, + document_id, + ): + """Test that task handles dataset not found during cleaning phase.""" + # Arrange + mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, None] + mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z" + + # Act + document_indexing_sync_task(dataset_id, document_id) + + # Assert + # Document should still be set to parsing + assert mock_document.indexing_status == "parsing" + # Session should be closed after error + mock_db_session.close.assert_called_once() + + def test_cleaning_error_continues_to_indexing( + self, + mock_db_session, + 
mock_datasource_provider_service, + mock_notion_extractor, + mock_index_processor_factory, + mock_indexing_runner, + mock_dataset, + mock_document, + dataset_id, + document_id, + ): + """Test that indexing continues even if cleaning fails.""" + # Arrange + mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset] + mock_db_session.scalars.return_value.all.side_effect = Exception("Cleaning error") + mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z" + + # Act + document_indexing_sync_task(dataset_id, document_id) + + # Assert + # Indexing should still be attempted despite cleaning error + mock_indexing_runner.run.assert_called_once_with([mock_document]) + mock_db_session.close.assert_called_once() + + def test_indexing_runner_document_paused_error( + self, + mock_db_session, + mock_datasource_provider_service, + mock_notion_extractor, + mock_index_processor_factory, + mock_indexing_runner, + mock_dataset, + mock_document, + mock_document_segments, + dataset_id, + document_id, + ): + """Test that DocumentIsPausedError is handled gracefully.""" + # Arrange + mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset] + mock_db_session.scalars.return_value.all.return_value = mock_document_segments + mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z" + mock_indexing_runner.run.side_effect = DocumentIsPausedError("Document paused") + + # Act + document_indexing_sync_task(dataset_id, document_id) + + # Assert + # Session should be closed after handling error + mock_db_session.close.assert_called_once() + + def test_indexing_runner_general_error( + self, + mock_db_session, + mock_datasource_provider_service, + mock_notion_extractor, + mock_index_processor_factory, + mock_indexing_runner, + mock_dataset, + mock_document, + mock_document_segments, + dataset_id, + document_id, + ): + """Test that general exceptions during indexing are handled.""" + # Arrange + mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset] + mock_db_session.scalars.return_value.all.return_value = mock_document_segments + mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z" + mock_indexing_runner.run.side_effect = Exception("Indexing error") + + # Act + document_indexing_sync_task(dataset_id, document_id) + + # Assert + # Session should be closed after error + mock_db_session.close.assert_called_once() + + def test_notion_extractor_initialized_with_correct_params( + self, + mock_db_session, + mock_datasource_provider_service, + mock_notion_extractor, + mock_document, + dataset_id, + document_id, + notion_workspace_id, + notion_page_id, + ): + """Test that NotionExtractor is initialized with correct parameters.""" + # Arrange + mock_db_session.query.return_value.where.return_value.first.return_value = mock_document + mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z" # No update + + # Act + with patch("tasks.document_indexing_sync_task.NotionExtractor") as mock_extractor_class: + mock_extractor = MagicMock() + mock_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z" + mock_extractor_class.return_value = mock_extractor + + document_indexing_sync_task(dataset_id, document_id) + + # Assert + mock_extractor_class.assert_called_once_with( + notion_workspace_id=notion_workspace_id, + notion_obj_id=notion_page_id, + 
notion_page_type="page", + notion_access_token="test_token", + tenant_id=mock_document.tenant_id, + ) + + def test_datasource_credentials_requested_correctly( + self, + mock_db_session, + mock_datasource_provider_service, + mock_notion_extractor, + mock_document, + dataset_id, + document_id, + credential_id, + ): + """Test that datasource credentials are requested with correct parameters.""" + # Arrange + mock_db_session.query.return_value.where.return_value.first.return_value = mock_document + mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z" + + # Act + document_indexing_sync_task(dataset_id, document_id) + + # Assert + mock_datasource_provider_service.get_datasource_credentials.assert_called_once_with( + tenant_id=mock_document.tenant_id, + credential_id=credential_id, + provider="notion_datasource", + plugin_id="langgenius/notion_datasource", + ) + + def test_credential_id_missing_uses_none( + self, + mock_db_session, + mock_datasource_provider_service, + mock_notion_extractor, + mock_document, + dataset_id, + document_id, + ): + """Test that task handles missing credential_id by passing None.""" + # Arrange + mock_document.data_source_info_dict = { + "notion_workspace_id": "ws123", + "notion_page_id": "page123", + "type": "page", + "last_edited_time": "2024-01-01T00:00:00Z", + } + mock_db_session.query.return_value.where.return_value.first.return_value = mock_document + mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z" + + # Act + document_indexing_sync_task(dataset_id, document_id) + + # Assert + mock_datasource_provider_service.get_datasource_credentials.assert_called_once_with( + tenant_id=mock_document.tenant_id, + credential_id=None, + provider="notion_datasource", + plugin_id="langgenius/notion_datasource", + ) + + def test_index_processor_clean_called_with_correct_params( + self, + mock_db_session, + mock_datasource_provider_service, + mock_notion_extractor, + mock_index_processor_factory, + mock_indexing_runner, + mock_dataset, + mock_document, + mock_document_segments, + dataset_id, + document_id, + ): + """Test that index processor clean is called with correct parameters.""" + # Arrange + mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset] + mock_db_session.scalars.return_value.all.return_value = mock_document_segments + mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z" + + # Act + document_indexing_sync_task(dataset_id, document_id) + + # Assert + mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value + expected_node_ids = [seg.index_node_id for seg in mock_document_segments] + mock_processor.clean.assert_called_once_with( + mock_dataset, expected_node_ids, with_keywords=True, delete_child_chunks=True + ) diff --git a/api/tests/unit_tests/tools/test_mcp_tool.py b/api/tests/unit_tests/tools/test_mcp_tool.py new file mode 100644 index 0000000000..a527773e4e --- /dev/null +++ b/api/tests/unit_tests/tools/test_mcp_tool.py @@ -0,0 +1,122 @@ +import base64 +from unittest.mock import Mock, patch + +import pytest + +from core.mcp.types import ( + AudioContent, + BlobResourceContents, + CallToolResult, + EmbeddedResource, + ImageContent, + TextResourceContents, +) +from core.tools.__base.tool_runtime import ToolRuntime +from core.tools.entities.common_entities import I18nObject +from core.tools.entities.tool_entities import ToolEntity, ToolIdentity, ToolInvokeMessage +from 
core.tools.mcp_tool.tool import MCPTool + + +def _make_mcp_tool(output_schema: dict | None = None) -> MCPTool: + identity = ToolIdentity( + author="test", + name="test_mcp_tool", + label=I18nObject(en_US="Test MCP Tool", zh_Hans="测试MCP工具"), + provider="test_provider", + ) + entity = ToolEntity(identity=identity, output_schema=output_schema or {}) + runtime = Mock(spec=ToolRuntime) + runtime.credentials = {} + return MCPTool( + entity=entity, + runtime=runtime, + tenant_id="test_tenant", + icon="", + server_url="https://server.invalid", + provider_id="provider_1", + headers={}, + ) + + +class TestMCPToolInvoke: + @pytest.mark.parametrize( + ("content_factory", "mime_type"), + [ + ( + lambda b64, mt: ImageContent(type="image", data=b64, mimeType=mt), + "image/png", + ), + ( + lambda b64, mt: AudioContent(type="audio", data=b64, mimeType=mt), + "audio/mpeg", + ), + ], + ) + def test_invoke_image_or_audio_yields_blob(self, content_factory, mime_type) -> None: + tool = _make_mcp_tool() + raw = b"\x00\x01test-bytes\x02" + b64 = base64.b64encode(raw).decode() + content = content_factory(b64, mime_type) + result = CallToolResult(content=[content]) + + with patch.object(tool, "invoke_remote_mcp_tool", return_value=result): + messages = list(tool._invoke(user_id="test_user", tool_parameters={})) + + assert len(messages) == 1 + msg = messages[0] + assert msg.type == ToolInvokeMessage.MessageType.BLOB + assert isinstance(msg.message, ToolInvokeMessage.BlobMessage) + assert msg.message.blob == raw + assert msg.meta == {"mime_type": mime_type} + + def test_invoke_embedded_text_resource_yields_text(self) -> None: + tool = _make_mcp_tool() + text_resource = TextResourceContents(uri="file://test.txt", mimeType="text/plain", text="hello world") + content = EmbeddedResource(type="resource", resource=text_resource) + result = CallToolResult(content=[content]) + + with patch.object(tool, "invoke_remote_mcp_tool", return_value=result): + messages = list(tool._invoke(user_id="test_user", tool_parameters={})) + + assert len(messages) == 1 + msg = messages[0] + assert msg.type == ToolInvokeMessage.MessageType.TEXT + assert isinstance(msg.message, ToolInvokeMessage.TextMessage) + assert msg.message.text == "hello world" + + @pytest.mark.parametrize( + ("mime_type", "expected_mime"), + [("application/pdf", "application/pdf"), (None, "application/octet-stream")], + ) + def test_invoke_embedded_blob_resource_yields_blob(self, mime_type, expected_mime) -> None: + tool = _make_mcp_tool() + raw = b"binary-data" + b64 = base64.b64encode(raw).decode() + blob_resource = BlobResourceContents(uri="file://doc.bin", mimeType=mime_type, blob=b64) + content = EmbeddedResource(type="resource", resource=blob_resource) + result = CallToolResult(content=[content]) + + with patch.object(tool, "invoke_remote_mcp_tool", return_value=result): + messages = list(tool._invoke(user_id="test_user", tool_parameters={})) + + assert len(messages) == 1 + msg = messages[0] + assert msg.type == ToolInvokeMessage.MessageType.BLOB + assert isinstance(msg.message, ToolInvokeMessage.BlobMessage) + assert msg.message.blob == raw + assert msg.meta == {"mime_type": expected_mime} + + def test_invoke_yields_variables_when_structured_content_and_schema(self) -> None: + tool = _make_mcp_tool(output_schema={"type": "object"}) + result = CallToolResult(content=[], structuredContent={"a": 1, "b": "x"}) + + with patch.object(tool, "invoke_remote_mcp_tool", return_value=result): + messages = list(tool._invoke(user_id="test_user", tool_parameters={})) + + # Expect 
two variable messages corresponding to keys a and b + assert len(messages) == 2 + var_msgs = [m for m in messages if isinstance(m.message, ToolInvokeMessage.VariableMessage)] + assert {m.message.variable_name for m in var_msgs} == {"a", "b"} + # Validate values + values = {m.message.variable_name: m.message.variable_value for m in var_msgs} + assert values == {"a": 1, "b": "x"} diff --git a/api/tests/unit_tests/utils/test_text_processing.py b/api/tests/unit_tests/utils/test_text_processing.py index 11e017464a..bf61162a66 100644 --- a/api/tests/unit_tests/utils/test_text_processing.py +++ b/api/tests/unit_tests/utils/test_text_processing.py @@ -15,6 +15,11 @@ from core.tools.utils.text_processing_utils import remove_leading_symbols ("", ""), (" ", " "), ("【测试】", "【测试】"), + # Markdown link preservation - should be preserved if text starts with a markdown link + ("[Google](https://google.com) is a search engine", "[Google](https://google.com) is a search engine"), + ("[Example](http://example.com) some text", "[Example](http://example.com) some text"), + # Leading symbols before markdown link are removed, including the opening bracket [ + ("@[Test](https://example.com)", "Test](https://example.com)"), ], ) def test_remove_leading_symbols(input_text, expected_output): diff --git a/api/uv.lock b/api/uv.lock index e6a6cf8ffc..a999c4ee18 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 1 +revision = 3 requires-python = ">=3.11, <3.13" resolution-markers = [ "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", @@ -23,27 +23,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9d/f2/7b5fac50ee42e8b8d4a098d76743a394546f938c94125adbb93414e5ae7d/abnf-2.2.0.tar.gz", hash = "sha256:433380fd32855bbc60bc7b3d35d40616e21383a32ed1c9b8893d16d9f4a6c2f4", size = 197507 } +sdist = { url = "https://files.pythonhosted.org/packages/9d/f2/7b5fac50ee42e8b8d4a098d76743a394546f938c94125adbb93414e5ae7d/abnf-2.2.0.tar.gz", hash = "sha256:433380fd32855bbc60bc7b3d35d40616e21383a32ed1c9b8893d16d9f4a6c2f4", size = 197507, upload-time = "2023-03-17T18:26:24.577Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/95/f456ae7928a2f3a913f467d4fd9e662e295dd7349fc58b35f77f6c757a23/abnf-2.2.0-py3-none-any.whl", hash = "sha256:5dc2ae31a84ff454f7de46e08a2a21a442a0e21a092468420587a1590b490d1f", size = 39938 }, + { url = "https://files.pythonhosted.org/packages/30/95/f456ae7928a2f3a913f467d4fd9e662e295dd7349fc58b35f77f6c757a23/abnf-2.2.0-py3-none-any.whl", hash = "sha256:5dc2ae31a84ff454f7de46e08a2a21a442a0e21a092468420587a1590b490d1f", size = 39938, upload-time = "2023-03-17T18:26:22.608Z" }, ] [[package]] name = "aiofiles" version = "24.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247 } +sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247, upload-time = "2024-06-24T11:02:03.584Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896 }, + { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896, upload-time = "2024-06-24T11:02:01.529Z" }, ] [[package]] name = "aiohappyeyeballs" version = "2.6.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760 } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265 }, + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, ] [[package]] @@ -59,42 +59,42 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size = 7837994 } +sdist = { url = "https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size = 7837994, upload-time = "2025-10-28T20:59:39.937Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/35/74/b321e7d7ca762638cdf8cdeceb39755d9c745aff7a64c8789be96ddf6e96/aiohttp-3.13.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4647d02df098f6434bafd7f32ad14942f05a9caa06c7016fdcc816f343997dd0", size = 743409 }, - { url = "https://files.pythonhosted.org/packages/99/3d/91524b905ec473beaf35158d17f82ef5a38033e5809fe8742e3657cdbb97/aiohttp-3.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e3403f24bcb9c3b29113611c3c16a2a447c3953ecf86b79775e7be06f7ae7ccb", size = 497006 }, - { url = "https://files.pythonhosted.org/packages/eb/d3/7f68bc02a67716fe80f063e19adbd80a642e30682ce74071269e17d2dba1/aiohttp-3.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:43dff14e35aba17e3d6d5ba628858fb8cb51e30f44724a2d2f0c75be492c55e9", size = 493195 }, - { url = "https://files.pythonhosted.org/packages/98/31/913f774a4708775433b7375c4f867d58ba58ead833af96c8af3621a0d243/aiohttp-3.13.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2a9ea08e8c58bb17655630198833109227dea914cd20be660f52215f6de5613", size = 1747759 }, - { url = 
"https://files.pythonhosted.org/packages/e8/63/04efe156f4326f31c7c4a97144f82132c3bb21859b7bb84748d452ccc17c/aiohttp-3.13.2-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53b07472f235eb80e826ad038c9d106c2f653584753f3ddab907c83f49eedead", size = 1704456 }, - { url = "https://files.pythonhosted.org/packages/8e/02/4e16154d8e0a9cf4ae76f692941fd52543bbb148f02f098ca73cab9b1c1b/aiohttp-3.13.2-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e736c93e9c274fce6419af4aac199984d866e55f8a4cec9114671d0ea9688780", size = 1807572 }, - { url = "https://files.pythonhosted.org/packages/34/58/b0583defb38689e7f06798f0285b1ffb3a6fb371f38363ce5fd772112724/aiohttp-3.13.2-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ff5e771f5dcbc81c64898c597a434f7682f2259e0cd666932a913d53d1341d1a", size = 1895954 }, - { url = "https://files.pythonhosted.org/packages/6b/f3/083907ee3437425b4e376aa58b2c915eb1a33703ec0dc30040f7ae3368c6/aiohttp-3.13.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3b6fb0c207cc661fa0bf8c66d8d9b657331ccc814f4719468af61034b478592", size = 1747092 }, - { url = "https://files.pythonhosted.org/packages/ac/61/98a47319b4e425cc134e05e5f3fc512bf9a04bf65aafd9fdcda5d57ec693/aiohttp-3.13.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:97a0895a8e840ab3520e2288db7cace3a1981300d48babeb50e7425609e2e0ab", size = 1606815 }, - { url = "https://files.pythonhosted.org/packages/97/4b/e78b854d82f66bb974189135d31fce265dee0f5344f64dd0d345158a5973/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9e8f8afb552297aca127c90cb840e9a1d4bfd6a10d7d8f2d9176e1acc69bad30", size = 1723789 }, - { url = "https://files.pythonhosted.org/packages/ed/fc/9d2ccc794fc9b9acd1379d625c3a8c64a45508b5091c546dea273a41929e/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:ed2f9c7216e53c3df02264f25d824b079cc5914f9e2deba94155190ef648ee40", size = 1718104 }, - { url = "https://files.pythonhosted.org/packages/66/65/34564b8765ea5c7d79d23c9113135d1dd3609173da13084830f1507d56cf/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:99c5280a329d5fa18ef30fd10c793a190d996567667908bef8a7f81f8202b948", size = 1785584 }, - { url = "https://files.pythonhosted.org/packages/30/be/f6a7a426e02fc82781afd62016417b3948e2207426d90a0e478790d1c8a4/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ca6ffef405fc9c09a746cb5d019c1672cd7f402542e379afc66b370833170cf", size = 1595126 }, - { url = "https://files.pythonhosted.org/packages/e5/c7/8e22d5d28f94f67d2af496f14a83b3c155d915d1fe53d94b66d425ec5b42/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:47f438b1a28e926c37632bff3c44df7d27c9b57aaf4e34b1def3c07111fdb782", size = 1800665 }, - { url = "https://files.pythonhosted.org/packages/d1/11/91133c8b68b1da9fc16555706aa7276fdf781ae2bb0876c838dd86b8116e/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9acda8604a57bb60544e4646a4615c1866ee6c04a8edef9b8ee6fd1d8fa2ddc8", size = 1739532 }, - { url = "https://files.pythonhosted.org/packages/17/6b/3747644d26a998774b21a616016620293ddefa4d63af6286f389aedac844/aiohttp-3.13.2-cp311-cp311-win32.whl", hash = "sha256:868e195e39b24aaa930b063c08bb0c17924899c16c672a28a65afded9c46c6ec", size = 431876 }, - { url = 
"https://files.pythonhosted.org/packages/c3/63/688462108c1a00eb9f05765331c107f95ae86f6b197b865d29e930b7e462/aiohttp-3.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:7fd19df530c292542636c2a9a85854fab93474396a52f1695e799186bbd7f24c", size = 456205 }, - { url = "https://files.pythonhosted.org/packages/29/9b/01f00e9856d0a73260e86dd8ed0c2234a466c5c1712ce1c281548df39777/aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b", size = 737623 }, - { url = "https://files.pythonhosted.org/packages/5a/1b/4be39c445e2b2bd0aab4ba736deb649fabf14f6757f405f0c9685019b9e9/aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc", size = 492664 }, - { url = "https://files.pythonhosted.org/packages/28/66/d35dcfea8050e131cdd731dff36434390479b4045a8d0b9d7111b0a968f1/aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7", size = 491808 }, - { url = "https://files.pythonhosted.org/packages/00/29/8e4609b93e10a853b65f8291e64985de66d4f5848c5637cddc70e98f01f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb", size = 1738863 }, - { url = "https://files.pythonhosted.org/packages/9d/fa/4ebdf4adcc0def75ced1a0d2d227577cd7b1b85beb7edad85fcc87693c75/aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3", size = 1700586 }, - { url = "https://files.pythonhosted.org/packages/da/04/73f5f02ff348a3558763ff6abe99c223381b0bace05cd4530a0258e52597/aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f", size = 1768625 }, - { url = "https://files.pythonhosted.org/packages/f8/49/a825b79ffec124317265ca7d2344a86bcffeb960743487cb11988ffb3494/aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6", size = 1867281 }, - { url = "https://files.pythonhosted.org/packages/b9/48/adf56e05f81eac31edcfae45c90928f4ad50ef2e3ea72cb8376162a368f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e", size = 1752431 }, - { url = "https://files.pythonhosted.org/packages/30/ab/593855356eead019a74e862f21523db09c27f12fd24af72dbc3555b9bfd9/aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7", size = 1562846 }, - { url = "https://files.pythonhosted.org/packages/39/0f/9f3d32271aa8dc35036e9668e31870a9d3b9542dd6b3e2c8a30931cb27ae/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d", size = 1699606 }, - { url = "https://files.pythonhosted.org/packages/2c/3c/52d2658c5699b6ef7692a3f7128b2d2d4d9775f2a68093f74bca06cf01e1/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b", size = 1720663 }, - { url = 
"https://files.pythonhosted.org/packages/9b/d4/8f8f3ff1fb7fb9e3f04fcad4e89d8a1cd8fc7d05de67e3de5b15b33008ff/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8", size = 1737939 }, - { url = "https://files.pythonhosted.org/packages/03/d3/ddd348f8a27a634daae39a1b8e291ff19c77867af438af844bf8b7e3231b/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16", size = 1555132 }, - { url = "https://files.pythonhosted.org/packages/39/b8/46790692dc46218406f94374903ba47552f2f9f90dad554eed61bfb7b64c/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169", size = 1764802 }, - { url = "https://files.pythonhosted.org/packages/ba/e4/19ce547b58ab2a385e5f0b8aa3db38674785085abcf79b6e0edd1632b12f/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248", size = 1719512 }, - { url = "https://files.pythonhosted.org/packages/70/30/6355a737fed29dcb6dfdd48682d5790cb5eab050f7b4e01f49b121d3acad/aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e", size = 426690 }, - { url = "https://files.pythonhosted.org/packages/0a/0d/b10ac09069973d112de6ef980c1f6bb31cb7dcd0bc363acbdad58f927873/aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45", size = 453465 }, + { url = "https://files.pythonhosted.org/packages/35/74/b321e7d7ca762638cdf8cdeceb39755d9c745aff7a64c8789be96ddf6e96/aiohttp-3.13.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4647d02df098f6434bafd7f32ad14942f05a9caa06c7016fdcc816f343997dd0", size = 743409, upload-time = "2025-10-28T20:56:00.354Z" }, + { url = "https://files.pythonhosted.org/packages/99/3d/91524b905ec473beaf35158d17f82ef5a38033e5809fe8742e3657cdbb97/aiohttp-3.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e3403f24bcb9c3b29113611c3c16a2a447c3953ecf86b79775e7be06f7ae7ccb", size = 497006, upload-time = "2025-10-28T20:56:01.85Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d3/7f68bc02a67716fe80f063e19adbd80a642e30682ce74071269e17d2dba1/aiohttp-3.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:43dff14e35aba17e3d6d5ba628858fb8cb51e30f44724a2d2f0c75be492c55e9", size = 493195, upload-time = "2025-10-28T20:56:03.314Z" }, + { url = "https://files.pythonhosted.org/packages/98/31/913f774a4708775433b7375c4f867d58ba58ead833af96c8af3621a0d243/aiohttp-3.13.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2a9ea08e8c58bb17655630198833109227dea914cd20be660f52215f6de5613", size = 1747759, upload-time = "2025-10-28T20:56:04.904Z" }, + { url = "https://files.pythonhosted.org/packages/e8/63/04efe156f4326f31c7c4a97144f82132c3bb21859b7bb84748d452ccc17c/aiohttp-3.13.2-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53b07472f235eb80e826ad038c9d106c2f653584753f3ddab907c83f49eedead", size = 1704456, upload-time = "2025-10-28T20:56:06.986Z" }, + { url = "https://files.pythonhosted.org/packages/8e/02/4e16154d8e0a9cf4ae76f692941fd52543bbb148f02f098ca73cab9b1c1b/aiohttp-3.13.2-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e736c93e9c274fce6419af4aac199984d866e55f8a4cec9114671d0ea9688780", size 
= 1807572, upload-time = "2025-10-28T20:56:08.558Z" }, + { url = "https://files.pythonhosted.org/packages/34/58/b0583defb38689e7f06798f0285b1ffb3a6fb371f38363ce5fd772112724/aiohttp-3.13.2-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ff5e771f5dcbc81c64898c597a434f7682f2259e0cd666932a913d53d1341d1a", size = 1895954, upload-time = "2025-10-28T20:56:10.545Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f3/083907ee3437425b4e376aa58b2c915eb1a33703ec0dc30040f7ae3368c6/aiohttp-3.13.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3b6fb0c207cc661fa0bf8c66d8d9b657331ccc814f4719468af61034b478592", size = 1747092, upload-time = "2025-10-28T20:56:12.118Z" }, + { url = "https://files.pythonhosted.org/packages/ac/61/98a47319b4e425cc134e05e5f3fc512bf9a04bf65aafd9fdcda5d57ec693/aiohttp-3.13.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:97a0895a8e840ab3520e2288db7cace3a1981300d48babeb50e7425609e2e0ab", size = 1606815, upload-time = "2025-10-28T20:56:14.191Z" }, + { url = "https://files.pythonhosted.org/packages/97/4b/e78b854d82f66bb974189135d31fce265dee0f5344f64dd0d345158a5973/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9e8f8afb552297aca127c90cb840e9a1d4bfd6a10d7d8f2d9176e1acc69bad30", size = 1723789, upload-time = "2025-10-28T20:56:16.101Z" }, + { url = "https://files.pythonhosted.org/packages/ed/fc/9d2ccc794fc9b9acd1379d625c3a8c64a45508b5091c546dea273a41929e/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:ed2f9c7216e53c3df02264f25d824b079cc5914f9e2deba94155190ef648ee40", size = 1718104, upload-time = "2025-10-28T20:56:17.655Z" }, + { url = "https://files.pythonhosted.org/packages/66/65/34564b8765ea5c7d79d23c9113135d1dd3609173da13084830f1507d56cf/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:99c5280a329d5fa18ef30fd10c793a190d996567667908bef8a7f81f8202b948", size = 1785584, upload-time = "2025-10-28T20:56:19.238Z" }, + { url = "https://files.pythonhosted.org/packages/30/be/f6a7a426e02fc82781afd62016417b3948e2207426d90a0e478790d1c8a4/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ca6ffef405fc9c09a746cb5d019c1672cd7f402542e379afc66b370833170cf", size = 1595126, upload-time = "2025-10-28T20:56:20.836Z" }, + { url = "https://files.pythonhosted.org/packages/e5/c7/8e22d5d28f94f67d2af496f14a83b3c155d915d1fe53d94b66d425ec5b42/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:47f438b1a28e926c37632bff3c44df7d27c9b57aaf4e34b1def3c07111fdb782", size = 1800665, upload-time = "2025-10-28T20:56:22.922Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/91133c8b68b1da9fc16555706aa7276fdf781ae2bb0876c838dd86b8116e/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9acda8604a57bb60544e4646a4615c1866ee6c04a8edef9b8ee6fd1d8fa2ddc8", size = 1739532, upload-time = "2025-10-28T20:56:25.924Z" }, + { url = "https://files.pythonhosted.org/packages/17/6b/3747644d26a998774b21a616016620293ddefa4d63af6286f389aedac844/aiohttp-3.13.2-cp311-cp311-win32.whl", hash = "sha256:868e195e39b24aaa930b063c08bb0c17924899c16c672a28a65afded9c46c6ec", size = 431876, upload-time = "2025-10-28T20:56:27.524Z" }, + { url = "https://files.pythonhosted.org/packages/c3/63/688462108c1a00eb9f05765331c107f95ae86f6b197b865d29e930b7e462/aiohttp-3.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:7fd19df530c292542636c2a9a85854fab93474396a52f1695e799186bbd7f24c", size = 456205, 
upload-time = "2025-10-28T20:56:29.062Z" }, + { url = "https://files.pythonhosted.org/packages/29/9b/01f00e9856d0a73260e86dd8ed0c2234a466c5c1712ce1c281548df39777/aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b", size = 737623, upload-time = "2025-10-28T20:56:30.797Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1b/4be39c445e2b2bd0aab4ba736deb649fabf14f6757f405f0c9685019b9e9/aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc", size = 492664, upload-time = "2025-10-28T20:56:32.708Z" }, + { url = "https://files.pythonhosted.org/packages/28/66/d35dcfea8050e131cdd731dff36434390479b4045a8d0b9d7111b0a968f1/aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7", size = 491808, upload-time = "2025-10-28T20:56:34.57Z" }, + { url = "https://files.pythonhosted.org/packages/00/29/8e4609b93e10a853b65f8291e64985de66d4f5848c5637cddc70e98f01f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb", size = 1738863, upload-time = "2025-10-28T20:56:36.377Z" }, + { url = "https://files.pythonhosted.org/packages/9d/fa/4ebdf4adcc0def75ced1a0d2d227577cd7b1b85beb7edad85fcc87693c75/aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3", size = 1700586, upload-time = "2025-10-28T20:56:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/da/04/73f5f02ff348a3558763ff6abe99c223381b0bace05cd4530a0258e52597/aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f", size = 1768625, upload-time = "2025-10-28T20:56:39.75Z" }, + { url = "https://files.pythonhosted.org/packages/f8/49/a825b79ffec124317265ca7d2344a86bcffeb960743487cb11988ffb3494/aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6", size = 1867281, upload-time = "2025-10-28T20:56:41.471Z" }, + { url = "https://files.pythonhosted.org/packages/b9/48/adf56e05f81eac31edcfae45c90928f4ad50ef2e3ea72cb8376162a368f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e", size = 1752431, upload-time = "2025-10-28T20:56:43.162Z" }, + { url = "https://files.pythonhosted.org/packages/30/ab/593855356eead019a74e862f21523db09c27f12fd24af72dbc3555b9bfd9/aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7", size = 1562846, upload-time = "2025-10-28T20:56:44.85Z" }, + { url = "https://files.pythonhosted.org/packages/39/0f/9f3d32271aa8dc35036e9668e31870a9d3b9542dd6b3e2c8a30931cb27ae/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d", size = 1699606, upload-time = "2025-10-28T20:56:46.519Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/3c/52d2658c5699b6ef7692a3f7128b2d2d4d9775f2a68093f74bca06cf01e1/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b", size = 1720663, upload-time = "2025-10-28T20:56:48.528Z" }, + { url = "https://files.pythonhosted.org/packages/9b/d4/8f8f3ff1fb7fb9e3f04fcad4e89d8a1cd8fc7d05de67e3de5b15b33008ff/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8", size = 1737939, upload-time = "2025-10-28T20:56:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/03/d3/ddd348f8a27a634daae39a1b8e291ff19c77867af438af844bf8b7e3231b/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16", size = 1555132, upload-time = "2025-10-28T20:56:52.568Z" }, + { url = "https://files.pythonhosted.org/packages/39/b8/46790692dc46218406f94374903ba47552f2f9f90dad554eed61bfb7b64c/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169", size = 1764802, upload-time = "2025-10-28T20:56:54.292Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e4/19ce547b58ab2a385e5f0b8aa3db38674785085abcf79b6e0edd1632b12f/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248", size = 1719512, upload-time = "2025-10-28T20:56:56.428Z" }, + { url = "https://files.pythonhosted.org/packages/70/30/6355a737fed29dcb6dfdd48682d5790cb5eab050f7b4e01f49b121d3acad/aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e", size = 426690, upload-time = "2025-10-28T20:56:58.736Z" }, + { url = "https://files.pythonhosted.org/packages/0a/0d/b10ac09069973d112de6ef980c1f6bb31cb7dcd0bc363acbdad58f927873/aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45", size = 453465, upload-time = "2025-10-28T20:57:00.795Z" }, ] [[package]] @@ -104,9 +104,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pymysql" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/29/e0/302aeffe8d90853556f47f3106b89c16cc2ec2a4d269bdfd82e3f4ae12cc/aiomysql-0.3.2.tar.gz", hash = "sha256:72d15ef5cfc34c03468eb41e1b90adb9fd9347b0b589114bd23ead569a02ac1a", size = 108311 } +sdist = { url = "https://files.pythonhosted.org/packages/29/e0/302aeffe8d90853556f47f3106b89c16cc2ec2a4d269bdfd82e3f4ae12cc/aiomysql-0.3.2.tar.gz", hash = "sha256:72d15ef5cfc34c03468eb41e1b90adb9fd9347b0b589114bd23ead569a02ac1a", size = 108311, upload-time = "2025-10-22T00:15:21.278Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/af/aae0153c3e28712adaf462328f6c7a3c196a1c1c27b491de4377dd3e6b52/aiomysql-0.3.2-py3-none-any.whl", hash = "sha256:c82c5ba04137d7afd5c693a258bea8ead2aad77101668044143a991e04632eb2", size = 71834 }, + { url = "https://files.pythonhosted.org/packages/4c/af/aae0153c3e28712adaf462328f6c7a3c196a1c1c27b491de4377dd3e6b52/aiomysql-0.3.2-py3-none-any.whl", hash = "sha256:c82c5ba04137d7afd5c693a258bea8ead2aad77101668044143a991e04632eb2", size = 71834, upload-time = "2025-10-22T00:15:15.905Z" }, ] [[package]] @@ -117,9 +117,9 @@ dependencies = [ { name = "frozenlist" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007 } +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490 }, + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] [[package]] @@ -131,9 +131,9 @@ dependencies = [ { name = "sqlalchemy" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/02/a6/74c8cadc2882977d80ad756a13857857dbcf9bd405bc80b662eb10651282/alembic-1.17.2.tar.gz", hash = "sha256:bbe9751705c5e0f14877f02d46c53d10885e377e3d90eda810a016f9baa19e8e", size = 1988064 } +sdist = { url = "https://files.pythonhosted.org/packages/02/a6/74c8cadc2882977d80ad756a13857857dbcf9bd405bc80b662eb10651282/alembic-1.17.2.tar.gz", hash = "sha256:bbe9751705c5e0f14877f02d46c53d10885e377e3d90eda810a016f9baa19e8e", size = 1988064, upload-time = "2025-11-14T20:35:04.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/88/6237e97e3385b57b5f1528647addea5cc03d4d65d5979ab24327d41fb00d/alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6", size = 248554 }, + { url = "https://files.pythonhosted.org/packages/ba/88/6237e97e3385b57b5f1528647addea5cc03d4d65d5979ab24327d41fb00d/alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6", size = 248554, upload-time = "2025-11-14T20:35:05.699Z" }, ] [[package]] @@ -146,22 +146,22 @@ dependencies = [ { name = "alibabacloud-tea" }, { name = "apscheduler" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/df/82/45ec98bd19387507cf058ce47f62d6fea288bf0511c5a101b832e13d3edd/alibabacloud-credentials-1.0.3.tar.gz", hash = "sha256:9d8707e96afc6f348e23f5677ed15a21c2dfce7cfe6669776548ee4c80e1dfaf", size = 35831 } +sdist = { url = "https://files.pythonhosted.org/packages/df/82/45ec98bd19387507cf058ce47f62d6fea288bf0511c5a101b832e13d3edd/alibabacloud-credentials-1.0.3.tar.gz", hash = "sha256:9d8707e96afc6f348e23f5677ed15a21c2dfce7cfe6669776548ee4c80e1dfaf", size = 35831, upload-time = "2025-10-14T06:39:58.97Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/df/dbd9ae9d531a40d5613573c5a22ef774ecfdcaa0dc43aad42189f89c04ce/alibabacloud_credentials-1.0.3-py3-none-any.whl", hash = "sha256:30c8302f204b663c655d97e1c283ee9f9f84a6257d7901b931477d6cf34445a8", size = 41875 }, + { url = "https://files.pythonhosted.org/packages/88/df/dbd9ae9d531a40d5613573c5a22ef774ecfdcaa0dc43aad42189f89c04ce/alibabacloud_credentials-1.0.3-py3-none-any.whl", hash = "sha256:30c8302f204b663c655d97e1c283ee9f9f84a6257d7901b931477d6cf34445a8", size = 41875, upload-time = "2025-10-14T06:39:58.029Z" }, ] [[package]] 
name = "alibabacloud-credentials-api" version = "1.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a0/87/1d7019d23891897cb076b2f7e3c81ab3c2ba91de3bb067196f675d60d34c/alibabacloud-credentials-api-1.0.0.tar.gz", hash = "sha256:8c340038d904f0218d7214a8f4088c31912bfcf279af2cbc7d9be4897a97dd2f", size = 2330 } +sdist = { url = "https://files.pythonhosted.org/packages/a0/87/1d7019d23891897cb076b2f7e3c81ab3c2ba91de3bb067196f675d60d34c/alibabacloud-credentials-api-1.0.0.tar.gz", hash = "sha256:8c340038d904f0218d7214a8f4088c31912bfcf279af2cbc7d9be4897a97dd2f", size = 2330, upload-time = "2025-01-13T05:53:04.931Z" } [[package]] name = "alibabacloud-endpoint-util" version = "0.0.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/7d/8cc92a95c920e344835b005af6ea45a0db98763ad6ad19299d26892e6c8d/alibabacloud_endpoint_util-0.0.4.tar.gz", hash = "sha256:a593eb8ddd8168d5dc2216cd33111b144f9189fcd6e9ca20e48f358a739bbf90", size = 2813 } +sdist = { url = "https://files.pythonhosted.org/packages/92/7d/8cc92a95c920e344835b005af6ea45a0db98763ad6ad19299d26892e6c8d/alibabacloud_endpoint_util-0.0.4.tar.gz", hash = "sha256:a593eb8ddd8168d5dc2216cd33111b144f9189fcd6e9ca20e48f358a739bbf90", size = 2813, upload-time = "2025-06-12T07:20:52.572Z" } [[package]] name = "alibabacloud-gateway-spi" @@ -170,7 +170,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alibabacloud-credentials" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ab/98/d7111245f17935bf72ee9bea60bbbeff2bc42cdfe24d2544db52bc517e1a/alibabacloud_gateway_spi-0.0.3.tar.gz", hash = "sha256:10d1c53a3fc5f87915fbd6b4985b98338a776e9b44a0263f56643c5048223b8b", size = 4249 } +sdist = { url = "https://files.pythonhosted.org/packages/ab/98/d7111245f17935bf72ee9bea60bbbeff2bc42cdfe24d2544db52bc517e1a/alibabacloud_gateway_spi-0.0.3.tar.gz", hash = "sha256:10d1c53a3fc5f87915fbd6b4985b98338a776e9b44a0263f56643c5048223b8b", size = 4249, upload-time = "2025-02-23T16:29:54.222Z" } [[package]] name = "alibabacloud-gpdb20160503" @@ -186,9 +186,9 @@ dependencies = [ { name = "alibabacloud-tea-openapi" }, { name = "alibabacloud-tea-util" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/15/6a/cc72e744e95c8f37fa6a84e66ae0b9b57a13ee97a0ef03d94c7127c31d75/alibabacloud_gpdb20160503-3.8.3.tar.gz", hash = "sha256:4dfcc0d9cff5a921d529d76f4bf97e2ceb9dc2fa53f00ab055f08509423d8e30", size = 155092 } +sdist = { url = "https://files.pythonhosted.org/packages/15/6a/cc72e744e95c8f37fa6a84e66ae0b9b57a13ee97a0ef03d94c7127c31d75/alibabacloud_gpdb20160503-3.8.3.tar.gz", hash = "sha256:4dfcc0d9cff5a921d529d76f4bf97e2ceb9dc2fa53f00ab055f08509423d8e30", size = 155092, upload-time = "2024-07-18T17:09:42.438Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/36/bce41704b3bf59d607590ec73a42a254c5dea27c0f707aee11d20512a200/alibabacloud_gpdb20160503-3.8.3-py3-none-any.whl", hash = "sha256:06e1c46ce5e4e9d1bcae76e76e51034196c625799d06b2efec8d46a7df323fe8", size = 156097 }, + { url = "https://files.pythonhosted.org/packages/ab/36/bce41704b3bf59d607590ec73a42a254c5dea27c0f707aee11d20512a200/alibabacloud_gpdb20160503-3.8.3-py3-none-any.whl", hash = "sha256:06e1c46ce5e4e9d1bcae76e76e51034196c625799d06b2efec8d46a7df323fe8", size = 156097, upload-time = "2024-07-18T17:09:40.414Z" }, ] [[package]] @@ -199,7 +199,7 @@ dependencies = [ { name = "alibabacloud-tea-util" }, { name = "cryptography" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/f6/50/5f41ab550d7874c623f6e992758429802c4b52a6804db437017e5387de33/alibabacloud_openapi_util-0.2.2.tar.gz", hash = "sha256:ebbc3906f554cb4bf8f513e43e8a33e8b6a3d4a0ef13617a0e14c3dda8ef52a8", size = 7201 } +sdist = { url = "https://files.pythonhosted.org/packages/f6/50/5f41ab550d7874c623f6e992758429802c4b52a6804db437017e5387de33/alibabacloud_openapi_util-0.2.2.tar.gz", hash = "sha256:ebbc3906f554cb4bf8f513e43e8a33e8b6a3d4a0ef13617a0e14c3dda8ef52a8", size = 7201, upload-time = "2023-10-23T07:44:18.523Z" } [[package]] name = "alibabacloud-openplatform20191219" @@ -211,9 +211,9 @@ dependencies = [ { name = "alibabacloud-tea-openapi" }, { name = "alibabacloud-tea-util" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4f/bf/f7fa2f3657ed352870f442434cb2f27b7f70dcd52a544a1f3998eeaf6d71/alibabacloud_openplatform20191219-2.0.0.tar.gz", hash = "sha256:e67f4c337b7542538746592c6a474bd4ae3a9edccdf62e11a32ca61fad3c9020", size = 5038 } +sdist = { url = "https://files.pythonhosted.org/packages/4f/bf/f7fa2f3657ed352870f442434cb2f27b7f70dcd52a544a1f3998eeaf6d71/alibabacloud_openplatform20191219-2.0.0.tar.gz", hash = "sha256:e67f4c337b7542538746592c6a474bd4ae3a9edccdf62e11a32ca61fad3c9020", size = 5038, upload-time = "2022-09-21T06:16:10.683Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/e5/18c75213551eeca9db1f6b41ddcc0bd87b5b6508c75a67f05cd8671847b4/alibabacloud_openplatform20191219-2.0.0-py3-none-any.whl", hash = "sha256:873821c45bca72a6c6ec7a906c9cb21554c122e88893bbac3986934dab30dd36", size = 5204 }, + { url = "https://files.pythonhosted.org/packages/94/e5/18c75213551eeca9db1f6b41ddcc0bd87b5b6508c75a67f05cd8671847b4/alibabacloud_openplatform20191219-2.0.0-py3-none-any.whl", hash = "sha256:873821c45bca72a6c6ec7a906c9cb21554c122e88893bbac3986934dab30dd36", size = 5204, upload-time = "2022-09-21T06:16:07.844Z" }, ] [[package]] @@ -227,7 +227,7 @@ dependencies = [ { name = "alibabacloud-tea-util" }, { name = "alibabacloud-tea-xml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/d1/f442dd026908fcf55340ca694bb1d027aa91e119e76ae2fbea62f2bde4f4/alibabacloud_oss_sdk-0.1.1.tar.gz", hash = "sha256:f51a368020d0964fcc0978f96736006f49f5ab6a4a4bf4f0b8549e2c659e7358", size = 46434 } +sdist = { url = "https://files.pythonhosted.org/packages/7e/d1/f442dd026908fcf55340ca694bb1d027aa91e119e76ae2fbea62f2bde4f4/alibabacloud_oss_sdk-0.1.1.tar.gz", hash = "sha256:f51a368020d0964fcc0978f96736006f49f5ab6a4a4bf4f0b8549e2c659e7358", size = 46434, upload-time = "2025-04-22T12:40:41.717Z" } [[package]] name = "alibabacloud-oss-util" @@ -236,7 +236,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alibabacloud-tea" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/02/7c/d7e812b9968247a302573daebcfef95d0f9a718f7b4bfcca8d3d83e266be/alibabacloud_oss_util-0.0.6.tar.gz", hash = "sha256:d3ecec36632434bd509a113e8cf327dc23e830ac8d9dd6949926f4e334c8b5d6", size = 10008 } +sdist = { url = "https://files.pythonhosted.org/packages/02/7c/d7e812b9968247a302573daebcfef95d0f9a718f7b4bfcca8d3d83e266be/alibabacloud_oss_util-0.0.6.tar.gz", hash = "sha256:d3ecec36632434bd509a113e8cf327dc23e830ac8d9dd6949926f4e334c8b5d6", size = 10008, upload-time = "2021-04-28T09:25:04.056Z" } [[package]] name = "alibabacloud-tea" @@ -246,7 +246,7 @@ dependencies = [ { name = "aiohttp" }, { name = "requests" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/9a/7d/b22cb9a0d4f396ee0f3f9d7f26b76b9ed93d4101add7867a2c87ed2534f5/alibabacloud-tea-0.4.3.tar.gz", hash = "sha256:ec8053d0aa8d43ebe1deb632d5c5404339b39ec9a18a0707d57765838418504a", size = 8785 } +sdist = { url = "https://files.pythonhosted.org/packages/9a/7d/b22cb9a0d4f396ee0f3f9d7f26b76b9ed93d4101add7867a2c87ed2534f5/alibabacloud-tea-0.4.3.tar.gz", hash = "sha256:ec8053d0aa8d43ebe1deb632d5c5404339b39ec9a18a0707d57765838418504a", size = 8785, upload-time = "2025-03-24T07:34:42.958Z" } [[package]] name = "alibabacloud-tea-fileform" @@ -255,7 +255,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alibabacloud-tea" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/22/8a/ef8ddf5ee0350984cad2749414b420369fe943e15e6d96b79be45367630e/alibabacloud_tea_fileform-0.0.5.tar.gz", hash = "sha256:fd00a8c9d85e785a7655059e9651f9e91784678881831f60589172387b968ee8", size = 3961 } +sdist = { url = "https://files.pythonhosted.org/packages/22/8a/ef8ddf5ee0350984cad2749414b420369fe943e15e6d96b79be45367630e/alibabacloud_tea_fileform-0.0.5.tar.gz", hash = "sha256:fd00a8c9d85e785a7655059e9651f9e91784678881831f60589172387b968ee8", size = 3961, upload-time = "2021-04-28T09:22:54.56Z" } [[package]] name = "alibabacloud-tea-openapi" @@ -268,7 +268,7 @@ dependencies = [ { name = "alibabacloud-tea-util" }, { name = "alibabacloud-tea-xml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/09/be/f594e79625e5ccfcfe7f12d7d70709a3c59e920878469c998886211c850d/alibabacloud_tea_openapi-0.3.16.tar.gz", hash = "sha256:6bffed8278597592e67860156f424bde4173a6599d7b6039fb640a3612bae292", size = 13087 } +sdist = { url = "https://files.pythonhosted.org/packages/09/be/f594e79625e5ccfcfe7f12d7d70709a3c59e920878469c998886211c850d/alibabacloud_tea_openapi-0.3.16.tar.gz", hash = "sha256:6bffed8278597592e67860156f424bde4173a6599d7b6039fb640a3612bae292", size = 13087, upload-time = "2025-07-04T09:30:10.689Z" } [[package]] name = "alibabacloud-tea-util" @@ -277,9 +277,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alibabacloud-tea" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/ee/ea90be94ad781a5055db29556744681fc71190ef444ae53adba45e1be5f3/alibabacloud_tea_util-0.3.14.tar.gz", hash = "sha256:708e7c9f64641a3c9e0e566365d2f23675f8d7c2a3e2971d9402ceede0408cdb", size = 7515 } +sdist = { url = "https://files.pythonhosted.org/packages/e9/ee/ea90be94ad781a5055db29556744681fc71190ef444ae53adba45e1be5f3/alibabacloud_tea_util-0.3.14.tar.gz", hash = "sha256:708e7c9f64641a3c9e0e566365d2f23675f8d7c2a3e2971d9402ceede0408cdb", size = 7515, upload-time = "2025-11-19T06:01:08.504Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/72/9e/c394b4e2104766fb28a1e44e3ed36e4c7773b4d05c868e482be99d5635c9/alibabacloud_tea_util-0.3.14-py3-none-any.whl", hash = "sha256:10d3e5c340d8f7ec69dd27345eb2fc5a1dab07875742525edf07bbe86db93bfe", size = 6697 }, + { url = "https://files.pythonhosted.org/packages/72/9e/c394b4e2104766fb28a1e44e3ed36e4c7773b4d05c868e482be99d5635c9/alibabacloud_tea_util-0.3.14-py3-none-any.whl", hash = "sha256:10d3e5c340d8f7ec69dd27345eb2fc5a1dab07875742525edf07bbe86db93bfe", size = 6697, upload-time = "2025-11-19T06:01:07.355Z" }, ] [[package]] @@ -289,7 +289,23 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alibabacloud-tea" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/32/eb/5e82e419c3061823f3feae9b5681588762929dc4da0176667297c2784c1a/alibabacloud_tea_xml-0.0.3.tar.gz", hash = "sha256:979cb51fadf43de77f41c69fc69c12529728919f849723eb0cd24eb7b048a90c", size = 3466 } +sdist = { url = "https://files.pythonhosted.org/packages/32/eb/5e82e419c3061823f3feae9b5681588762929dc4da0176667297c2784c1a/alibabacloud_tea_xml-0.0.3.tar.gz", hash = "sha256:979cb51fadf43de77f41c69fc69c12529728919f849723eb0cd24eb7b048a90c", size = 3466, upload-time = "2025-07-01T08:04:55.144Z" } + +[[package]] +name = "aliyun-log-python-sdk" +version = "0.9.37" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dateparser" }, + { name = "elasticsearch" }, + { name = "jmespath" }, + { name = "lz4" }, + { name = "protobuf" }, + { name = "python-dateutil" }, + { name = "requests" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/70/291d494619bb7b0cbcc00689ad995945737c2c9e0bff2733e0aa7dbaee14/aliyun_log_python_sdk-0.9.37.tar.gz", hash = "sha256:ea65c9cca3a7377cef87d568e897820338328a53a7acb1b02f1383910e103f68", size = 152549, upload-time = "2025-11-27T07:56:06.098Z" } [[package]] name = "aliyun-python-sdk-core" @@ -299,7 +315,7 @@ dependencies = [ { name = "cryptography" }, { name = "jmespath" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3e/09/da9f58eb38b4fdb97ba6523274fbf445ef6a06be64b433693da8307b4bec/aliyun-python-sdk-core-2.16.0.tar.gz", hash = "sha256:651caad597eb39d4fad6cf85133dffe92837d53bdf62db9d8f37dab6508bb8f9", size = 449555 } +sdist = { url = "https://files.pythonhosted.org/packages/3e/09/da9f58eb38b4fdb97ba6523274fbf445ef6a06be64b433693da8307b4bec/aliyun-python-sdk-core-2.16.0.tar.gz", hash = "sha256:651caad597eb39d4fad6cf85133dffe92837d53bdf62db9d8f37dab6508bb8f9", size = 449555, upload-time = "2024-10-09T06:01:01.762Z" } [[package]] name = "aliyun-python-sdk-kms" @@ -308,9 +324,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aliyun-python-sdk-core" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/2c/9877d0e6b18ecf246df671ac65a5d1d9fecbf85bdcb5d43efbde0d4662eb/aliyun-python-sdk-kms-2.16.5.tar.gz", hash = "sha256:f328a8a19d83ecbb965ffce0ec1e9930755216d104638cd95ecd362753b813b3", size = 12018 } +sdist = { url = "https://files.pythonhosted.org/packages/a8/2c/9877d0e6b18ecf246df671ac65a5d1d9fecbf85bdcb5d43efbde0d4662eb/aliyun-python-sdk-kms-2.16.5.tar.gz", hash = "sha256:f328a8a19d83ecbb965ffce0ec1e9930755216d104638cd95ecd362753b813b3", size = 12018, upload-time = "2024-08-30T09:01:20.104Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/5c/0132193d7da2c735669a1ed103b142fd63c9455984d48c5a88a1a516efaa/aliyun_python_sdk_kms-2.16.5-py2.py3-none-any.whl", hash = "sha256:24b6cdc4fd161d2942619479c8d050c63ea9cd22b044fe33b60bbb60153786f0", size = 99495 }, + { url = "https://files.pythonhosted.org/packages/11/5c/0132193d7da2c735669a1ed103b142fd63c9455984d48c5a88a1a516efaa/aliyun_python_sdk_kms-2.16.5-py2.py3-none-any.whl", hash = "sha256:24b6cdc4fd161d2942619479c8d050c63ea9cd22b044fe33b60bbb60153786f0", size = 99495, upload-time = "2024-08-30T09:01:18.462Z" }, ] [[package]] @@ -320,36 +336,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "vine" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/fc/ec94a357dfc6683d8c86f8b4cfa5416a4c36b28052ec8260c77aca96a443/amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432", 
size = 129013 } +sdist = { url = "https://files.pythonhosted.org/packages/79/fc/ec94a357dfc6683d8c86f8b4cfa5416a4c36b28052ec8260c77aca96a443/amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432", size = 129013, upload-time = "2024-11-12T19:55:44.051Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944 }, + { url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944, upload-time = "2024-11-12T19:55:41.782Z" }, ] [[package]] name = "aniso8601" version = "10.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/52179c4e3f1978d3d9a285f98c706642522750ef343e9738286130423730/aniso8601-10.0.1.tar.gz", hash = "sha256:25488f8663dd1528ae1f54f94ac1ea51ae25b4d531539b8bc707fed184d16845", size = 47190 } +sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/52179c4e3f1978d3d9a285f98c706642522750ef343e9738286130423730/aniso8601-10.0.1.tar.gz", hash = "sha256:25488f8663dd1528ae1f54f94ac1ea51ae25b4d531539b8bc707fed184d16845", size = 47190, upload-time = "2025-04-18T17:29:42.995Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/59/75/e0e10dc7ed1408c28e03a6cb2d7a407f99320eb953f229d008a7a6d05546/aniso8601-10.0.1-py2.py3-none-any.whl", hash = "sha256:eb19717fd4e0db6de1aab06f12450ab92144246b257423fe020af5748c0cb89e", size = 52848 }, + { url = "https://files.pythonhosted.org/packages/59/75/e0e10dc7ed1408c28e03a6cb2d7a407f99320eb953f229d008a7a6d05546/aniso8601-10.0.1-py2.py3-none-any.whl", hash = "sha256:eb19717fd4e0db6de1aab06f12450ab92144246b257423fe020af5748c0cb89e", size = 52848, upload-time = "2025-04-18T17:29:41.492Z" }, ] [[package]] name = "annotated-doc" version = "0.0.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288 } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303 }, + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, ] [[package]] name = "annotated-types" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = 
"sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] [[package]] @@ -361,9 +377,9 @@ dependencies = [ { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094 } +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097 }, + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, ] [[package]] @@ -373,9 +389,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "tzlocal" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d0/81/192db4f8471de5bc1f0d098783decffb1e6e69c4f8b4bc6711094691950b/apscheduler-3.11.1.tar.gz", hash = "sha256:0db77af6400c84d1747fe98a04b8b58f0080c77d11d338c4f507a9752880f221", size = 108044 } +sdist = { url = "https://files.pythonhosted.org/packages/d0/81/192db4f8471de5bc1f0d098783decffb1e6e69c4f8b4bc6711094691950b/apscheduler-3.11.1.tar.gz", hash = "sha256:0db77af6400c84d1747fe98a04b8b58f0080c77d11d338c4f507a9752880f221", size = 108044, upload-time = "2025-10-31T18:55:42.819Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/9f/d3c76f76c73fcc959d28e9def45b8b1cc3d7722660c5003b19c1022fd7f4/apscheduler-3.11.1-py3-none-any.whl", hash = "sha256:6162cb5683cb09923654fa9bdd3130c4be4bfda6ad8990971c9597ecd52965d2", size = 64278 }, + { url = "https://files.pythonhosted.org/packages/58/9f/d3c76f76c73fcc959d28e9def45b8b1cc3d7722660c5003b19c1022fd7f4/apscheduler-3.11.1-py3-none-any.whl", hash = "sha256:6162cb5683cb09923654fa9bdd3130c4be4bfda6ad8990971c9597ecd52965d2", size = 64278, upload-time = "2025-10-31T18:55:41.186Z" }, ] [[package]] @@ -391,36 +407,36 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/27/b9/8c89191eb46915e9ba7bdb473e2fb1c510b7db3635ae5ede5e65b2176b9d/arize_phoenix_otel-0.9.2.tar.gz", hash = "sha256:a48c7d41f3ac60dc75b037f036bf3306d2af4af371cdb55e247e67957749bc31", size = 11599 } +sdist = { url = "https://files.pythonhosted.org/packages/27/b9/8c89191eb46915e9ba7bdb473e2fb1c510b7db3635ae5ede5e65b2176b9d/arize_phoenix_otel-0.9.2.tar.gz", hash = "sha256:a48c7d41f3ac60dc75b037f036bf3306d2af4af371cdb55e247e67957749bc31", size = 11599, upload-time = "2025-04-14T22:05:28.637Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/3d/f64136a758c649e883315939f30fe51ad0747024b0db05fd78450801a78d/arize_phoenix_otel-0.9.2-py3-none-any.whl", hash = "sha256:5286b33c58b596ef8edd9a4255ee00fd74f774b1e5dbd9393e77e87870a14d76", size = 12560 }, + { url = "https://files.pythonhosted.org/packages/3a/3d/f64136a758c649e883315939f30fe51ad0747024b0db05fd78450801a78d/arize_phoenix_otel-0.9.2-py3-none-any.whl", hash = "sha256:5286b33c58b596ef8edd9a4255ee00fd74f774b1e5dbd9393e77e87870a14d76", size = 12560, upload-time = "2025-04-14T22:05:27.162Z" }, ] [[package]] name = "asgiref" version = "3.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/76/b9/4db2509eabd14b4a8c71d1b24c8d5734c52b8560a7b1e1a8b56c8d25568b/asgiref-3.11.0.tar.gz", hash = "sha256:13acff32519542a1736223fb79a715acdebe24286d98e8b164a73085f40da2c4", size = 37969 } +sdist = { url = "https://files.pythonhosted.org/packages/76/b9/4db2509eabd14b4a8c71d1b24c8d5734c52b8560a7b1e1a8b56c8d25568b/asgiref-3.11.0.tar.gz", hash = "sha256:13acff32519542a1736223fb79a715acdebe24286d98e8b164a73085f40da2c4", size = 37969, upload-time = "2025-11-19T15:32:20.106Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/be/317c2c55b8bbec407257d45f5c8d1b6867abc76d12043f2d3d58c538a4ea/asgiref-3.11.0-py3-none-any.whl", hash = "sha256:1db9021efadb0d9512ce8ffaf72fcef601c7b73a8807a1bb2ef143dc6b14846d", size = 24096 }, + { url = "https://files.pythonhosted.org/packages/91/be/317c2c55b8bbec407257d45f5c8d1b6867abc76d12043f2d3d58c538a4ea/asgiref-3.11.0-py3-none-any.whl", hash = "sha256:1db9021efadb0d9512ce8ffaf72fcef601c7b73a8807a1bb2ef143dc6b14846d", size = 24096, upload-time = "2025-11-19T15:32:19.004Z" }, ] [[package]] name = "async-timeout" version = "5.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274 } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233 }, + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, ] [[package]] name = "attrs" version = "25.4.0" 
source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251 } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615 }, + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] [[package]] @@ -430,9 +446,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cd/3f/1d3bbd0bf23bdd99276d4def22f29c27a914067b4cf66f753ff9b8bbd0f3/authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b", size = 164553 } +sdist = { url = "https://files.pythonhosted.org/packages/cd/3f/1d3bbd0bf23bdd99276d4def22f29c27a914067b4cf66f753ff9b8bbd0f3/authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b", size = 164553, upload-time = "2025-10-02T13:36:09.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/aa/5082412d1ee302e9e7d80b6949bc4d2a8fa1149aaab610c5fc24709605d6/authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a", size = 243608 }, + { url = "https://files.pythonhosted.org/packages/f8/aa/5082412d1ee302e9e7d80b6949bc4d2a8fa1149aaab610c5fc24709605d6/authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a", size = 243608, upload-time = "2025-10-02T13:36:07.637Z" }, ] [[package]] @@ -443,9 +459,9 @@ dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0a/c4/d4ff3bc3ddf155156460bff340bbe9533f99fac54ddea165f35a8619f162/azure_core-1.36.0.tar.gz", hash = "sha256:22e5605e6d0bf1d229726af56d9e92bc37b6e726b141a18be0b4d424131741b7", size = 351139 } +sdist = { url = "https://files.pythonhosted.org/packages/0a/c4/d4ff3bc3ddf155156460bff340bbe9533f99fac54ddea165f35a8619f162/azure_core-1.36.0.tar.gz", hash = "sha256:22e5605e6d0bf1d229726af56d9e92bc37b6e726b141a18be0b4d424131741b7", size = 351139, upload-time = "2025-10-15T00:33:49.083Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/3c/b90d5afc2e47c4a45f4bba00f9c3193b0417fad5ad3bb07869f9d12832aa/azure_core-1.36.0-py3-none-any.whl", hash = "sha256:fee9923a3a753e94a259563429f3644aaf05c486d45b1215d098115102d91d3b", size = 213302 }, + { url = "https://files.pythonhosted.org/packages/b1/3c/b90d5afc2e47c4a45f4bba00f9c3193b0417fad5ad3bb07869f9d12832aa/azure_core-1.36.0-py3-none-any.whl", hash = "sha256:fee9923a3a753e94a259563429f3644aaf05c486d45b1215d098115102d91d3b", size = 213302, upload-time = 
"2025-10-15T00:33:51.058Z" }, ] [[package]] @@ -458,9 +474,9 @@ dependencies = [ { name = "msal" }, { name = "msal-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bb/1c/bd704075e555046e24b069157ca25c81aedb4199c3e0b35acba9243a6ca6/azure-identity-1.16.1.tar.gz", hash = "sha256:6d93f04468f240d59246d8afde3091494a5040d4f141cad0f49fc0c399d0d91e", size = 236726 } +sdist = { url = "https://files.pythonhosted.org/packages/bb/1c/bd704075e555046e24b069157ca25c81aedb4199c3e0b35acba9243a6ca6/azure-identity-1.16.1.tar.gz", hash = "sha256:6d93f04468f240d59246d8afde3091494a5040d4f141cad0f49fc0c399d0d91e", size = 236726, upload-time = "2024-06-10T22:23:27.46Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/c5/ca55106564d2044ab90614381368b3756690fb7e3ab04552e17f308e4e4f/azure_identity-1.16.1-py3-none-any.whl", hash = "sha256:8fb07c25642cd4ac422559a8b50d3e77f73dcc2bbfaba419d06d6c9d7cff6726", size = 166741 }, + { url = "https://files.pythonhosted.org/packages/ef/c5/ca55106564d2044ab90614381368b3756690fb7e3ab04552e17f308e4e4f/azure_identity-1.16.1-py3-none-any.whl", hash = "sha256:8fb07c25642cd4ac422559a8b50d3e77f73dcc2bbfaba419d06d6c9d7cff6726", size = 166741, upload-time = "2024-06-10T22:23:30.906Z" }, ] [[package]] @@ -473,18 +489,18 @@ dependencies = [ { name = "isodate" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/96/95/3e3414491ce45025a1cde107b6ae72bf72049e6021597c201cd6a3029b9a/azure_storage_blob-12.26.0.tar.gz", hash = "sha256:5dd7d7824224f7de00bfeb032753601c982655173061e242f13be6e26d78d71f", size = 583332 } +sdist = { url = "https://files.pythonhosted.org/packages/96/95/3e3414491ce45025a1cde107b6ae72bf72049e6021597c201cd6a3029b9a/azure_storage_blob-12.26.0.tar.gz", hash = "sha256:5dd7d7824224f7de00bfeb032753601c982655173061e242f13be6e26d78d71f", size = 583332, upload-time = "2025-07-16T21:34:07.644Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", hash = "sha256:8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", size = 412907 }, + { url = "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", hash = "sha256:8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", size = 412907, upload-time = "2025-07-16T21:34:09.367Z" }, ] [[package]] name = "backoff" version = "2.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/47/d7/5bbeb12c44d7c4f2fb5b56abce497eb5ed9f34d85701de869acedd602619/backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba", size = 17001 } +sdist = { url = "https://files.pythonhosted.org/packages/47/d7/5bbeb12c44d7c4f2fb5b56abce497eb5ed9f34d85701de869acedd602619/backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba", size = 17001, upload-time = "2022-10-05T19:19:32.061Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148 }, + { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = 
"sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148, upload-time = "2022-10-05T19:19:30.546Z" }, ] [[package]] @@ -494,9 +510,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodejs-wheel-binaries" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c6/ba/ed69e8df732a09c8ca469f592c8e08707fe29149735b834c276d94d4a3da/basedpyright-1.31.7.tar.gz", hash = "sha256:394f334c742a19bcc5905b2455c9f5858182866b7679a6f057a70b44b049bceb", size = 22710948 } +sdist = { url = "https://files.pythonhosted.org/packages/c6/ba/ed69e8df732a09c8ca469f592c8e08707fe29149735b834c276d94d4a3da/basedpyright-1.31.7.tar.gz", hash = "sha256:394f334c742a19bcc5905b2455c9f5858182866b7679a6f057a70b44b049bceb", size = 22710948, upload-time = "2025-10-11T05:12:48.3Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/90/ce01ad2d0afdc1b82b8b5aaba27e60d2e138e39d887e71c35c55d8f1bfcd/basedpyright-1.31.7-py3-none-any.whl", hash = "sha256:7c54beb7828c9ed0028630aaa6904f395c27e5a9f5a313aa9e91fc1d11170831", size = 11817571 }, + { url = "https://files.pythonhosted.org/packages/f8/90/ce01ad2d0afdc1b82b8b5aaba27e60d2e138e39d887e71c35c55d8f1bfcd/basedpyright-1.31.7-py3-none-any.whl", hash = "sha256:7c54beb7828c9ed0028630aaa6904f395c27e5a9f5a313aa9e91fc1d11170831", size = 11817571, upload-time = "2025-10-11T05:12:45.432Z" }, ] [[package]] @@ -508,51 +524,51 @@ dependencies = [ { name = "pycryptodome" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/8d/85ec18ca2dba624cb5932bda74e926c346a7a6403a628aeda45d848edb48/bce_python_sdk-0.9.53.tar.gz", hash = "sha256:fb14b09d1064a6987025648589c8245cb7e404acd38bb900f0775f396e3d9b3e", size = 275594 } +sdist = { url = "https://files.pythonhosted.org/packages/da/8d/85ec18ca2dba624cb5932bda74e926c346a7a6403a628aeda45d848edb48/bce_python_sdk-0.9.53.tar.gz", hash = "sha256:fb14b09d1064a6987025648589c8245cb7e404acd38bb900f0775f396e3d9b3e", size = 275594, upload-time = "2025-11-21T03:48:58.869Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7d/e9/6fc142b5ac5b2e544bc155757dc28eee2b22a576ca9eaf968ac033b6dc45/bce_python_sdk-0.9.53-py3-none-any.whl", hash = "sha256:00fc46b0ff8d1700911aef82b7263533c52a63b1cc5a51449c4f715a116846a7", size = 390434 }, + { url = "https://files.pythonhosted.org/packages/7d/e9/6fc142b5ac5b2e544bc155757dc28eee2b22a576ca9eaf968ac033b6dc45/bce_python_sdk-0.9.53-py3-none-any.whl", hash = "sha256:00fc46b0ff8d1700911aef82b7263533c52a63b1cc5a51449c4f715a116846a7", size = 390434, upload-time = "2025-11-21T03:48:57.201Z" }, ] [[package]] name = "bcrypt" version = "5.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386 } +sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553 }, - { url = 
"https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009 }, - { url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029 }, - { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907 }, - { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500 }, - { url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412 }, - { url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486 }, - { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940 }, - { url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776 }, - { url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922 }, - { url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367 }, - { url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187 }, - { url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752 }, - { url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881 }, - { url = 
"https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931 }, - { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313 }, - { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290 }, - { url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253 }, - { url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084 }, - { url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185 }, - { url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656 }, - { url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662 }, - { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240 }, - { url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152 }, - { url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284 }, - { url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643 }, - { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698 }, - { url = 
"https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725 }, - { url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912 }, - { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953 }, - { url = "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", size = 271180 }, - { url = "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", size = 273791 }, - { url = "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", size = 270746 }, - { url = "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375 }, + { url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" }, + { url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" }, + { url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" }, + { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" }, + { url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" }, + { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" }, + { url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" }, + { url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" }, + { url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" }, + { url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" }, + { url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" }, + { url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", 
hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" }, + { url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" }, + { url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" }, + { url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" }, + { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" }, + { url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" }, + { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = 
"2025-09-25T19:50:34.384Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" }, + { url = "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", size = 271180, upload-time = "2025-09-25T19:50:38.575Z" }, + { url = "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", size = 273791, upload-time = "2025-09-25T19:50:39.913Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", size = 270746, upload-time = "2025-09-25T19:50:43.306Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375, upload-time = "2025-09-25T19:50:45.43Z" }, ] [[package]] @@ -562,27 +578,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "soupsieve" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/af/0b/44c39cf3b18a9280950ad63a579ce395dda4c32193ee9da7ff0aed547094/beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da", size = 505113 } +sdist = { url = "https://files.pythonhosted.org/packages/af/0b/44c39cf3b18a9280950ad63a579ce395dda4c32193ee9da7ff0aed547094/beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da", size = 505113, upload-time = "2023-04-07T15:02:49.038Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/57/f4/a69c20ee4f660081a7dedb1ac57f29be9378e04edfcb90c526b923d4bebc/beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a", size = 142979 }, + { url = "https://files.pythonhosted.org/packages/57/f4/a69c20ee4f660081a7dedb1ac57f29be9378e04edfcb90c526b923d4bebc/beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a", size = 142979, upload-time = "2023-04-07T15:02:50.77Z" }, ] [[package]] name = "billiard" version = "4.2.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6a/50/cc2b8b6e6433918a6b9a3566483b743dcd229da1e974be9b5f259db3aad7/billiard-4.2.3.tar.gz", hash = "sha256:96486f0885afc38219d02d5f0ccd5bec8226a414b834ab244008cbb0025b8dcb", size = 156450 } +sdist = { url = 
"https://files.pythonhosted.org/packages/6a/50/cc2b8b6e6433918a6b9a3566483b743dcd229da1e974be9b5f259db3aad7/billiard-4.2.3.tar.gz", hash = "sha256:96486f0885afc38219d02d5f0ccd5bec8226a414b834ab244008cbb0025b8dcb", size = 156450, upload-time = "2025-11-16T17:47:30.281Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/cc/38b6f87170908bd8aaf9e412b021d17e85f690abe00edf50192f1a4566b9/billiard-4.2.3-py3-none-any.whl", hash = "sha256:989e9b688e3abf153f307b68a1328dfacfb954e30a4f920005654e276c69236b", size = 87042 }, + { url = "https://files.pythonhosted.org/packages/b3/cc/38b6f87170908bd8aaf9e412b021d17e85f690abe00edf50192f1a4566b9/billiard-4.2.3-py3-none-any.whl", hash = "sha256:989e9b688e3abf153f307b68a1328dfacfb954e30a4f920005654e276c69236b", size = 87042, upload-time = "2025-11-16T17:47:29.005Z" }, ] [[package]] name = "blinker" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460 } +sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458 }, + { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, ] [[package]] @@ -594,9 +610,9 @@ dependencies = [ { name = "jmespath" }, { name = "s3transfer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/99/3e8b48f15580672eda20f33439fc1622bd611f6238b6d05407320e1fb98c/boto3-1.35.99.tar.gz", hash = "sha256:e0abd794a7a591d90558e92e29a9f8837d25ece8e3c120e530526fe27eba5fca", size = 111028 } +sdist = { url = "https://files.pythonhosted.org/packages/f7/99/3e8b48f15580672eda20f33439fc1622bd611f6238b6d05407320e1fb98c/boto3-1.35.99.tar.gz", hash = "sha256:e0abd794a7a591d90558e92e29a9f8837d25ece8e3c120e530526fe27eba5fca", size = 111028, upload-time = "2025-01-14T20:20:28.636Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/65/77/8bbca82f70b062181cf0ae53fd43f1ac6556f3078884bfef9da2269c06a3/boto3-1.35.99-py3-none-any.whl", hash = "sha256:83e560faaec38a956dfb3d62e05e1703ee50432b45b788c09e25107c5058bd71", size = 139178 }, + { url = "https://files.pythonhosted.org/packages/65/77/8bbca82f70b062181cf0ae53fd43f1ac6556f3078884bfef9da2269c06a3/boto3-1.35.99-py3-none-any.whl", hash = "sha256:83e560faaec38a956dfb3d62e05e1703ee50432b45b788c09e25107c5058bd71", size = 139178, upload-time = "2025-01-14T20:20:25.48Z" }, ] [[package]] @@ -608,9 +624,9 @@ dependencies = [ { name = "types-s3transfer" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fd/5b/6d274aa25f7fa09f8b7defab5cb9389e6496a7d9b76c1efcf27b0b15e868/boto3_stubs-1.41.3.tar.gz", hash = 
"sha256:c7cc9706ac969c8ea284c2d45ec45b6371745666d087c6c5e7c9d39dafdd48bc", size = 100010 } +sdist = { url = "https://files.pythonhosted.org/packages/fd/5b/6d274aa25f7fa09f8b7defab5cb9389e6496a7d9b76c1efcf27b0b15e868/boto3_stubs-1.41.3.tar.gz", hash = "sha256:c7cc9706ac969c8ea284c2d45ec45b6371745666d087c6c5e7c9d39dafdd48bc", size = 100010, upload-time = "2025-11-24T20:34:27.052Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d6/ef971013d1fc7333c6df322d98ebf4592df9c80e1966fb12732f91e9e71b/boto3_stubs-1.41.3-py3-none-any.whl", hash = "sha256:bec698419b31b499f3740f1dfb6dae6519167d9e3aa536f6f730ed280556230b", size = 69294 }, + { url = "https://files.pythonhosted.org/packages/7e/d6/ef971013d1fc7333c6df322d98ebf4592df9c80e1966fb12732f91e9e71b/boto3_stubs-1.41.3-py3-none-any.whl", hash = "sha256:bec698419b31b499f3740f1dfb6dae6519167d9e3aa536f6f730ed280556230b", size = 69294, upload-time = "2025-11-24T20:34:23.1Z" }, ] [package.optional-dependencies] @@ -627,9 +643,9 @@ dependencies = [ { name = "python-dateutil" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/9c/1df6deceee17c88f7170bad8325aa91452529d683486273928eecfd946d8/botocore-1.35.99.tar.gz", hash = "sha256:1eab44e969c39c5f3d9a3104a0836c24715579a455f12b3979a31d7cde51b3c3", size = 13490969 } +sdist = { url = "https://files.pythonhosted.org/packages/7c/9c/1df6deceee17c88f7170bad8325aa91452529d683486273928eecfd946d8/botocore-1.35.99.tar.gz", hash = "sha256:1eab44e969c39c5f3d9a3104a0836c24715579a455f12b3979a31d7cde51b3c3", size = 13490969, upload-time = "2025-01-14T20:20:11.419Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/dd/d87e2a145fad9e08d0ec6edcf9d71f838ccc7acdd919acc4c0d4a93515f8/botocore-1.35.99-py3-none-any.whl", hash = "sha256:b22d27b6b617fc2d7342090d6129000af2efd20174215948c0d7ae2da0fab445", size = 13293216 }, + { url = "https://files.pythonhosted.org/packages/fc/dd/d87e2a145fad9e08d0ec6edcf9d71f838ccc7acdd919acc4c0d4a93515f8/botocore-1.35.99-py3-none-any.whl", hash = "sha256:b22d27b6b617fc2d7342090d6129000af2efd20174215948c0d7ae2da0fab445", size = 13293216, upload-time = "2025-01-14T20:20:06.427Z" }, ] [[package]] @@ -639,9 +655,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-awscrt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ec/8f/a42c3ae68d0b9916f6e067546d73e9a24a6af8793999a742e7af0b7bffa2/botocore_stubs-1.41.3.tar.gz", hash = "sha256:bacd1647cd95259aa8fc4ccdb5b1b3893f495270c120cda0d7d210e0ae6a4170", size = 42404 } +sdist = { url = "https://files.pythonhosted.org/packages/ec/8f/a42c3ae68d0b9916f6e067546d73e9a24a6af8793999a742e7af0b7bffa2/botocore_stubs-1.41.3.tar.gz", hash = "sha256:bacd1647cd95259aa8fc4ccdb5b1b3893f495270c120cda0d7d210e0ae6a4170", size = 42404, upload-time = "2025-11-24T20:29:27.47Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/57/b7/f4a051cefaf76930c77558b31646bcce7e9b3fbdcbc89e4073783e961519/botocore_stubs-1.41.3-py3-none-any.whl", hash = "sha256:6ab911bd9f7256f1dcea2e24a4af7ae0f9f07e83d0a760bba37f028f4a2e5589", size = 66749 }, + { url = "https://files.pythonhosted.org/packages/57/b7/f4a051cefaf76930c77558b31646bcce7e9b3fbdcbc89e4073783e961519/botocore_stubs-1.41.3-py3-none-any.whl", hash = "sha256:6ab911bd9f7256f1dcea2e24a4af7ae0f9f07e83d0a760bba37f028f4a2e5589", size = 66749, upload-time = "2025-11-24T20:29:26.142Z" }, ] [[package]] @@ -651,50 +667,50 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { 
url = "https://files.pythonhosted.org/packages/14/d8/6d641573e210768816023a64966d66463f2ce9fc9945fa03290c8a18f87c/bottleneck-1.6.0.tar.gz", hash = "sha256:028d46ee4b025ad9ab4d79924113816f825f62b17b87c9e1d0d8ce144a4a0e31", size = 104311 } +sdist = { url = "https://files.pythonhosted.org/packages/14/d8/6d641573e210768816023a64966d66463f2ce9fc9945fa03290c8a18f87c/bottleneck-1.6.0.tar.gz", hash = "sha256:028d46ee4b025ad9ab4d79924113816f825f62b17b87c9e1d0d8ce144a4a0e31", size = 104311, upload-time = "2025-09-08T16:30:38.617Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/96/9d51012d729f97de1e75aad986f3ba50956742a40fc99cbab4c2aa896c1c/bottleneck-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:69ef4514782afe39db2497aaea93b1c167ab7ab3bc5e3930500ef9cf11841db7", size = 100400 }, - { url = "https://files.pythonhosted.org/packages/16/f4/4fcbebcbc42376a77e395a6838575950587e5eb82edf47d103f8daa7ba22/bottleneck-1.6.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:727363f99edc6dc83d52ed28224d4cb858c07a01c336c7499c0c2e5dd4fd3e4a", size = 375920 }, - { url = "https://files.pythonhosted.org/packages/36/13/7fa8cdc41cbf2dfe0540f98e1e0caf9ffbd681b1a0fc679a91c2698adaf9/bottleneck-1.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:847671a9e392220d1dfd2ff2524b4d61ec47b2a36ea78e169d2aa357fd9d933a", size = 367922 }, - { url = "https://files.pythonhosted.org/packages/13/7d/dccfa4a2792c1bdc0efdde8267e527727e517df1ff0d4976b84e0268c2f9/bottleneck-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:daef2603ab7b4ec4f032bb54facf5fa92dacd3a264c2fd9677c9fc22bcb5a245", size = 361379 }, - { url = "https://files.pythonhosted.org/packages/93/42/21c0fad823b71c3a8904cbb847ad45136d25573a2d001a9cff48d3985fab/bottleneck-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fc7f09bda980d967f2e9f1a746eda57479f824f66de0b92b9835c431a8c922d4", size = 371911 }, - { url = "https://files.pythonhosted.org/packages/3b/b0/830ff80f8c74577d53034c494639eac7a0ffc70935c01ceadfbe77f590c2/bottleneck-1.6.0-cp311-cp311-win32.whl", hash = "sha256:1f78bad13ad190180f73cceb92d22f4101bde3d768f4647030089f704ae7cac7", size = 107831 }, - { url = "https://files.pythonhosted.org/packages/6f/42/01d4920b0aa51fba503f112c90714547609bbe17b6ecfc1c7ae1da3183df/bottleneck-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:8f2adef59fdb9edf2983fe3a4c07e5d1b677c43e5669f4711da2c3daad8321ad", size = 113358 }, - { url = "https://files.pythonhosted.org/packages/8d/72/7e3593a2a3dd69ec831a9981a7b1443647acb66a5aec34c1620a5f7f8498/bottleneck-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3bb16a16a86a655fdbb34df672109a8a227bb5f9c9cf5bb8ae400a639bc52fa3", size = 100515 }, - { url = "https://files.pythonhosted.org/packages/b5/d4/e7bbea08f4c0f0bab819d38c1a613da5f194fba7b19aae3e2b3a27e78886/bottleneck-1.6.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0fbf5d0787af9aee6cef4db9cdd14975ce24bd02e0cc30155a51411ebe2ff35f", size = 377451 }, - { url = "https://files.pythonhosted.org/packages/fe/80/a6da430e3b1a12fd85f9fe90d3ad8fe9a527ecb046644c37b4b3f4baacfc/bottleneck-1.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d08966f4a22384862258940346a72087a6f7cebb19038fbf3a3f6690ee7fd39f", size = 368303 }, - { url = 
"https://files.pythonhosted.org/packages/30/11/abd30a49f3251f4538430e5f876df96f2b39dabf49e05c5836820d2c31fe/bottleneck-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:604f0b898b43b7bc631c564630e936a8759d2d952641c8b02f71e31dbcd9deaa", size = 361232 }, - { url = "https://files.pythonhosted.org/packages/1d/ac/1c0e09d8d92b9951f675bd42463ce76c3c3657b31c5bf53ca1f6dd9eccff/bottleneck-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d33720bad761e642abc18eda5f188ff2841191c9f63f9d0c052245decc0faeb9", size = 373234 }, - { url = "https://files.pythonhosted.org/packages/fb/ea/382c572ae3057ba885d484726bb63629d1f63abedf91c6cd23974eb35a9b/bottleneck-1.6.0-cp312-cp312-win32.whl", hash = "sha256:a1e5907ec2714efbe7075d9207b58c22ab6984a59102e4ecd78dced80dab8374", size = 108020 }, - { url = "https://files.pythonhosted.org/packages/48/ad/d71da675eef85ac153eef5111ca0caa924548c9591da00939bcabba8de8e/bottleneck-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:81e3822499f057a917b7d3972ebc631ac63c6bbcc79ad3542a66c4c40634e3a6", size = 113493 }, + { url = "https://files.pythonhosted.org/packages/83/96/9d51012d729f97de1e75aad986f3ba50956742a40fc99cbab4c2aa896c1c/bottleneck-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:69ef4514782afe39db2497aaea93b1c167ab7ab3bc5e3930500ef9cf11841db7", size = 100400, upload-time = "2025-09-08T16:29:44.464Z" }, + { url = "https://files.pythonhosted.org/packages/16/f4/4fcbebcbc42376a77e395a6838575950587e5eb82edf47d103f8daa7ba22/bottleneck-1.6.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:727363f99edc6dc83d52ed28224d4cb858c07a01c336c7499c0c2e5dd4fd3e4a", size = 375920, upload-time = "2025-09-08T16:29:45.52Z" }, + { url = "https://files.pythonhosted.org/packages/36/13/7fa8cdc41cbf2dfe0540f98e1e0caf9ffbd681b1a0fc679a91c2698adaf9/bottleneck-1.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:847671a9e392220d1dfd2ff2524b4d61ec47b2a36ea78e169d2aa357fd9d933a", size = 367922, upload-time = "2025-09-08T16:29:46.743Z" }, + { url = "https://files.pythonhosted.org/packages/13/7d/dccfa4a2792c1bdc0efdde8267e527727e517df1ff0d4976b84e0268c2f9/bottleneck-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:daef2603ab7b4ec4f032bb54facf5fa92dacd3a264c2fd9677c9fc22bcb5a245", size = 361379, upload-time = "2025-09-08T16:29:48.042Z" }, + { url = "https://files.pythonhosted.org/packages/93/42/21c0fad823b71c3a8904cbb847ad45136d25573a2d001a9cff48d3985fab/bottleneck-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fc7f09bda980d967f2e9f1a746eda57479f824f66de0b92b9835c431a8c922d4", size = 371911, upload-time = "2025-09-08T16:29:49.366Z" }, + { url = "https://files.pythonhosted.org/packages/3b/b0/830ff80f8c74577d53034c494639eac7a0ffc70935c01ceadfbe77f590c2/bottleneck-1.6.0-cp311-cp311-win32.whl", hash = "sha256:1f78bad13ad190180f73cceb92d22f4101bde3d768f4647030089f704ae7cac7", size = 107831, upload-time = "2025-09-08T16:29:51.397Z" }, + { url = "https://files.pythonhosted.org/packages/6f/42/01d4920b0aa51fba503f112c90714547609bbe17b6ecfc1c7ae1da3183df/bottleneck-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:8f2adef59fdb9edf2983fe3a4c07e5d1b677c43e5669f4711da2c3daad8321ad", size = 113358, upload-time = "2025-09-08T16:29:52.602Z" }, + { url = "https://files.pythonhosted.org/packages/8d/72/7e3593a2a3dd69ec831a9981a7b1443647acb66a5aec34c1620a5f7f8498/bottleneck-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:3bb16a16a86a655fdbb34df672109a8a227bb5f9c9cf5bb8ae400a639bc52fa3", size = 100515, upload-time = "2025-09-08T16:29:55.141Z" }, + { url = "https://files.pythonhosted.org/packages/b5/d4/e7bbea08f4c0f0bab819d38c1a613da5f194fba7b19aae3e2b3a27e78886/bottleneck-1.6.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0fbf5d0787af9aee6cef4db9cdd14975ce24bd02e0cc30155a51411ebe2ff35f", size = 377451, upload-time = "2025-09-08T16:29:56.718Z" }, + { url = "https://files.pythonhosted.org/packages/fe/80/a6da430e3b1a12fd85f9fe90d3ad8fe9a527ecb046644c37b4b3f4baacfc/bottleneck-1.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d08966f4a22384862258940346a72087a6f7cebb19038fbf3a3f6690ee7fd39f", size = 368303, upload-time = "2025-09-08T16:29:57.834Z" }, + { url = "https://files.pythonhosted.org/packages/30/11/abd30a49f3251f4538430e5f876df96f2b39dabf49e05c5836820d2c31fe/bottleneck-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:604f0b898b43b7bc631c564630e936a8759d2d952641c8b02f71e31dbcd9deaa", size = 361232, upload-time = "2025-09-08T16:29:59.104Z" }, + { url = "https://files.pythonhosted.org/packages/1d/ac/1c0e09d8d92b9951f675bd42463ce76c3c3657b31c5bf53ca1f6dd9eccff/bottleneck-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d33720bad761e642abc18eda5f188ff2841191c9f63f9d0c052245decc0faeb9", size = 373234, upload-time = "2025-09-08T16:30:00.488Z" }, + { url = "https://files.pythonhosted.org/packages/fb/ea/382c572ae3057ba885d484726bb63629d1f63abedf91c6cd23974eb35a9b/bottleneck-1.6.0-cp312-cp312-win32.whl", hash = "sha256:a1e5907ec2714efbe7075d9207b58c22ab6984a59102e4ecd78dced80dab8374", size = 108020, upload-time = "2025-09-08T16:30:01.773Z" }, + { url = "https://files.pythonhosted.org/packages/48/ad/d71da675eef85ac153eef5111ca0caa924548c9591da00939bcabba8de8e/bottleneck-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:81e3822499f057a917b7d3972ebc631ac63c6bbcc79ad3542a66c4c40634e3a6", size = 113493, upload-time = "2025-09-08T16:30:02.872Z" }, ] [[package]] name = "brotli" version = "1.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f7/16/c92ca344d646e71a43b8bb353f0a6490d7f6e06210f8554c8f874e454285/brotli-1.2.0.tar.gz", hash = "sha256:e310f77e41941c13340a95976fe66a8a95b01e783d430eeaf7a2f87e0a57dd0a", size = 7388632 } +sdist = { url = "https://files.pythonhosted.org/packages/f7/16/c92ca344d646e71a43b8bb353f0a6490d7f6e06210f8554c8f874e454285/brotli-1.2.0.tar.gz", hash = "sha256:e310f77e41941c13340a95976fe66a8a95b01e783d430eeaf7a2f87e0a57dd0a", size = 7388632, upload-time = "2025-11-05T18:39:42.86Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/ef/f285668811a9e1ddb47a18cb0b437d5fc2760d537a2fe8a57875ad6f8448/brotli-1.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:15b33fe93cedc4caaff8a0bd1eb7e3dab1c61bb22a0bf5bdfdfd97cd7da79744", size = 863110 }, - { url = "https://files.pythonhosted.org/packages/50/62/a3b77593587010c789a9d6eaa527c79e0848b7b860402cc64bc0bc28a86c/brotli-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:898be2be399c221d2671d29eed26b6b2713a02c2119168ed914e7d00ceadb56f", size = 445438 }, - { url = "https://files.pythonhosted.org/packages/cd/e1/7fadd47f40ce5549dc44493877db40292277db373da5053aff181656e16e/brotli-1.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:350c8348f0e76fff0a0fd6c26755d2653863279d086d3aa2c290a6a7251135dd", size = 1534420 }, - { url = "https://files.pythonhosted.org/packages/12/8b/1ed2f64054a5a008a4ccd2f271dbba7a5fb1a3067a99f5ceadedd4c1d5a7/brotli-1.2.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1ad3fda65ae0d93fec742a128d72e145c9c7a99ee2fcd667785d99eb25a7fe", size = 1632619 }, - { url = "https://files.pythonhosted.org/packages/89/5a/7071a621eb2d052d64efd5da2ef55ecdac7c3b0c6e4f9d519e9c66d987ef/brotli-1.2.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:40d918bce2b427a0c4ba189df7a006ac0c7277c180aee4617d99e9ccaaf59e6a", size = 1426014 }, - { url = "https://files.pythonhosted.org/packages/26/6d/0971a8ea435af5156acaaccec1a505f981c9c80227633851f2810abd252a/brotli-1.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2a7f1d03727130fc875448b65b127a9ec5d06d19d0148e7554384229706f9d1b", size = 1489661 }, - { url = "https://files.pythonhosted.org/packages/f3/75/c1baca8b4ec6c96a03ef8230fab2a785e35297632f402ebb1e78a1e39116/brotli-1.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9c79f57faa25d97900bfb119480806d783fba83cd09ee0b33c17623935b05fa3", size = 1599150 }, - { url = "https://files.pythonhosted.org/packages/0d/1a/23fcfee1c324fd48a63d7ebf4bac3a4115bdb1b00e600f80f727d850b1ae/brotli-1.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:844a8ceb8483fefafc412f85c14f2aae2fb69567bf2a0de53cdb88b73e7c43ae", size = 1493505 }, - { url = "https://files.pythonhosted.org/packages/36/e5/12904bbd36afeef53d45a84881a4810ae8810ad7e328a971ebbfd760a0b3/brotli-1.2.0-cp311-cp311-win32.whl", hash = "sha256:aa47441fa3026543513139cb8926a92a8e305ee9c71a6209ef7a97d91640ea03", size = 334451 }, - { url = "https://files.pythonhosted.org/packages/02/8b/ecb5761b989629a4758c394b9301607a5880de61ee2ee5fe104b87149ebc/brotli-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:022426c9e99fd65d9475dce5c195526f04bb8be8907607e27e747893f6ee3e24", size = 369035 }, - { url = "https://files.pythonhosted.org/packages/11/ee/b0a11ab2315c69bb9b45a2aaed022499c9c24a205c3a49c3513b541a7967/brotli-1.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:35d382625778834a7f3061b15423919aa03e4f5da34ac8e02c074e4b75ab4f84", size = 861543 }, - { url = "https://files.pythonhosted.org/packages/e1/2f/29c1459513cd35828e25531ebfcbf3e92a5e49f560b1777a9af7203eb46e/brotli-1.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a61c06b334bd99bc5ae84f1eeb36bfe01400264b3c352f968c6e30a10f9d08b", size = 444288 }, - { url = "https://files.pythonhosted.org/packages/3d/6f/feba03130d5fceadfa3a1bb102cb14650798c848b1df2a808356f939bb16/brotli-1.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:acec55bb7c90f1dfc476126f9711a8e81c9af7fb617409a9ee2953115343f08d", size = 1528071 }, - { url = "https://files.pythonhosted.org/packages/2b/38/f3abb554eee089bd15471057ba85f47e53a44a462cfce265d9bf7088eb09/brotli-1.2.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:260d3692396e1895c5034f204f0db022c056f9e2ac841593a4cf9426e2a3faca", size = 1626913 }, - { url = "https://files.pythonhosted.org/packages/03/a7/03aa61fbc3c5cbf99b44d158665f9b0dd3d8059be16c460208d9e385c837/brotli-1.2.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:072e7624b1fc4d601036ab3f4f27942ef772887e876beff0301d261210bca97f", size = 1419762 }, - { url = 
"https://files.pythonhosted.org/packages/21/1b/0374a89ee27d152a5069c356c96b93afd1b94eae83f1e004b57eb6ce2f10/brotli-1.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adedc4a67e15327dfdd04884873c6d5a01d3e3b6f61406f99b1ed4865a2f6d28", size = 1484494 }, - { url = "https://files.pythonhosted.org/packages/cf/57/69d4fe84a67aef4f524dcd075c6eee868d7850e85bf01d778a857d8dbe0a/brotli-1.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7a47ce5c2288702e09dc22a44d0ee6152f2c7eda97b3c8482d826a1f3cfc7da7", size = 1593302 }, - { url = "https://files.pythonhosted.org/packages/d5/3b/39e13ce78a8e9a621c5df3aeb5fd181fcc8caba8c48a194cd629771f6828/brotli-1.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:af43b8711a8264bb4e7d6d9a6d004c3a2019c04c01127a868709ec29962b6036", size = 1487913 }, - { url = "https://files.pythonhosted.org/packages/62/28/4d00cb9bd76a6357a66fcd54b4b6d70288385584063f4b07884c1e7286ac/brotli-1.2.0-cp312-cp312-win32.whl", hash = "sha256:e99befa0b48f3cd293dafeacdd0d191804d105d279e0b387a32054c1180f3161", size = 334362 }, - { url = "https://files.pythonhosted.org/packages/1c/4e/bc1dcac9498859d5e353c9b153627a3752868a9d5f05ce8dedd81a2354ab/brotli-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:b35c13ce241abdd44cb8ca70683f20c0c079728a36a996297adb5334adfc1c44", size = 369115 }, + { url = "https://files.pythonhosted.org/packages/7a/ef/f285668811a9e1ddb47a18cb0b437d5fc2760d537a2fe8a57875ad6f8448/brotli-1.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:15b33fe93cedc4caaff8a0bd1eb7e3dab1c61bb22a0bf5bdfdfd97cd7da79744", size = 863110, upload-time = "2025-11-05T18:38:12.978Z" }, + { url = "https://files.pythonhosted.org/packages/50/62/a3b77593587010c789a9d6eaa527c79e0848b7b860402cc64bc0bc28a86c/brotli-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:898be2be399c221d2671d29eed26b6b2713a02c2119168ed914e7d00ceadb56f", size = 445438, upload-time = "2025-11-05T18:38:14.208Z" }, + { url = "https://files.pythonhosted.org/packages/cd/e1/7fadd47f40ce5549dc44493877db40292277db373da5053aff181656e16e/brotli-1.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:350c8348f0e76fff0a0fd6c26755d2653863279d086d3aa2c290a6a7251135dd", size = 1534420, upload-time = "2025-11-05T18:38:15.111Z" }, + { url = "https://files.pythonhosted.org/packages/12/8b/1ed2f64054a5a008a4ccd2f271dbba7a5fb1a3067a99f5ceadedd4c1d5a7/brotli-1.2.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1ad3fda65ae0d93fec742a128d72e145c9c7a99ee2fcd667785d99eb25a7fe", size = 1632619, upload-time = "2025-11-05T18:38:16.094Z" }, + { url = "https://files.pythonhosted.org/packages/89/5a/7071a621eb2d052d64efd5da2ef55ecdac7c3b0c6e4f9d519e9c66d987ef/brotli-1.2.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:40d918bce2b427a0c4ba189df7a006ac0c7277c180aee4617d99e9ccaaf59e6a", size = 1426014, upload-time = "2025-11-05T18:38:17.177Z" }, + { url = "https://files.pythonhosted.org/packages/26/6d/0971a8ea435af5156acaaccec1a505f981c9c80227633851f2810abd252a/brotli-1.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2a7f1d03727130fc875448b65b127a9ec5d06d19d0148e7554384229706f9d1b", size = 1489661, upload-time = "2025-11-05T18:38:18.41Z" }, + { url = "https://files.pythonhosted.org/packages/f3/75/c1baca8b4ec6c96a03ef8230fab2a785e35297632f402ebb1e78a1e39116/brotli-1.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9c79f57faa25d97900bfb119480806d783fba83cd09ee0b33c17623935b05fa3", 
size = 1599150, upload-time = "2025-11-05T18:38:19.792Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1a/23fcfee1c324fd48a63d7ebf4bac3a4115bdb1b00e600f80f727d850b1ae/brotli-1.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:844a8ceb8483fefafc412f85c14f2aae2fb69567bf2a0de53cdb88b73e7c43ae", size = 1493505, upload-time = "2025-11-05T18:38:20.913Z" }, + { url = "https://files.pythonhosted.org/packages/36/e5/12904bbd36afeef53d45a84881a4810ae8810ad7e328a971ebbfd760a0b3/brotli-1.2.0-cp311-cp311-win32.whl", hash = "sha256:aa47441fa3026543513139cb8926a92a8e305ee9c71a6209ef7a97d91640ea03", size = 334451, upload-time = "2025-11-05T18:38:21.94Z" }, + { url = "https://files.pythonhosted.org/packages/02/8b/ecb5761b989629a4758c394b9301607a5880de61ee2ee5fe104b87149ebc/brotli-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:022426c9e99fd65d9475dce5c195526f04bb8be8907607e27e747893f6ee3e24", size = 369035, upload-time = "2025-11-05T18:38:22.941Z" }, + { url = "https://files.pythonhosted.org/packages/11/ee/b0a11ab2315c69bb9b45a2aaed022499c9c24a205c3a49c3513b541a7967/brotli-1.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:35d382625778834a7f3061b15423919aa03e4f5da34ac8e02c074e4b75ab4f84", size = 861543, upload-time = "2025-11-05T18:38:24.183Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2f/29c1459513cd35828e25531ebfcbf3e92a5e49f560b1777a9af7203eb46e/brotli-1.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a61c06b334bd99bc5ae84f1eeb36bfe01400264b3c352f968c6e30a10f9d08b", size = 444288, upload-time = "2025-11-05T18:38:25.139Z" }, + { url = "https://files.pythonhosted.org/packages/3d/6f/feba03130d5fceadfa3a1bb102cb14650798c848b1df2a808356f939bb16/brotli-1.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:acec55bb7c90f1dfc476126f9711a8e81c9af7fb617409a9ee2953115343f08d", size = 1528071, upload-time = "2025-11-05T18:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/2b/38/f3abb554eee089bd15471057ba85f47e53a44a462cfce265d9bf7088eb09/brotli-1.2.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:260d3692396e1895c5034f204f0db022c056f9e2ac841593a4cf9426e2a3faca", size = 1626913, upload-time = "2025-11-05T18:38:27.284Z" }, + { url = "https://files.pythonhosted.org/packages/03/a7/03aa61fbc3c5cbf99b44d158665f9b0dd3d8059be16c460208d9e385c837/brotli-1.2.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:072e7624b1fc4d601036ab3f4f27942ef772887e876beff0301d261210bca97f", size = 1419762, upload-time = "2025-11-05T18:38:28.295Z" }, + { url = "https://files.pythonhosted.org/packages/21/1b/0374a89ee27d152a5069c356c96b93afd1b94eae83f1e004b57eb6ce2f10/brotli-1.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adedc4a67e15327dfdd04884873c6d5a01d3e3b6f61406f99b1ed4865a2f6d28", size = 1484494, upload-time = "2025-11-05T18:38:29.29Z" }, + { url = "https://files.pythonhosted.org/packages/cf/57/69d4fe84a67aef4f524dcd075c6eee868d7850e85bf01d778a857d8dbe0a/brotli-1.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7a47ce5c2288702e09dc22a44d0ee6152f2c7eda97b3c8482d826a1f3cfc7da7", size = 1593302, upload-time = "2025-11-05T18:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/d5/3b/39e13ce78a8e9a621c5df3aeb5fd181fcc8caba8c48a194cd629771f6828/brotli-1.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:af43b8711a8264bb4e7d6d9a6d004c3a2019c04c01127a868709ec29962b6036", size = 1487913, upload-time = 
"2025-11-05T18:38:31.618Z" }, + { url = "https://files.pythonhosted.org/packages/62/28/4d00cb9bd76a6357a66fcd54b4b6d70288385584063f4b07884c1e7286ac/brotli-1.2.0-cp312-cp312-win32.whl", hash = "sha256:e99befa0b48f3cd293dafeacdd0d191804d105d279e0b387a32054c1180f3161", size = 334362, upload-time = "2025-11-05T18:38:32.939Z" }, + { url = "https://files.pythonhosted.org/packages/1c/4e/bc1dcac9498859d5e353c9b153627a3752868a9d5f05ce8dedd81a2354ab/brotli-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:b35c13ce241abdd44cb8ca70683f20c0c079728a36a996297adb5334adfc1c44", size = 369115, upload-time = "2025-11-05T18:38:33.765Z" }, ] [[package]] @@ -704,17 +720,17 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation == 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/84/85/57c314a6b35336efbbdc13e5fc9ae13f6b60a0647cfa7c1221178ac6d8ae/brotlicffi-1.2.0.0.tar.gz", hash = "sha256:34345d8d1f9d534fcac2249e57a4c3c8801a33c9942ff9f8574f67a175e17adb", size = 476682 } +sdist = { url = "https://files.pythonhosted.org/packages/84/85/57c314a6b35336efbbdc13e5fc9ae13f6b60a0647cfa7c1221178ac6d8ae/brotlicffi-1.2.0.0.tar.gz", hash = "sha256:34345d8d1f9d534fcac2249e57a4c3c8801a33c9942ff9f8574f67a175e17adb", size = 476682, upload-time = "2025-11-21T18:17:57.334Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/df/a72b284d8c7bef0ed5756b41c2eb7d0219a1dd6ac6762f1c7bdbc31ef3af/brotlicffi-1.2.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:9458d08a7ccde8e3c0afedbf2c70a8263227a68dea5ab13590593f4c0a4fd5f4", size = 432340 }, - { url = "https://files.pythonhosted.org/packages/74/2b/cc55a2d1d6fb4f5d458fba44a3d3f91fb4320aa14145799fd3a996af0686/brotlicffi-1.2.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:84e3d0020cf1bd8b8131f4a07819edee9f283721566fe044a20ec792ca8fd8b7", size = 1534002 }, - { url = "https://files.pythonhosted.org/packages/e4/9c/d51486bf366fc7d6735f0e46b5b96ca58dc005b250263525a1eea3cd5d21/brotlicffi-1.2.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:33cfb408d0cff64cd50bef268c0fed397c46fbb53944aa37264148614a62e990", size = 1536547 }, - { url = "https://files.pythonhosted.org/packages/1b/37/293a9a0a7caf17e6e657668bebb92dfe730305999fe8c0e2703b8888789c/brotlicffi-1.2.0.0-cp38-abi3-win32.whl", hash = "sha256:23e5c912fdc6fd37143203820230374d24babd078fc054e18070a647118158f6", size = 343085 }, - { url = "https://files.pythonhosted.org/packages/07/6b/6e92009df3b8b7272f85a0992b306b61c34b7ea1c4776643746e61c380ac/brotlicffi-1.2.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:f139a7cdfe4ae7859513067b736eb44d19fae1186f9e99370092f6915216451b", size = 378586 }, - { url = "https://files.pythonhosted.org/packages/a4/ec/52488a0563f1663e2ccc75834b470650f4b8bcdea3132aef3bf67219c661/brotlicffi-1.2.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fa102a60e50ddbd08de86a63431a722ea216d9bc903b000bf544149cc9b823dc", size = 402002 }, - { url = "https://files.pythonhosted.org/packages/e4/63/d4aea4835fd97da1401d798d9b8ba77227974de565faea402f520b37b10f/brotlicffi-1.2.0.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d3c4332fc808a94e8c1035950a10d04b681b03ab585ce897ae2a360d479037c", size = 406447 }, - { url = 
"https://files.pythonhosted.org/packages/62/4e/5554ecb2615ff035ef8678d4e419549a0f7a28b3f096b272174d656749fb/brotlicffi-1.2.0.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb4eb5830026b79a93bf503ad32b2c5257315e9ffc49e76b2715cffd07c8e3db", size = 402521 }, - { url = "https://files.pythonhosted.org/packages/b5/d3/b07f8f125ac52bbee5dc00ef0d526f820f67321bf4184f915f17f50a4657/brotlicffi-1.2.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3832c66e00d6d82087f20a972b2fc03e21cd99ef22705225a6f8f418a9158ecc", size = 374730 }, + { url = "https://files.pythonhosted.org/packages/e4/df/a72b284d8c7bef0ed5756b41c2eb7d0219a1dd6ac6762f1c7bdbc31ef3af/brotlicffi-1.2.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:9458d08a7ccde8e3c0afedbf2c70a8263227a68dea5ab13590593f4c0a4fd5f4", size = 432340, upload-time = "2025-11-21T18:17:42.277Z" }, + { url = "https://files.pythonhosted.org/packages/74/2b/cc55a2d1d6fb4f5d458fba44a3d3f91fb4320aa14145799fd3a996af0686/brotlicffi-1.2.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:84e3d0020cf1bd8b8131f4a07819edee9f283721566fe044a20ec792ca8fd8b7", size = 1534002, upload-time = "2025-11-21T18:17:43.746Z" }, + { url = "https://files.pythonhosted.org/packages/e4/9c/d51486bf366fc7d6735f0e46b5b96ca58dc005b250263525a1eea3cd5d21/brotlicffi-1.2.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:33cfb408d0cff64cd50bef268c0fed397c46fbb53944aa37264148614a62e990", size = 1536547, upload-time = "2025-11-21T18:17:45.729Z" }, + { url = "https://files.pythonhosted.org/packages/1b/37/293a9a0a7caf17e6e657668bebb92dfe730305999fe8c0e2703b8888789c/brotlicffi-1.2.0.0-cp38-abi3-win32.whl", hash = "sha256:23e5c912fdc6fd37143203820230374d24babd078fc054e18070a647118158f6", size = 343085, upload-time = "2025-11-21T18:17:48.887Z" }, + { url = "https://files.pythonhosted.org/packages/07/6b/6e92009df3b8b7272f85a0992b306b61c34b7ea1c4776643746e61c380ac/brotlicffi-1.2.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:f139a7cdfe4ae7859513067b736eb44d19fae1186f9e99370092f6915216451b", size = 378586, upload-time = "2025-11-21T18:17:50.531Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ec/52488a0563f1663e2ccc75834b470650f4b8bcdea3132aef3bf67219c661/brotlicffi-1.2.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fa102a60e50ddbd08de86a63431a722ea216d9bc903b000bf544149cc9b823dc", size = 402002, upload-time = "2025-11-21T18:17:51.76Z" }, + { url = "https://files.pythonhosted.org/packages/e4/63/d4aea4835fd97da1401d798d9b8ba77227974de565faea402f520b37b10f/brotlicffi-1.2.0.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d3c4332fc808a94e8c1035950a10d04b681b03ab585ce897ae2a360d479037c", size = 406447, upload-time = "2025-11-21T18:17:53.614Z" }, + { url = "https://files.pythonhosted.org/packages/62/4e/5554ecb2615ff035ef8678d4e419549a0f7a28b3f096b272174d656749fb/brotlicffi-1.2.0.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb4eb5830026b79a93bf503ad32b2c5257315e9ffc49e76b2715cffd07c8e3db", size = 402521, upload-time = "2025-11-21T18:17:54.875Z" }, + { url = "https://files.pythonhosted.org/packages/b5/d3/b07f8f125ac52bbee5dc00ef0d526f820f67321bf4184f915f17f50a4657/brotlicffi-1.2.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3832c66e00d6d82087f20a972b2fc03e21cd99ef22705225a6f8f418a9158ecc", size = 374730, 
upload-time = "2025-11-21T18:17:56.334Z" }, ] [[package]] @@ -724,9 +740,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beautifulsoup4" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/aa/4acaf814ff901145da37332e05bb510452ebed97bc9602695059dd46ef39/bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925", size = 698 } +sdist = { url = "https://files.pythonhosted.org/packages/c9/aa/4acaf814ff901145da37332e05bb510452ebed97bc9602695059dd46ef39/bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925", size = 698, upload-time = "2024-01-17T18:15:47.371Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/bb/bf7aab772a159614954d84aa832c129624ba6c32faa559dfb200a534e50b/bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc", size = 1189 }, + { url = "https://files.pythonhosted.org/packages/51/bb/bf7aab772a159614954d84aa832c129624ba6c32faa559dfb200a534e50b/bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc", size = 1189, upload-time = "2024-01-17T18:15:48.613Z" }, ] [[package]] @@ -738,18 +754,18 @@ dependencies = [ { name = "packaging" }, { name = "pyproject-hooks" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/25/1c/23e33405a7c9eac261dff640926b8b5adaed6a6eb3e1767d441ed611d0c0/build-1.3.0.tar.gz", hash = "sha256:698edd0ea270bde950f53aed21f3a0135672206f3911e0176261a31e0e07b397", size = 48544 } +sdist = { url = "https://files.pythonhosted.org/packages/25/1c/23e33405a7c9eac261dff640926b8b5adaed6a6eb3e1767d441ed611d0c0/build-1.3.0.tar.gz", hash = "sha256:698edd0ea270bde950f53aed21f3a0135672206f3911e0176261a31e0e07b397", size = 48544, upload-time = "2025-08-01T21:27:09.268Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/8c/2b30c12155ad8de0cf641d76a8b396a16d2c36bc6d50b621a62b7c4567c1/build-1.3.0-py3-none-any.whl", hash = "sha256:7145f0b5061ba90a1500d60bd1b13ca0a8a4cebdd0cc16ed8adf1c0e739f43b4", size = 23382 }, + { url = "https://files.pythonhosted.org/packages/cb/8c/2b30c12155ad8de0cf641d76a8b396a16d2c36bc6d50b621a62b7c4567c1/build-1.3.0-py3-none-any.whl", hash = "sha256:7145f0b5061ba90a1500d60bd1b13ca0a8a4cebdd0cc16ed8adf1c0e739f43b4", size = 23382, upload-time = "2025-08-01T21:27:07.844Z" }, ] [[package]] name = "cachetools" version = "5.3.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/4d/27a3e6dd09011649ad5210bdf963765bc8fa81a0827a4fc01bafd2705c5b/cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105", size = 26522 } +sdist = { url = "https://files.pythonhosted.org/packages/b3/4d/27a3e6dd09011649ad5210bdf963765bc8fa81a0827a4fc01bafd2705c5b/cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105", size = 26522, upload-time = "2024-02-26T20:33:23.386Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/2b/a64c2d25a37aeb921fddb929111413049fc5f8b9a4c1aefaffaafe768d54/cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945", size = 9325 }, + { url = "https://files.pythonhosted.org/packages/fb/2b/a64c2d25a37aeb921fddb929111413049fc5f8b9a4c1aefaffaafe768d54/cachetools-5.3.3-py3-none-any.whl", hash = 
"sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945", size = 9325, upload-time = "2024-02-26T20:33:20.308Z" }, ] [[package]] @@ -766,9 +782,9 @@ dependencies = [ { name = "python-dateutil" }, { name = "vine" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bb/7d/6c289f407d219ba36d8b384b42489ebdd0c84ce9c413875a8aae0c85f35b/celery-5.5.3.tar.gz", hash = "sha256:6c972ae7968c2b5281227f01c3a3f984037d21c5129d07bf3550cc2afc6b10a5", size = 1667144 } +sdist = { url = "https://files.pythonhosted.org/packages/bb/7d/6c289f407d219ba36d8b384b42489ebdd0c84ce9c413875a8aae0c85f35b/celery-5.5.3.tar.gz", hash = "sha256:6c972ae7968c2b5281227f01c3a3f984037d21c5129d07bf3550cc2afc6b10a5", size = 1667144, upload-time = "2025-06-01T11:08:12.563Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/af/0dcccc7fdcdf170f9a1585e5e96b6fb0ba1749ef6be8c89a6202284759bd/celery-5.5.3-py3-none-any.whl", hash = "sha256:0b5761a07057acee94694464ca482416b959568904c9dfa41ce8413a7d65d525", size = 438775 }, + { url = "https://files.pythonhosted.org/packages/c9/af/0dcccc7fdcdf170f9a1585e5e96b6fb0ba1749ef6be8c89a6202284759bd/celery-5.5.3-py3-none-any.whl", hash = "sha256:0b5761a07057acee94694464ca482416b959568904c9dfa41ce8413a7d65d525", size = 438775, upload-time = "2025-06-01T11:08:09.94Z" }, ] [[package]] @@ -778,18 +794,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/d1/0823e71c281e4ad0044e278cf1577d1a68e05f2809424bf94e1614925c5d/celery_types-0.23.0.tar.gz", hash = "sha256:402ed0555aea3cd5e1e6248f4632e4f18eec8edb2435173f9e6dc08449fa101e", size = 31479 } +sdist = { url = "https://files.pythonhosted.org/packages/e9/d1/0823e71c281e4ad0044e278cf1577d1a68e05f2809424bf94e1614925c5d/celery_types-0.23.0.tar.gz", hash = "sha256:402ed0555aea3cd5e1e6248f4632e4f18eec8edb2435173f9e6dc08449fa101e", size = 31479, upload-time = "2025-03-03T23:56:51.547Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/8b/92bb54dd74d145221c3854aa245c84f4dc04cc9366147496182cec8e88e3/celery_types-0.23.0-py3-none-any.whl", hash = "sha256:0cc495b8d7729891b7e070d0ec8d4906d2373209656a6e8b8276fe1ed306af9a", size = 50189 }, + { url = "https://files.pythonhosted.org/packages/6f/8b/92bb54dd74d145221c3854aa245c84f4dc04cc9366147496182cec8e88e3/celery_types-0.23.0-py3-none-any.whl", hash = "sha256:0cc495b8d7729891b7e070d0ec8d4906d2373209656a6e8b8276fe1ed306af9a", size = 50189, upload-time = "2025-03-03T23:56:50.458Z" }, ] [[package]] name = "certifi" version = "2025.11.12" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438 }, + { url = 
"https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, ] [[package]] @@ -799,83 +815,83 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycparser", marker = "implementation_name != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588 } +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344 }, - { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560 }, - { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613 }, - { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476 }, - { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374 }, - { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597 }, - { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574 }, - { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971 }, - { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972 }, - { url = 
"https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078 }, - { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076 }, - { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820 }, - { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635 }, - { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271 }, - { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048 }, - { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529 }, - { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097 }, - { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983 }, - { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519 }, - { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572 }, - { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963 }, - { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 
221361 }, - { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932 }, - { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557 }, - { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762 }, + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", 
hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, ] [[package]] name = "chardet" version = "5.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/32/cdc91dcf83849c7385bf8e2a5693d87376536ed000807fa07f5eab33430d/chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5", size = 2069617 } +sdist = { url = "https://files.pythonhosted.org/packages/41/32/cdc91dcf83849c7385bf8e2a5693d87376536ed000807fa07f5eab33430d/chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5", size = 2069617, upload-time = "2022-12-01T22:34:18.086Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/74/8f/8fc49109009e8d2169d94d72e6b1f4cd45c13d147ba7d6170fb41f22b08f/chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9", size = 199124 }, + { url = "https://files.pythonhosted.org/packages/74/8f/8fc49109009e8d2169d94d72e6b1f4cd45c13d147ba7d6170fb41f22b08f/chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9", size = 199124, upload-time = "2022-12-01T22:34:14.609Z" }, ] [[package]] name = "charset-normalizer" version = "3.4.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418 } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988 }, - { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324 }, - { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742 }, - { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863 }, - { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837 }, - { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550 }, - { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162 }, - { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019 }, - { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310 }, - { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022 }, - { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383 }, - { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098 }, - { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991 }, - { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456 }, - { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978 }, - { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969 }, - { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425 }, - { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162 }, - { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558 }, - { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497 }, - { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240 }, - { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471 }, - { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864 }, - { url = 
"https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647 }, - { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110 }, - { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839 }, - { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667 }, - { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535 }, - { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816 }, - { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694 }, - { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131 }, - { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390 }, - { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402 }, + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, ] [[package]] @@ -885,17 +901,17 @@ source = 
{ registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/73/09/10d57569e399ce9cbc5eee2134996581c957f63a9addfa6ca657daf006b8/chroma_hnswlib-0.7.6.tar.gz", hash = "sha256:4dce282543039681160259d29fcde6151cc9106c6461e0485f57cdccd83059b7", size = 32256 } +sdist = { url = "https://files.pythonhosted.org/packages/73/09/10d57569e399ce9cbc5eee2134996581c957f63a9addfa6ca657daf006b8/chroma_hnswlib-0.7.6.tar.gz", hash = "sha256:4dce282543039681160259d29fcde6151cc9106c6461e0485f57cdccd83059b7", size = 32256, upload-time = "2024-07-22T20:19:29.259Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f5/af/d15fdfed2a204c0f9467ad35084fbac894c755820b203e62f5dcba2d41f1/chroma_hnswlib-0.7.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81181d54a2b1e4727369486a631f977ffc53c5533d26e3d366dda243fb0998ca", size = 196911 }, - { url = "https://files.pythonhosted.org/packages/0d/19/aa6f2139f1ff7ad23a690ebf2a511b2594ab359915d7979f76f3213e46c4/chroma_hnswlib-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4b4ab4e11f1083dd0a11ee4f0e0b183ca9f0f2ed63ededba1935b13ce2b3606f", size = 185000 }, - { url = "https://files.pythonhosted.org/packages/79/b1/1b269c750e985ec7d40b9bbe7d66d0a890e420525187786718e7f6b07913/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53db45cd9173d95b4b0bdccb4dbff4c54a42b51420599c32267f3abbeb795170", size = 2377289 }, - { url = "https://files.pythonhosted.org/packages/c7/2d/d5663e134436e5933bc63516a20b5edc08b4c1b1588b9680908a5f1afd04/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c093f07a010b499c00a15bc9376036ee4800d335360570b14f7fe92badcdcf9", size = 2411755 }, - { url = "https://files.pythonhosted.org/packages/3e/79/1bce519cf186112d6d5ce2985392a89528c6e1e9332d680bf752694a4cdf/chroma_hnswlib-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:0540b0ac96e47d0aa39e88ea4714358ae05d64bbe6bf33c52f316c664190a6a3", size = 151888 }, - { url = "https://files.pythonhosted.org/packages/93/ac/782b8d72de1c57b64fdf5cb94711540db99a92768d93d973174c62d45eb8/chroma_hnswlib-0.7.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e87e9b616c281bfbe748d01705817c71211613c3b063021f7ed5e47173556cb7", size = 197804 }, - { url = "https://files.pythonhosted.org/packages/32/4e/fd9ce0764228e9a98f6ff46af05e92804090b5557035968c5b4198bc7af9/chroma_hnswlib-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec5ca25bc7b66d2ecbf14502b5729cde25f70945d22f2aaf523c2d747ea68912", size = 185421 }, - { url = "https://files.pythonhosted.org/packages/d9/3d/b59a8dedebd82545d873235ef2d06f95be244dfece7ee4a1a6044f080b18/chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:305ae491de9d5f3c51e8bd52d84fdf2545a4a2bc7af49765cda286b7bb30b1d4", size = 2389672 }, - { url = "https://files.pythonhosted.org/packages/74/1e/80a033ea4466338824974a34f418e7b034a7748bf906f56466f5caa434b0/chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:822ede968d25a2c88823ca078a58f92c9b5c4142e38c7c8b4c48178894a0a3c5", size = 2436986 }, + { url = "https://files.pythonhosted.org/packages/f5/af/d15fdfed2a204c0f9467ad35084fbac894c755820b203e62f5dcba2d41f1/chroma_hnswlib-0.7.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81181d54a2b1e4727369486a631f977ffc53c5533d26e3d366dda243fb0998ca", size = 196911, upload-time = "2024-07-22T20:18:33.46Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/19/aa6f2139f1ff7ad23a690ebf2a511b2594ab359915d7979f76f3213e46c4/chroma_hnswlib-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4b4ab4e11f1083dd0a11ee4f0e0b183ca9f0f2ed63ededba1935b13ce2b3606f", size = 185000, upload-time = "2024-07-22T20:18:36.16Z" }, + { url = "https://files.pythonhosted.org/packages/79/b1/1b269c750e985ec7d40b9bbe7d66d0a890e420525187786718e7f6b07913/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53db45cd9173d95b4b0bdccb4dbff4c54a42b51420599c32267f3abbeb795170", size = 2377289, upload-time = "2024-07-22T20:18:37.761Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2d/d5663e134436e5933bc63516a20b5edc08b4c1b1588b9680908a5f1afd04/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c093f07a010b499c00a15bc9376036ee4800d335360570b14f7fe92badcdcf9", size = 2411755, upload-time = "2024-07-22T20:18:39.949Z" }, + { url = "https://files.pythonhosted.org/packages/3e/79/1bce519cf186112d6d5ce2985392a89528c6e1e9332d680bf752694a4cdf/chroma_hnswlib-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:0540b0ac96e47d0aa39e88ea4714358ae05d64bbe6bf33c52f316c664190a6a3", size = 151888, upload-time = "2024-07-22T20:18:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/93/ac/782b8d72de1c57b64fdf5cb94711540db99a92768d93d973174c62d45eb8/chroma_hnswlib-0.7.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e87e9b616c281bfbe748d01705817c71211613c3b063021f7ed5e47173556cb7", size = 197804, upload-time = "2024-07-22T20:18:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/32/4e/fd9ce0764228e9a98f6ff46af05e92804090b5557035968c5b4198bc7af9/chroma_hnswlib-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec5ca25bc7b66d2ecbf14502b5729cde25f70945d22f2aaf523c2d747ea68912", size = 185421, upload-time = "2024-07-22T20:18:47.72Z" }, + { url = "https://files.pythonhosted.org/packages/d9/3d/b59a8dedebd82545d873235ef2d06f95be244dfece7ee4a1a6044f080b18/chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:305ae491de9d5f3c51e8bd52d84fdf2545a4a2bc7af49765cda286b7bb30b1d4", size = 2389672, upload-time = "2024-07-22T20:18:49.583Z" }, + { url = "https://files.pythonhosted.org/packages/74/1e/80a033ea4466338824974a34f418e7b034a7748bf906f56466f5caa434b0/chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:822ede968d25a2c88823ca078a58f92c9b5c4142e38c7c8b4c48178894a0a3c5", size = 2436986, upload-time = "2024-07-22T20:18:51.872Z" }, ] [[package]] @@ -932,18 +948,18 @@ dependencies = [ { name = "typing-extensions" }, { name = "uvicorn", extra = ["standard"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/03/31/6c8e05405bb02b4a1f71f9aa3eef242415565dabf6afc1bde7f64f726963/chromadb-0.5.20.tar.gz", hash = "sha256:19513a23b2d20059866216bfd80195d1d4a160ffba234b8899f5e80978160ca7", size = 33664540 } +sdist = { url = "https://files.pythonhosted.org/packages/03/31/6c8e05405bb02b4a1f71f9aa3eef242415565dabf6afc1bde7f64f726963/chromadb-0.5.20.tar.gz", hash = "sha256:19513a23b2d20059866216bfd80195d1d4a160ffba234b8899f5e80978160ca7", size = 33664540, upload-time = "2024-11-19T05:13:58.678Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/7a/10bf5dc92d13cc03230190fcc5016a0b138d99e5b36b8b89ee0fe1680e10/chromadb-0.5.20-py3-none-any.whl", hash = "sha256:9550ba1b6dce911e35cac2568b301badf4b42f457b99a432bdeec2b6b9dd3680", size = 617884 }, + 
{ url = "https://files.pythonhosted.org/packages/5f/7a/10bf5dc92d13cc03230190fcc5016a0b138d99e5b36b8b89ee0fe1680e10/chromadb-0.5.20-py3-none-any.whl", hash = "sha256:9550ba1b6dce911e35cac2568b301badf4b42f457b99a432bdeec2b6b9dd3680", size = 617884, upload-time = "2024-11-19T05:13:56.29Z" }, ] [[package]] name = "cint" version = "1.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3e/c8/3ae22fa142be0bf9eee856e90c314f4144dfae376cc5e3e55b9a169670fb/cint-1.0.0.tar.gz", hash = "sha256:66f026d28c46ef9ea9635be5cb342506c6a1af80d11cb1c881a8898ca429fc91", size = 4641 } +sdist = { url = "https://files.pythonhosted.org/packages/3e/c8/3ae22fa142be0bf9eee856e90c314f4144dfae376cc5e3e55b9a169670fb/cint-1.0.0.tar.gz", hash = "sha256:66f026d28c46ef9ea9635be5cb342506c6a1af80d11cb1c881a8898ca429fc91", size = 4641, upload-time = "2019-03-19T01:07:48.723Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/c2/898e59963084e1e2cbd4aad1dee92c5bd7a79d121dcff1e659c2a0c2174e/cint-1.0.0-py3-none-any.whl", hash = "sha256:8aa33028e04015711c0305f918cb278f1dc8c5c9997acdc45efad2c7cb1abf50", size = 5573 }, + { url = "https://files.pythonhosted.org/packages/91/c2/898e59963084e1e2cbd4aad1dee92c5bd7a79d121dcff1e659c2a0c2174e/cint-1.0.0-py3-none-any.whl", hash = "sha256:8aa33028e04015711c0305f918cb278f1dc8c5c9997acdc45efad2c7cb1abf50", size = 5573, upload-time = "2019-03-19T01:07:46.496Z" }, ] [[package]] @@ -953,9 +969,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065 } +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274 }, + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, ] [[package]] @@ -965,9 +981,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1d/ce/edb087fb53de63dad3b36408ca30368f438738098e668b78c87f93cd41df/click_default_group-1.2.4.tar.gz", hash = "sha256:eb3f3c99ec0d456ca6cd2a7f08f7d4e91771bef51b01bdd9580cc6450fe1251e", size = 3505 } +sdist = { url = "https://files.pythonhosted.org/packages/1d/ce/edb087fb53de63dad3b36408ca30368f438738098e668b78c87f93cd41df/click_default_group-1.2.4.tar.gz", hash = "sha256:eb3f3c99ec0d456ca6cd2a7f08f7d4e91771bef51b01bdd9580cc6450fe1251e", size = 3505, upload-time = "2023-08-04T07:54:58.425Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/1a/aff8bb287a4b1400f69e09a53bd65de96aa5cee5691925b38731c67fc695/click_default_group-1.2.4-py2.py3-none-any.whl", hash 
= "sha256:9b60486923720e7fc61731bdb32b617039aba820e22e1c88766b1125592eaa5f", size = 4123 }, + { url = "https://files.pythonhosted.org/packages/2c/1a/aff8bb287a4b1400f69e09a53bd65de96aa5cee5691925b38731c67fc695/click_default_group-1.2.4-py2.py3-none-any.whl", hash = "sha256:9b60486923720e7fc61731bdb32b617039aba820e22e1c88766b1125592eaa5f", size = 4123, upload-time = "2023-08-04T07:54:56.875Z" }, ] [[package]] @@ -977,9 +993,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089 } +sdist = { url = "https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089, upload-time = "2024-03-24T08:22:07.499Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631 }, + { url = "https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631, upload-time = "2024-03-24T08:22:06.356Z" }, ] [[package]] @@ -989,9 +1005,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/a4/34847b59150da33690a36da3681d6bbc2ec14ee9a846bc30a6746e5984e4/click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261", size = 8343 } +sdist = { url = "https://files.pythonhosted.org/packages/c3/a4/34847b59150da33690a36da3681d6bbc2ec14ee9a846bc30a6746e5984e4/click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261", size = 8343, upload-time = "2025-06-25T00:47:37.555Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6", size = 11051 }, + { url = "https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6", size = 11051, upload-time = "2025-06-25T00:47:36.731Z" }, ] [[package]] @@ -1002,9 +1018,9 @@ dependencies = [ { name = "click" }, { name = "prompt-toolkit" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449 } +sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449, upload-time = "2023-06-15T12:43:51.141Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289 }, + { url = "https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289, upload-time = "2023-06-15T12:43:48.626Z" }, ] [[package]] @@ -1018,24 +1034,24 @@ dependencies = [ { name = "urllib3" }, { name = "zstandard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7b/fd/f8bea1157d40f117248dcaa9abdbf68c729513fcf2098ab5cb4aa58768b8/clickhouse_connect-0.10.0.tar.gz", hash = "sha256:a0256328802c6e5580513e197cef7f9ba49a99fc98e9ba410922873427569564", size = 104753 } +sdist = { url = "https://files.pythonhosted.org/packages/7b/fd/f8bea1157d40f117248dcaa9abdbf68c729513fcf2098ab5cb4aa58768b8/clickhouse_connect-0.10.0.tar.gz", hash = "sha256:a0256328802c6e5580513e197cef7f9ba49a99fc98e9ba410922873427569564", size = 104753, upload-time = "2025-11-14T20:31:00.947Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/4e/f90caf963d14865c7a3f0e5d80b77e67e0fe0bf39b3de84110707746fa6b/clickhouse_connect-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:195f1824405501b747b572e1365c6265bb1629eeb712ce91eda91da3c5794879", size = 272911 }, - { url = "https://files.pythonhosted.org/packages/50/c7/e01bd2dd80ea4fbda8968e5022c60091a872fd9de0a123239e23851da231/clickhouse_connect-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7907624635fe7f28e1b85c7c8b125a72679a63ecdb0b9f4250b704106ef438f8", size = 265938 }, - { url = "https://files.pythonhosted.org/packages/f4/07/8b567b949abca296e118331d13380bbdefa4225d7d1d32233c59d4b4b2e1/clickhouse_connect-0.10.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60772faa54d56f0fa34650460910752a583f5948f44dddeabfafaecbca21fc54", size = 1113548 }, - { url = "https://files.pythonhosted.org/packages/9c/13/11f2d37fc95e74d7e2d80702cde87666ce372486858599a61f5209e35fc5/clickhouse_connect-0.10.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7fe2a6cd98517330c66afe703fb242c0d3aa2c91f2f7dc9fb97c122c5c60c34b", size = 1135061 }, - { url = "https://files.pythonhosted.org/packages/a0/d0/517181ea80060f84d84cff4d42d330c80c77bb352b728fb1f9681fbad291/clickhouse_connect-0.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a2427d312bc3526520a0be8c648479af3f6353da7a33a62db2368d6203b08efd", size = 1105105 }, - { url = "https://files.pythonhosted.org/packages/7c/b2/4ad93e898562725b58c537cad83ab2694c9b1c1ef37fa6c3f674bdad366a/clickhouse_connect-0.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:63bbb5721bfece698e155c01b8fa95ce4377c584f4d04b43f383824e8a8fa129", size = 1150791 }, - { url = "https://files.pythonhosted.org/packages/45/a4/fdfbfacc1fa67b8b1ce980adcf42f9e3202325586822840f04f068aff395/clickhouse_connect-0.10.0-cp311-cp311-win32.whl", hash = "sha256:48554e836c6b56fe0854d9a9f565569010583d4960094d60b68a53f9f83042f0", size = 244014 }, - { url = "https://files.pythonhosted.org/packages/08/50/cf53f33f4546a9ce2ab1b9930db4850aa1ae53bff1e4e4fa97c566cdfa19/clickhouse_connect-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9eb8df083e5fda78ac7249938691c2c369e8578b5df34c709467147e8289f1d9", size = 262356 }, - { url = 
"https://files.pythonhosted.org/packages/9e/59/fadbbf64f4c6496cd003a0a3c9223772409a86d0eea9d4ff45d2aa88aabf/clickhouse_connect-0.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b090c7d8e602dd084b2795265cd30610461752284763d9ad93a5d619a0e0ff21", size = 276401 }, - { url = "https://files.pythonhosted.org/packages/1c/e3/781f9970f2ef202410f0d64681e42b2aecd0010097481a91e4df186a36c7/clickhouse_connect-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b8a708d38b81dcc8c13bb85549c904817e304d2b7f461246fed2945524b7a31b", size = 268193 }, - { url = "https://files.pythonhosted.org/packages/f0/e0/64ab66b38fce762b77b5203a4fcecc603595f2a2361ce1605fc7bb79c835/clickhouse_connect-0.10.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3646fc9184a5469b95cf4a0846e6954e6e9e85666f030a5d2acae58fa8afb37e", size = 1123810 }, - { url = "https://files.pythonhosted.org/packages/f5/03/19121aecf11a30feaf19049be96988131798c54ac6ba646a38e5faecaa0a/clickhouse_connect-0.10.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fe7e6be0f40a8a77a90482944f5cc2aa39084c1570899e8d2d1191f62460365b", size = 1153409 }, - { url = "https://files.pythonhosted.org/packages/ce/ee/63870fd8b666c6030393950ad4ee76b7b69430f5a49a5d3fa32a70b11942/clickhouse_connect-0.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:88b4890f13163e163bf6fa61f3a013bb974c95676853b7a4e63061faf33911ac", size = 1104696 }, - { url = "https://files.pythonhosted.org/packages/e9/bc/fcd8da1c4d007ebce088783979c495e3d7360867cfa8c91327ed235778f5/clickhouse_connect-0.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6286832cc79affc6fddfbf5563075effa65f80e7cd1481cf2b771ce317c67d08", size = 1156389 }, - { url = "https://files.pythonhosted.org/packages/4e/33/7cb99cc3fc503c23fd3a365ec862eb79cd81c8dc3037242782d709280fa9/clickhouse_connect-0.10.0-cp312-cp312-win32.whl", hash = "sha256:92b8b6691a92d2613ee35f5759317bd4be7ba66d39bf81c4deed620feb388ca6", size = 243682 }, - { url = "https://files.pythonhosted.org/packages/48/5c/12eee6a1f5ecda2dfc421781fde653c6d6ca6f3080f24547c0af40485a5a/clickhouse_connect-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:1159ee2c33e7eca40b53dda917a8b6a2ed889cb4c54f3d83b303b31ddb4f351d", size = 262790 }, + { url = "https://files.pythonhosted.org/packages/bf/4e/f90caf963d14865c7a3f0e5d80b77e67e0fe0bf39b3de84110707746fa6b/clickhouse_connect-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:195f1824405501b747b572e1365c6265bb1629eeb712ce91eda91da3c5794879", size = 272911, upload-time = "2025-11-14T20:29:57.129Z" }, + { url = "https://files.pythonhosted.org/packages/50/c7/e01bd2dd80ea4fbda8968e5022c60091a872fd9de0a123239e23851da231/clickhouse_connect-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7907624635fe7f28e1b85c7c8b125a72679a63ecdb0b9f4250b704106ef438f8", size = 265938, upload-time = "2025-11-14T20:29:58.443Z" }, + { url = "https://files.pythonhosted.org/packages/f4/07/8b567b949abca296e118331d13380bbdefa4225d7d1d32233c59d4b4b2e1/clickhouse_connect-0.10.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60772faa54d56f0fa34650460910752a583f5948f44dddeabfafaecbca21fc54", size = 1113548, upload-time = "2025-11-14T20:29:59.781Z" }, + { url = "https://files.pythonhosted.org/packages/9c/13/11f2d37fc95e74d7e2d80702cde87666ce372486858599a61f5209e35fc5/clickhouse_connect-0.10.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:7fe2a6cd98517330c66afe703fb242c0d3aa2c91f2f7dc9fb97c122c5c60c34b", size = 1135061, upload-time = "2025-11-14T20:30:01.244Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d0/517181ea80060f84d84cff4d42d330c80c77bb352b728fb1f9681fbad291/clickhouse_connect-0.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a2427d312bc3526520a0be8c648479af3f6353da7a33a62db2368d6203b08efd", size = 1105105, upload-time = "2025-11-14T20:30:02.679Z" }, + { url = "https://files.pythonhosted.org/packages/7c/b2/4ad93e898562725b58c537cad83ab2694c9b1c1ef37fa6c3f674bdad366a/clickhouse_connect-0.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:63bbb5721bfece698e155c01b8fa95ce4377c584f4d04b43f383824e8a8fa129", size = 1150791, upload-time = "2025-11-14T20:30:03.824Z" }, + { url = "https://files.pythonhosted.org/packages/45/a4/fdfbfacc1fa67b8b1ce980adcf42f9e3202325586822840f04f068aff395/clickhouse_connect-0.10.0-cp311-cp311-win32.whl", hash = "sha256:48554e836c6b56fe0854d9a9f565569010583d4960094d60b68a53f9f83042f0", size = 244014, upload-time = "2025-11-14T20:30:05.157Z" }, + { url = "https://files.pythonhosted.org/packages/08/50/cf53f33f4546a9ce2ab1b9930db4850aa1ae53bff1e4e4fa97c566cdfa19/clickhouse_connect-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9eb8df083e5fda78ac7249938691c2c369e8578b5df34c709467147e8289f1d9", size = 262356, upload-time = "2025-11-14T20:30:06.478Z" }, + { url = "https://files.pythonhosted.org/packages/9e/59/fadbbf64f4c6496cd003a0a3c9223772409a86d0eea9d4ff45d2aa88aabf/clickhouse_connect-0.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b090c7d8e602dd084b2795265cd30610461752284763d9ad93a5d619a0e0ff21", size = 276401, upload-time = "2025-11-14T20:30:07.469Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e3/781f9970f2ef202410f0d64681e42b2aecd0010097481a91e4df186a36c7/clickhouse_connect-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b8a708d38b81dcc8c13bb85549c904817e304d2b7f461246fed2945524b7a31b", size = 268193, upload-time = "2025-11-14T20:30:08.503Z" }, + { url = "https://files.pythonhosted.org/packages/f0/e0/64ab66b38fce762b77b5203a4fcecc603595f2a2361ce1605fc7bb79c835/clickhouse_connect-0.10.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3646fc9184a5469b95cf4a0846e6954e6e9e85666f030a5d2acae58fa8afb37e", size = 1123810, upload-time = "2025-11-14T20:30:09.62Z" }, + { url = "https://files.pythonhosted.org/packages/f5/03/19121aecf11a30feaf19049be96988131798c54ac6ba646a38e5faecaa0a/clickhouse_connect-0.10.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fe7e6be0f40a8a77a90482944f5cc2aa39084c1570899e8d2d1191f62460365b", size = 1153409, upload-time = "2025-11-14T20:30:10.855Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ee/63870fd8b666c6030393950ad4ee76b7b69430f5a49a5d3fa32a70b11942/clickhouse_connect-0.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:88b4890f13163e163bf6fa61f3a013bb974c95676853b7a4e63061faf33911ac", size = 1104696, upload-time = "2025-11-14T20:30:12.187Z" }, + { url = "https://files.pythonhosted.org/packages/e9/bc/fcd8da1c4d007ebce088783979c495e3d7360867cfa8c91327ed235778f5/clickhouse_connect-0.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6286832cc79affc6fddfbf5563075effa65f80e7cd1481cf2b771ce317c67d08", size = 1156389, upload-time = "2025-11-14T20:30:13.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/33/7cb99cc3fc503c23fd3a365ec862eb79cd81c8dc3037242782d709280fa9/clickhouse_connect-0.10.0-cp312-cp312-win32.whl", hash = "sha256:92b8b6691a92d2613ee35f5759317bd4be7ba66d39bf81c4deed620feb388ca6", size = 243682, upload-time = "2025-11-14T20:30:14.52Z" }, + { url = "https://files.pythonhosted.org/packages/48/5c/12eee6a1f5ecda2dfc421781fde653c6d6ca6f3080f24547c0af40485a5a/clickhouse_connect-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:1159ee2c33e7eca40b53dda917a8b6a2ed889cb4c54f3d83b303b31ddb4f351d", size = 262790, upload-time = "2025-11-14T20:30:15.555Z" }, ] [[package]] @@ -1054,16 +1070,16 @@ dependencies = [ { name = "urllib3" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/19/b4/91dfe25592bbcaf7eede05849c77d09d43a2656943585bbcf7ba4cc604bc/clickzetta_connector_python-0.8.107-py3-none-any.whl", hash = "sha256:7f28752bfa0a50e89ed218db0540c02c6bfbfdae3589ac81cf28523d7caa93b0", size = 76864 }, + { url = "https://files.pythonhosted.org/packages/19/b4/91dfe25592bbcaf7eede05849c77d09d43a2656943585bbcf7ba4cc604bc/clickzetta_connector_python-0.8.107-py3-none-any.whl", hash = "sha256:7f28752bfa0a50e89ed218db0540c02c6bfbfdae3589ac81cf28523d7caa93b0", size = 76864, upload-time = "2025-12-01T07:56:39.177Z" }, ] [[package]] name = "cloudpickle" version = "3.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/27/fb/576f067976d320f5f0114a8d9fa1215425441bb35627b1993e5afd8111e5/cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414", size = 22330 } +sdist = { url = "https://files.pythonhosted.org/packages/27/fb/576f067976d320f5f0114a8d9fa1215425441bb35627b1993e5afd8111e5/cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414", size = 22330, upload-time = "2025-11-03T09:25:26.604Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a", size = 22228 }, + { url = "https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a", size = 22228, upload-time = "2025-11-03T09:25:25.534Z" }, ] [[package]] @@ -1075,18 +1091,18 @@ dependencies = [ { name = "requests" }, { name = "requests-toolbelt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ac/25/6d0481860583f44953bd791de0b7c4f6d7ead7223f8a17e776247b34a5b4/cloudscraper-1.2.71.tar.gz", hash = "sha256:429c6e8aa6916d5bad5c8a5eac50f3ea53c9ac22616f6cb21b18dcc71517d0d3", size = 93261 } +sdist = { url = "https://files.pythonhosted.org/packages/ac/25/6d0481860583f44953bd791de0b7c4f6d7ead7223f8a17e776247b34a5b4/cloudscraper-1.2.71.tar.gz", hash = "sha256:429c6e8aa6916d5bad5c8a5eac50f3ea53c9ac22616f6cb21b18dcc71517d0d3", size = 93261, upload-time = "2023-04-25T23:20:19.467Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/97/fc88803a451029688dffd7eb446dc1b529657577aec13aceff1cc9628c5d/cloudscraper-1.2.71-py2.py3-none-any.whl", hash = "sha256:76f50ca529ed2279e220837befdec892626f9511708e200d48d5bb76ded679b0", size = 99652 }, + { url = 
"https://files.pythonhosted.org/packages/81/97/fc88803a451029688dffd7eb446dc1b529657577aec13aceff1cc9628c5d/cloudscraper-1.2.71-py2.py3-none-any.whl", hash = "sha256:76f50ca529ed2279e220837befdec892626f9511708e200d48d5bb76ded679b0", size = 99652, upload-time = "2023-04-25T23:20:15.974Z" }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] [[package]] @@ -1096,9 +1112,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "humanfriendly" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520 } +sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520, upload-time = "2021-06-11T10:22:45.202Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018 }, + { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" }, ] [[package]] @@ -1112,56 +1128,56 @@ dependencies = [ { name = "six" }, { name = "xmltodict" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/24/3c/d208266fec7cc3221b449e236b87c3fc1999d5ac4379d4578480321cfecc/cos_python_sdk_v5-1.9.38.tar.gz", hash = "sha256:491a8689ae2f1a6f04dacba66a877b2c8d361456f9cfd788ed42170a1cbf7a9f", size = 98092 } +sdist = { url = "https://files.pythonhosted.org/packages/24/3c/d208266fec7cc3221b449e236b87c3fc1999d5ac4379d4578480321cfecc/cos_python_sdk_v5-1.9.38.tar.gz", hash = "sha256:491a8689ae2f1a6f04dacba66a877b2c8d361456f9cfd788ed42170a1cbf7a9f", size = 98092, upload-time = "2025-07-22T07:56:20.34Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ab/c8/c9c156aa3bc7caba9b4f8a2b6abec3da6263215988f3fec0ea843f137a10/cos_python_sdk_v5-1.9.38-py3-none-any.whl", hash = "sha256:1d3dd3be2bd992b2e9c2dcd018e2596aa38eab022dbc86b4a5d14c8fc88370e6", size = 92601 }, + { url = "https://files.pythonhosted.org/packages/ab/c8/c9c156aa3bc7caba9b4f8a2b6abec3da6263215988f3fec0ea843f137a10/cos_python_sdk_v5-1.9.38-py3-none-any.whl", hash = "sha256:1d3dd3be2bd992b2e9c2dcd018e2596aa38eab022dbc86b4a5d14c8fc88370e6", size = 92601, upload-time = "2025-08-17T05:12:30.867Z" }, ] [[package]] name = "couchbase" version = "4.3.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2f/70/7cf92b2443330e7a4b626a02fe15fbeb1531337d75e6ae6393294e960d18/couchbase-4.3.6.tar.gz", hash = "sha256:d58c5ccdad5d85fc026f328bf4190c4fc0041fdbe68ad900fb32fc5497c3f061", size = 6517695 } +sdist = { url = "https://files.pythonhosted.org/packages/2f/70/7cf92b2443330e7a4b626a02fe15fbeb1531337d75e6ae6393294e960d18/couchbase-4.3.6.tar.gz", hash = "sha256:d58c5ccdad5d85fc026f328bf4190c4fc0041fdbe68ad900fb32fc5497c3f061", size = 6517695, upload-time = "2025-05-15T17:21:38.157Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/0a/eae21d3a9331f7c93e8483f686e1bcb9e3b48f2ce98193beb0637a620926/couchbase-4.3.6-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:4c10fd26271c5630196b9bcc0dd7e17a45fa9c7e46ed5756e5690d125423160c", size = 4775710 }, - { url = "https://files.pythonhosted.org/packages/f6/98/0ca042a42f5807bbf8050f52fff39ebceebc7bea7e5897907758f3e1ad39/couchbase-4.3.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:811eee7a6013cea7b15a718e201ee1188df162c656d27c7882b618ab57a08f3a", size = 4020743 }, - { url = "https://files.pythonhosted.org/packages/f8/0f/c91407cb082d2322217e8f7ca4abb8eda016a81a4db5a74b7ac6b737597d/couchbase-4.3.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fc177e0161beb1e6e8c4b9561efcb97c51aed55a77ee11836ca194d33ae22b7", size = 4796091 }, - { url = "https://files.pythonhosted.org/packages/8c/02/5567b660543828bdbbc68dcae080e388cb0be391aa8a97cce9d8c8a6c147/couchbase-4.3.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02afb1c1edd6b215f702510412b5177ed609df8135930c23789bbc5901dd1b45", size = 5015684 }, - { url = "https://files.pythonhosted.org/packages/dc/d1/767908826d5bdd258addab26d7f1d21bc42bafbf5f30d1b556ace06295af/couchbase-4.3.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:594e9eb17bb76ba8e10eeee17a16aef897dd90d33c6771cf2b5b4091da415b32", size = 5673513 }, - { url = "https://files.pythonhosted.org/packages/f2/25/39ecde0a06692abce8bb0df4f15542933f05883647a1a57cdc7bbed9c77c/couchbase-4.3.6-cp311-cp311-win_amd64.whl", hash = "sha256:db22c56e38b8313f65807aa48309c8b8c7c44d5517b9ff1d8b4404d4740ec286", size = 4010728 }, - { url = "https://files.pythonhosted.org/packages/b1/55/c12b8f626de71363fbe30578f4a0de1b8bb41afbe7646ff8538c3b38ce2a/couchbase-4.3.6-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:a2ae13432b859f513485d4cee691e1e4fce4af23ed4218b9355874b146343f8c", size = 4693517 }, - { url = "https://files.pythonhosted.org/packages/a1/aa/2184934d283d99b34a004f577bf724d918278a2962781ca5690d4fa4b6c6/couchbase-4.3.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ea5ca7e34b5d023c8bab406211ab5d71e74a976ba25fa693b4f8e6c74f85aa2", size = 4022393 }, - { url = 
"https://files.pythonhosted.org/packages/80/29/ba6d3b205a51c04c270c1b56ea31da678b7edc565b35a34237ec2cfc708d/couchbase-4.3.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6eaca0a71fd8f9af4344b7d6474d7b74d1784ae9a658f6bc3751df5f9a4185ae", size = 4798396 }, - { url = "https://files.pythonhosted.org/packages/4a/94/d7d791808bd9064c01f965015ff40ee76e6bac10eaf2c73308023b9bdedf/couchbase-4.3.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0470378b986f69368caed6d668ac6530e635b0c1abaef3d3f524cfac0dacd878", size = 5018099 }, - { url = "https://files.pythonhosted.org/packages/a6/04/cec160f9f4b862788e2a0167616472a5695b2f569bd62204938ab674835d/couchbase-4.3.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:374ce392558f1688ac073aa0b15c256b1a441201d965811fd862357ff05d27a9", size = 5672633 }, - { url = "https://files.pythonhosted.org/packages/1b/a2/1da2ab45412b9414e2c6a578e0e7a24f29b9261ef7de11707c2fc98045b8/couchbase-4.3.6-cp312-cp312-win_amd64.whl", hash = "sha256:cd734333de34d8594504c163bb6c47aea9cc1f2cefdf8e91875dd9bf14e61e29", size = 4013298 }, + { url = "https://files.pythonhosted.org/packages/f3/0a/eae21d3a9331f7c93e8483f686e1bcb9e3b48f2ce98193beb0637a620926/couchbase-4.3.6-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:4c10fd26271c5630196b9bcc0dd7e17a45fa9c7e46ed5756e5690d125423160c", size = 4775710, upload-time = "2025-05-15T17:20:29.388Z" }, + { url = "https://files.pythonhosted.org/packages/f6/98/0ca042a42f5807bbf8050f52fff39ebceebc7bea7e5897907758f3e1ad39/couchbase-4.3.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:811eee7a6013cea7b15a718e201ee1188df162c656d27c7882b618ab57a08f3a", size = 4020743, upload-time = "2025-05-15T17:20:31.515Z" }, + { url = "https://files.pythonhosted.org/packages/f8/0f/c91407cb082d2322217e8f7ca4abb8eda016a81a4db5a74b7ac6b737597d/couchbase-4.3.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fc177e0161beb1e6e8c4b9561efcb97c51aed55a77ee11836ca194d33ae22b7", size = 4796091, upload-time = "2025-05-15T17:20:33.818Z" }, + { url = "https://files.pythonhosted.org/packages/8c/02/5567b660543828bdbbc68dcae080e388cb0be391aa8a97cce9d8c8a6c147/couchbase-4.3.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02afb1c1edd6b215f702510412b5177ed609df8135930c23789bbc5901dd1b45", size = 5015684, upload-time = "2025-05-15T17:20:36.364Z" }, + { url = "https://files.pythonhosted.org/packages/dc/d1/767908826d5bdd258addab26d7f1d21bc42bafbf5f30d1b556ace06295af/couchbase-4.3.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:594e9eb17bb76ba8e10eeee17a16aef897dd90d33c6771cf2b5b4091da415b32", size = 5673513, upload-time = "2025-05-15T17:20:38.972Z" }, + { url = "https://files.pythonhosted.org/packages/f2/25/39ecde0a06692abce8bb0df4f15542933f05883647a1a57cdc7bbed9c77c/couchbase-4.3.6-cp311-cp311-win_amd64.whl", hash = "sha256:db22c56e38b8313f65807aa48309c8b8c7c44d5517b9ff1d8b4404d4740ec286", size = 4010728, upload-time = "2025-05-15T17:20:43.286Z" }, + { url = "https://files.pythonhosted.org/packages/b1/55/c12b8f626de71363fbe30578f4a0de1b8bb41afbe7646ff8538c3b38ce2a/couchbase-4.3.6-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:a2ae13432b859f513485d4cee691e1e4fce4af23ed4218b9355874b146343f8c", size = 4693517, upload-time = "2025-05-15T17:20:45.433Z" }, + { url = "https://files.pythonhosted.org/packages/a1/aa/2184934d283d99b34a004f577bf724d918278a2962781ca5690d4fa4b6c6/couchbase-4.3.6-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:4ea5ca7e34b5d023c8bab406211ab5d71e74a976ba25fa693b4f8e6c74f85aa2", size = 4022393, upload-time = "2025-05-15T17:20:47.442Z" }, + { url = "https://files.pythonhosted.org/packages/80/29/ba6d3b205a51c04c270c1b56ea31da678b7edc565b35a34237ec2cfc708d/couchbase-4.3.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6eaca0a71fd8f9af4344b7d6474d7b74d1784ae9a658f6bc3751df5f9a4185ae", size = 4798396, upload-time = "2025-05-15T17:20:49.473Z" }, + { url = "https://files.pythonhosted.org/packages/4a/94/d7d791808bd9064c01f965015ff40ee76e6bac10eaf2c73308023b9bdedf/couchbase-4.3.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0470378b986f69368caed6d668ac6530e635b0c1abaef3d3f524cfac0dacd878", size = 5018099, upload-time = "2025-05-15T17:20:52.541Z" }, + { url = "https://files.pythonhosted.org/packages/a6/04/cec160f9f4b862788e2a0167616472a5695b2f569bd62204938ab674835d/couchbase-4.3.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:374ce392558f1688ac073aa0b15c256b1a441201d965811fd862357ff05d27a9", size = 5672633, upload-time = "2025-05-15T17:20:55.994Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a2/1da2ab45412b9414e2c6a578e0e7a24f29b9261ef7de11707c2fc98045b8/couchbase-4.3.6-cp312-cp312-win_amd64.whl", hash = "sha256:cd734333de34d8594504c163bb6c47aea9cc1f2cefdf8e91875dd9bf14e61e29", size = 4013298, upload-time = "2025-05-15T17:20:59.533Z" }, ] [[package]] name = "coverage" version = "7.2.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/45/8b/421f30467e69ac0e414214856798d4bc32da1336df745e49e49ae5c1e2a8/coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59", size = 762575 } +sdist = { url = "https://files.pythonhosted.org/packages/45/8b/421f30467e69ac0e414214856798d4bc32da1336df745e49e49ae5c1e2a8/coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59", size = 762575, upload-time = "2023-05-29T20:08:50.273Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/fa/529f55c9a1029c840bcc9109d5a15ff00478b7ff550a1ae361f8745f8ad5/coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f", size = 200895 }, - { url = "https://files.pythonhosted.org/packages/67/d7/cd8fe689b5743fffac516597a1222834c42b80686b99f5b44ef43ccc2a43/coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe", size = 201120 }, - { url = "https://files.pythonhosted.org/packages/8c/95/16eed713202406ca0a37f8ac259bbf144c9d24f9b8097a8e6ead61da2dbb/coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3", size = 233178 }, - { url = "https://files.pythonhosted.org/packages/c1/49/4d487e2ad5d54ed82ac1101e467e8994c09d6123c91b2a962145f3d262c2/coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f", size = 230754 }, - { url = "https://files.pythonhosted.org/packages/a7/cd/3ce94ad9d407a052dc2a74fbeb1c7947f442155b28264eb467ee78dea812/coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb", size = 
232558 }, - { url = "https://files.pythonhosted.org/packages/8f/a8/12cc7b261f3082cc299ab61f677f7e48d93e35ca5c3c2f7241ed5525ccea/coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833", size = 241509 }, - { url = "https://files.pythonhosted.org/packages/04/fa/43b55101f75a5e9115259e8be70ff9279921cb6b17f04c34a5702ff9b1f7/coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97", size = 239924 }, - { url = "https://files.pythonhosted.org/packages/68/5f/d2bd0f02aa3c3e0311986e625ccf97fdc511b52f4f1a063e4f37b624772f/coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a", size = 240977 }, - { url = "https://files.pythonhosted.org/packages/ba/92/69c0722882643df4257ecc5437b83f4c17ba9e67f15dc6b77bad89b6982e/coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a", size = 203168 }, - { url = "https://files.pythonhosted.org/packages/b1/96/c12ed0dfd4ec587f3739f53eb677b9007853fd486ccb0e7d5512a27bab2e/coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562", size = 204185 }, - { url = "https://files.pythonhosted.org/packages/ff/d5/52fa1891d1802ab2e1b346d37d349cb41cdd4fd03f724ebbf94e80577687/coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4", size = 201020 }, - { url = "https://files.pythonhosted.org/packages/24/df/6765898d54ea20e3197a26d26bb65b084deefadd77ce7de946b9c96dfdc5/coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4", size = 233994 }, - { url = "https://files.pythonhosted.org/packages/15/81/b108a60bc758b448c151e5abceed027ed77a9523ecbc6b8a390938301841/coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01", size = 231358 }, - { url = "https://files.pythonhosted.org/packages/61/90/c76b9462f39897ebd8714faf21bc985b65c4e1ea6dff428ea9dc711ed0dd/coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6", size = 233316 }, - { url = "https://files.pythonhosted.org/packages/04/d6/8cba3bf346e8b1a4fb3f084df7d8cea25a6b6c56aaca1f2e53829be17e9e/coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d", size = 240159 }, - { url = "https://files.pythonhosted.org/packages/6e/ea/4a252dc77ca0605b23d477729d139915e753ee89e4c9507630e12ad64a80/coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de", size = 238127 }, - { url = "https://files.pythonhosted.org/packages/9f/5c/d9760ac497c41f9c4841f5972d0edf05d50cad7814e86ee7d133ec4a0ac8/coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d", size = 239833 }, - { url = "https://files.pythonhosted.org/packages/69/8c/26a95b08059db1cbb01e4b0e6d40f2e9debb628c6ca86b78f625ceaf9bab/coverage-7.2.7-cp312-cp312-win32.whl", hash = 
"sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511", size = 203463 }, - { url = "https://files.pythonhosted.org/packages/b7/00/14b00a0748e9eda26e97be07a63cc911108844004687321ddcc213be956c/coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3", size = 204347 }, + { url = "https://files.pythonhosted.org/packages/c6/fa/529f55c9a1029c840bcc9109d5a15ff00478b7ff550a1ae361f8745f8ad5/coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f", size = 200895, upload-time = "2023-05-29T20:07:21.963Z" }, + { url = "https://files.pythonhosted.org/packages/67/d7/cd8fe689b5743fffac516597a1222834c42b80686b99f5b44ef43ccc2a43/coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe", size = 201120, upload-time = "2023-05-29T20:07:23.765Z" }, + { url = "https://files.pythonhosted.org/packages/8c/95/16eed713202406ca0a37f8ac259bbf144c9d24f9b8097a8e6ead61da2dbb/coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3", size = 233178, upload-time = "2023-05-29T20:07:25.281Z" }, + { url = "https://files.pythonhosted.org/packages/c1/49/4d487e2ad5d54ed82ac1101e467e8994c09d6123c91b2a962145f3d262c2/coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f", size = 230754, upload-time = "2023-05-29T20:07:27.044Z" }, + { url = "https://files.pythonhosted.org/packages/a7/cd/3ce94ad9d407a052dc2a74fbeb1c7947f442155b28264eb467ee78dea812/coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb", size = 232558, upload-time = "2023-05-29T20:07:28.743Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a8/12cc7b261f3082cc299ab61f677f7e48d93e35ca5c3c2f7241ed5525ccea/coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833", size = 241509, upload-time = "2023-05-29T20:07:30.434Z" }, + { url = "https://files.pythonhosted.org/packages/04/fa/43b55101f75a5e9115259e8be70ff9279921cb6b17f04c34a5702ff9b1f7/coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97", size = 239924, upload-time = "2023-05-29T20:07:32.065Z" }, + { url = "https://files.pythonhosted.org/packages/68/5f/d2bd0f02aa3c3e0311986e625ccf97fdc511b52f4f1a063e4f37b624772f/coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a", size = 240977, upload-time = "2023-05-29T20:07:34.184Z" }, + { url = "https://files.pythonhosted.org/packages/ba/92/69c0722882643df4257ecc5437b83f4c17ba9e67f15dc6b77bad89b6982e/coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a", size = 203168, upload-time = "2023-05-29T20:07:35.869Z" }, + { url = "https://files.pythonhosted.org/packages/b1/96/c12ed0dfd4ec587f3739f53eb677b9007853fd486ccb0e7d5512a27bab2e/coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562", size = 204185, 
upload-time = "2023-05-29T20:07:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/ff/d5/52fa1891d1802ab2e1b346d37d349cb41cdd4fd03f724ebbf94e80577687/coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4", size = 201020, upload-time = "2023-05-29T20:07:38.724Z" }, + { url = "https://files.pythonhosted.org/packages/24/df/6765898d54ea20e3197a26d26bb65b084deefadd77ce7de946b9c96dfdc5/coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4", size = 233994, upload-time = "2023-05-29T20:07:40.274Z" }, + { url = "https://files.pythonhosted.org/packages/15/81/b108a60bc758b448c151e5abceed027ed77a9523ecbc6b8a390938301841/coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01", size = 231358, upload-time = "2023-05-29T20:07:41.998Z" }, + { url = "https://files.pythonhosted.org/packages/61/90/c76b9462f39897ebd8714faf21bc985b65c4e1ea6dff428ea9dc711ed0dd/coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6", size = 233316, upload-time = "2023-05-29T20:07:43.539Z" }, + { url = "https://files.pythonhosted.org/packages/04/d6/8cba3bf346e8b1a4fb3f084df7d8cea25a6b6c56aaca1f2e53829be17e9e/coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d", size = 240159, upload-time = "2023-05-29T20:07:44.982Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ea/4a252dc77ca0605b23d477729d139915e753ee89e4c9507630e12ad64a80/coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de", size = 238127, upload-time = "2023-05-29T20:07:46.522Z" }, + { url = "https://files.pythonhosted.org/packages/9f/5c/d9760ac497c41f9c4841f5972d0edf05d50cad7814e86ee7d133ec4a0ac8/coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d", size = 239833, upload-time = "2023-05-29T20:07:47.992Z" }, + { url = "https://files.pythonhosted.org/packages/69/8c/26a95b08059db1cbb01e4b0e6d40f2e9debb628c6ca86b78f625ceaf9bab/coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511", size = 203463, upload-time = "2023-05-29T20:07:49.939Z" }, + { url = "https://files.pythonhosted.org/packages/b7/00/14b00a0748e9eda26e97be07a63cc911108844004687321ddcc213be956c/coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3", size = 204347, upload-time = "2023-05-29T20:07:51.909Z" }, ] [package.optional-dependencies] @@ -1173,38 +1189,38 @@ toml = [ name = "crc32c" version = "2.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/66/7e97aa77af7cf6afbff26e3651b564fe41932599bc2d3dce0b2f73d4829a/crc32c-2.8.tar.gz", hash = "sha256:578728964e59c47c356aeeedee6220e021e124b9d3e8631d95d9a5e5f06e261c", size = 48179 } +sdist = { url = "https://files.pythonhosted.org/packages/e3/66/7e97aa77af7cf6afbff26e3651b564fe41932599bc2d3dce0b2f73d4829a/crc32c-2.8.tar.gz", hash = 
"sha256:578728964e59c47c356aeeedee6220e021e124b9d3e8631d95d9a5e5f06e261c", size = 48179, upload-time = "2025-10-17T06:20:13.61Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/0b/5e03b22d913698e9cc563f39b9f6bbd508606bf6b8e9122cd6bf196b87ea/crc32c-2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e560a97fbb96c9897cb1d9b5076ef12fc12e2e25622530a1afd0de4240f17e1f", size = 66329 }, - { url = "https://files.pythonhosted.org/packages/6b/38/2fe0051ffe8c6a650c8b1ac0da31b8802d1dbe5fa40a84e4b6b6f5583db5/crc32c-2.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6762d276d90331a490ef7e71ffee53b9c0eb053bd75a272d786f3b08d3fe3671", size = 62988 }, - { url = "https://files.pythonhosted.org/packages/3e/30/5837a71c014be83aba1469c58820d287fc836512a0cad6b8fdd43868accd/crc32c-2.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:60670569f5ede91e39f48fb0cb4060e05b8d8704dd9e17ede930bf441b2f73ef", size = 61522 }, - { url = "https://files.pythonhosted.org/packages/ca/29/63972fc1452778e2092ae998c50cbfc2fc93e3fa9798a0278650cd6169c5/crc32c-2.8-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:711743da6ccc70b3c6718c328947b0b6f34a1fe6a6c27cc6c1d69cc226bf70e9", size = 80200 }, - { url = "https://files.pythonhosted.org/packages/cb/3a/60eb49d7bdada4122b3ffd45b0df54bdc1b8dd092cda4b069a287bdfcff4/crc32c-2.8-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5eb4094a2054774f13b26f21bf56792bb44fa1fcee6c6ad099387a43ffbfb4fa", size = 81757 }, - { url = "https://files.pythonhosted.org/packages/f5/63/6efc1b64429ef7d23bd58b75b7ac24d15df327e3ebbe9c247a0f7b1c2ed1/crc32c-2.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fff15bf2bd3e95780516baae935ed12be88deaa5ebe6143c53eb0d26a7bdc7b7", size = 80830 }, - { url = "https://files.pythonhosted.org/packages/e1/eb/0ae9f436f8004f1c88f7429e659a7218a3879bd11a6b18ed1257aad7e98b/crc32c-2.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4c0e11e3826668121fa53e0745635baf5e4f0ded437e8ff63ea56f38fc4f970a", size = 80095 }, - { url = "https://files.pythonhosted.org/packages/9e/81/4afc9d468977a4cd94a2eb62908553345009a7c0d30e74463a15d4b48ec3/crc32c-2.8-cp311-cp311-win32.whl", hash = "sha256:38f915336715d1f1353ab07d7d786f8a789b119e273aea106ba55355dfc9101d", size = 64886 }, - { url = "https://files.pythonhosted.org/packages/d6/e8/94e839c9f7e767bf8479046a207afd440a08f5c59b52586e1af5e64fa4a0/crc32c-2.8-cp311-cp311-win_amd64.whl", hash = "sha256:60e0a765b1caab8d31b2ea80840639253906a9351d4b861551c8c8625ea20f86", size = 66639 }, - { url = "https://files.pythonhosted.org/packages/b6/36/fd18ef23c42926b79c7003e16cb0f79043b5b179c633521343d3b499e996/crc32c-2.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:572ffb1b78cce3d88e8d4143e154d31044a44be42cb3f6fbbf77f1e7a941c5ab", size = 66379 }, - { url = "https://files.pythonhosted.org/packages/7f/b8/c584958e53f7798dd358f5bdb1bbfc97483134f053ee399d3eeb26cca075/crc32c-2.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cf827b3758ee0c4aacd21ceca0e2da83681f10295c38a10bfeb105f7d98f7a68", size = 63042 }, - { url = "https://files.pythonhosted.org/packages/62/e6/6f2af0ec64a668a46c861e5bc778ea3ee42171fedfc5440f791f470fd783/crc32c-2.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:106fbd79013e06fa92bc3b51031694fcc1249811ed4364ef1554ee3dd2c7f5a2", size = 61528 }, - { url = 
"https://files.pythonhosted.org/packages/17/8b/4a04bd80a024f1a23978f19ae99407783e06549e361ab56e9c08bba3c1d3/crc32c-2.8-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6dde035f91ffbfe23163e68605ee5a4bb8ceebd71ed54bb1fb1d0526cdd125a2", size = 80028 }, - { url = "https://files.pythonhosted.org/packages/21/8f/01c7afdc76ac2007d0e6a98e7300b4470b170480f8188475b597d1f4b4c6/crc32c-2.8-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e41ebe7c2f0fdcd9f3a3fd206989a36b460b4d3f24816d53e5be6c7dba72c5e1", size = 81531 }, - { url = "https://files.pythonhosted.org/packages/32/2b/8f78c5a8cc66486be5f51b6f038fc347c3ba748d3ea68be17a014283c331/crc32c-2.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ecf66cf90266d9c15cea597d5cc86c01917cd1a238dc3c51420c7886fa750d7e", size = 80608 }, - { url = "https://files.pythonhosted.org/packages/db/86/fad1a94cdeeeb6b6e2323c87f970186e74bfd6fbfbc247bf5c88ad0873d5/crc32c-2.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:59eee5f3a69ad0793d5fa9cdc9b9d743b0cd50edf7fccc0a3988a821fef0208c", size = 79886 }, - { url = "https://files.pythonhosted.org/packages/d5/db/1a7cb6757a1e32376fa2dfce00c815ea4ee614a94f9bff8228e37420c183/crc32c-2.8-cp312-cp312-win32.whl", hash = "sha256:a73d03ce3604aa5d7a2698e9057a0eef69f529c46497b27ee1c38158e90ceb76", size = 64896 }, - { url = "https://files.pythonhosted.org/packages/bf/8e/2024de34399b2e401a37dcb54b224b56c747b0dc46de4966886827b4d370/crc32c-2.8-cp312-cp312-win_amd64.whl", hash = "sha256:56b3b7d015247962cf58186e06d18c3d75a1a63d709d3233509e1c50a2d36aa2", size = 66645 }, - { url = "https://files.pythonhosted.org/packages/a7/1d/dd926c68eb8aac8b142a1a10b8eb62d95212c1cf81775644373fe7cceac2/crc32c-2.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5833f4071da7ea182c514ba17d1eee8aec3c5be927d798222fbfbbd0f5eea02c", size = 62345 }, - { url = "https://files.pythonhosted.org/packages/51/be/803404e5abea2ef2c15042edca04bbb7f625044cca879e47f186b43887c2/crc32c-2.8-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:1dc4da036126ac07b39dd9d03e93e585ec615a2ad28ff12757aef7de175295a8", size = 61229 }, - { url = "https://files.pythonhosted.org/packages/fc/3a/00cc578cd27ed0b22c9be25cef2c24539d92df9fa80ebd67a3fc5419724c/crc32c-2.8-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:15905fa78344654e241371c47e6ed2411f9eeb2b8095311c68c88eccf541e8b4", size = 64108 }, - { url = "https://files.pythonhosted.org/packages/6b/bc/0587ef99a1c7629f95dd0c9d4f3d894de383a0df85831eb16c48a6afdae4/crc32c-2.8-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c596f918688821f796434e89b431b1698396c38bf0b56de873621528fe3ecb1e", size = 64815 }, - { url = "https://files.pythonhosted.org/packages/73/42/94f2b8b92eae9064fcfb8deef2b971514065bd606231f8857ff8ae02bebd/crc32c-2.8-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8d23c4fe01b3844cb6e091044bc1cebdef7d16472e058ce12d9fadf10d2614af", size = 66659 }, + { url = "https://files.pythonhosted.org/packages/dc/0b/5e03b22d913698e9cc563f39b9f6bbd508606bf6b8e9122cd6bf196b87ea/crc32c-2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e560a97fbb96c9897cb1d9b5076ef12fc12e2e25622530a1afd0de4240f17e1f", size = 66329, upload-time = "2025-10-17T06:19:01.771Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/38/2fe0051ffe8c6a650c8b1ac0da31b8802d1dbe5fa40a84e4b6b6f5583db5/crc32c-2.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6762d276d90331a490ef7e71ffee53b9c0eb053bd75a272d786f3b08d3fe3671", size = 62988, upload-time = "2025-10-17T06:19:02.953Z" }, + { url = "https://files.pythonhosted.org/packages/3e/30/5837a71c014be83aba1469c58820d287fc836512a0cad6b8fdd43868accd/crc32c-2.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:60670569f5ede91e39f48fb0cb4060e05b8d8704dd9e17ede930bf441b2f73ef", size = 61522, upload-time = "2025-10-17T06:19:03.796Z" }, + { url = "https://files.pythonhosted.org/packages/ca/29/63972fc1452778e2092ae998c50cbfc2fc93e3fa9798a0278650cd6169c5/crc32c-2.8-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:711743da6ccc70b3c6718c328947b0b6f34a1fe6a6c27cc6c1d69cc226bf70e9", size = 80200, upload-time = "2025-10-17T06:19:04.617Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3a/60eb49d7bdada4122b3ffd45b0df54bdc1b8dd092cda4b069a287bdfcff4/crc32c-2.8-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5eb4094a2054774f13b26f21bf56792bb44fa1fcee6c6ad099387a43ffbfb4fa", size = 81757, upload-time = "2025-10-17T06:19:05.496Z" }, + { url = "https://files.pythonhosted.org/packages/f5/63/6efc1b64429ef7d23bd58b75b7ac24d15df327e3ebbe9c247a0f7b1c2ed1/crc32c-2.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fff15bf2bd3e95780516baae935ed12be88deaa5ebe6143c53eb0d26a7bdc7b7", size = 80830, upload-time = "2025-10-17T06:19:06.621Z" }, + { url = "https://files.pythonhosted.org/packages/e1/eb/0ae9f436f8004f1c88f7429e659a7218a3879bd11a6b18ed1257aad7e98b/crc32c-2.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4c0e11e3826668121fa53e0745635baf5e4f0ded437e8ff63ea56f38fc4f970a", size = 80095, upload-time = "2025-10-17T06:19:07.381Z" }, + { url = "https://files.pythonhosted.org/packages/9e/81/4afc9d468977a4cd94a2eb62908553345009a7c0d30e74463a15d4b48ec3/crc32c-2.8-cp311-cp311-win32.whl", hash = "sha256:38f915336715d1f1353ab07d7d786f8a789b119e273aea106ba55355dfc9101d", size = 64886, upload-time = "2025-10-17T06:19:08.497Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e8/94e839c9f7e767bf8479046a207afd440a08f5c59b52586e1af5e64fa4a0/crc32c-2.8-cp311-cp311-win_amd64.whl", hash = "sha256:60e0a765b1caab8d31b2ea80840639253906a9351d4b861551c8c8625ea20f86", size = 66639, upload-time = "2025-10-17T06:19:09.338Z" }, + { url = "https://files.pythonhosted.org/packages/b6/36/fd18ef23c42926b79c7003e16cb0f79043b5b179c633521343d3b499e996/crc32c-2.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:572ffb1b78cce3d88e8d4143e154d31044a44be42cb3f6fbbf77f1e7a941c5ab", size = 66379, upload-time = "2025-10-17T06:19:10.115Z" }, + { url = "https://files.pythonhosted.org/packages/7f/b8/c584958e53f7798dd358f5bdb1bbfc97483134f053ee399d3eeb26cca075/crc32c-2.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cf827b3758ee0c4aacd21ceca0e2da83681f10295c38a10bfeb105f7d98f7a68", size = 63042, upload-time = "2025-10-17T06:19:10.946Z" }, + { url = "https://files.pythonhosted.org/packages/62/e6/6f2af0ec64a668a46c861e5bc778ea3ee42171fedfc5440f791f470fd783/crc32c-2.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:106fbd79013e06fa92bc3b51031694fcc1249811ed4364ef1554ee3dd2c7f5a2", size = 61528, upload-time = "2025-10-17T06:19:11.768Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/8b/4a04bd80a024f1a23978f19ae99407783e06549e361ab56e9c08bba3c1d3/crc32c-2.8-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6dde035f91ffbfe23163e68605ee5a4bb8ceebd71ed54bb1fb1d0526cdd125a2", size = 80028, upload-time = "2025-10-17T06:19:12.554Z" }, + { url = "https://files.pythonhosted.org/packages/21/8f/01c7afdc76ac2007d0e6a98e7300b4470b170480f8188475b597d1f4b4c6/crc32c-2.8-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e41ebe7c2f0fdcd9f3a3fd206989a36b460b4d3f24816d53e5be6c7dba72c5e1", size = 81531, upload-time = "2025-10-17T06:19:13.406Z" }, + { url = "https://files.pythonhosted.org/packages/32/2b/8f78c5a8cc66486be5f51b6f038fc347c3ba748d3ea68be17a014283c331/crc32c-2.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ecf66cf90266d9c15cea597d5cc86c01917cd1a238dc3c51420c7886fa750d7e", size = 80608, upload-time = "2025-10-17T06:19:14.223Z" }, + { url = "https://files.pythonhosted.org/packages/db/86/fad1a94cdeeeb6b6e2323c87f970186e74bfd6fbfbc247bf5c88ad0873d5/crc32c-2.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:59eee5f3a69ad0793d5fa9cdc9b9d743b0cd50edf7fccc0a3988a821fef0208c", size = 79886, upload-time = "2025-10-17T06:19:15.345Z" }, + { url = "https://files.pythonhosted.org/packages/d5/db/1a7cb6757a1e32376fa2dfce00c815ea4ee614a94f9bff8228e37420c183/crc32c-2.8-cp312-cp312-win32.whl", hash = "sha256:a73d03ce3604aa5d7a2698e9057a0eef69f529c46497b27ee1c38158e90ceb76", size = 64896, upload-time = "2025-10-17T06:19:16.457Z" }, + { url = "https://files.pythonhosted.org/packages/bf/8e/2024de34399b2e401a37dcb54b224b56c747b0dc46de4966886827b4d370/crc32c-2.8-cp312-cp312-win_amd64.whl", hash = "sha256:56b3b7d015247962cf58186e06d18c3d75a1a63d709d3233509e1c50a2d36aa2", size = 66645, upload-time = "2025-10-17T06:19:17.235Z" }, + { url = "https://files.pythonhosted.org/packages/a7/1d/dd926c68eb8aac8b142a1a10b8eb62d95212c1cf81775644373fe7cceac2/crc32c-2.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5833f4071da7ea182c514ba17d1eee8aec3c5be927d798222fbfbbd0f5eea02c", size = 62345, upload-time = "2025-10-17T06:20:09.39Z" }, + { url = "https://files.pythonhosted.org/packages/51/be/803404e5abea2ef2c15042edca04bbb7f625044cca879e47f186b43887c2/crc32c-2.8-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:1dc4da036126ac07b39dd9d03e93e585ec615a2ad28ff12757aef7de175295a8", size = 61229, upload-time = "2025-10-17T06:20:10.236Z" }, + { url = "https://files.pythonhosted.org/packages/fc/3a/00cc578cd27ed0b22c9be25cef2c24539d92df9fa80ebd67a3fc5419724c/crc32c-2.8-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:15905fa78344654e241371c47e6ed2411f9eeb2b8095311c68c88eccf541e8b4", size = 64108, upload-time = "2025-10-17T06:20:11.072Z" }, + { url = "https://files.pythonhosted.org/packages/6b/bc/0587ef99a1c7629f95dd0c9d4f3d894de383a0df85831eb16c48a6afdae4/crc32c-2.8-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c596f918688821f796434e89b431b1698396c38bf0b56de873621528fe3ecb1e", size = 64815, upload-time = "2025-10-17T06:20:11.919Z" }, + { url = "https://files.pythonhosted.org/packages/73/42/94f2b8b92eae9064fcfb8deef2b971514065bd606231f8857ff8ae02bebd/crc32c-2.8-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8d23c4fe01b3844cb6e091044bc1cebdef7d16472e058ce12d9fadf10d2614af", size = 66659, upload-time = "2025-10-17T06:20:12.766Z" }, ] 
[[package]] name = "crcmod" version = "1.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6b/b0/e595ce2a2527e169c3bcd6c33d2473c1918e0b7f6826a043ca1245dd4e5b/crcmod-1.7.tar.gz", hash = "sha256:dc7051a0db5f2bd48665a990d3ec1cc305a466a77358ca4492826f41f283601e", size = 89670 } +sdist = { url = "https://files.pythonhosted.org/packages/6b/b0/e595ce2a2527e169c3bcd6c33d2473c1918e0b7f6826a043ca1245dd4e5b/crcmod-1.7.tar.gz", hash = "sha256:dc7051a0db5f2bd48665a990d3ec1cc305a466a77358ca4492826f41f283601e", size = 89670, upload-time = "2010-06-27T14:35:29.538Z" } [[package]] name = "croniter" @@ -1214,9 +1230,9 @@ dependencies = [ { name = "python-dateutil" }, { name = "pytz" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/2f/44d1ae153a0e27be56be43465e5cb39b9650c781e001e7864389deb25090/croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577", size = 64481 } +sdist = { url = "https://files.pythonhosted.org/packages/ad/2f/44d1ae153a0e27be56be43465e5cb39b9650c781e001e7864389deb25090/croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577", size = 64481, upload-time = "2024-12-17T17:17:47.32Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468 }, + { url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468, upload-time = "2024-12-17T17:17:45.359Z" }, ] [[package]] @@ -1226,44 +1242,44 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258 } +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004 }, - { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667 }, - { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807 }, - { url = 
"https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615 }, - { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800 }, - { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707 }, - { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541 }, - { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464 }, - { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838 }, - { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596 }, - { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782 }, - { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381 }, - { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988 }, - { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451 }, - { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007 }, - { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248 }, - { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089 }, - { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029 }, - { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222 }, - { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280 }, - { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958 }, - { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714 }, - { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970 }, - { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236 }, - { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642 }, - { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126 }, - { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573 }, - { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695 }, - { url = 
"https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720 }, - { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740 }, - { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132 }, - { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992 }, - { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944 }, - { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957 }, - { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447 }, - { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528 }, + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" 
}, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, + { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, + { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, + { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, ] [[package]] @@ -1275,9 +1291,9 @@ dependencies = [ { name = "protobuf" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/7f/cfb2a00d10f6295332616e5b22f2ae3aaf2841a3afa6c49262acb6b94f5b/databricks_sdk-0.73.0.tar.gz", hash = "sha256:db09eaaacd98e07dded78d3e7ab47d2f6c886e0380cb577977bd442bace8bd8d", size = 801017 } +sdist = { url = 
"https://files.pythonhosted.org/packages/a8/7f/cfb2a00d10f6295332616e5b22f2ae3aaf2841a3afa6c49262acb6b94f5b/databricks_sdk-0.73.0.tar.gz", hash = "sha256:db09eaaacd98e07dded78d3e7ab47d2f6c886e0380cb577977bd442bace8bd8d", size = 801017, upload-time = "2025-11-05T06:52:58.509Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/27/b822b474aaefb684d11df358d52e012699a2a8af231f9b47c54b73f280cb/databricks_sdk-0.73.0-py3-none-any.whl", hash = "sha256:a4d3cfd19357a2b459d2dc3101454d7f0d1b62865ce099c35d0c342b66ac64ff", size = 753896 }, + { url = "https://files.pythonhosted.org/packages/a7/27/b822b474aaefb684d11df358d52e012699a2a8af231f9b47c54b73f280cb/databricks_sdk-0.73.0-py3-none-any.whl", hash = "sha256:a4d3cfd19357a2b459d2dc3101454d7f0d1b62865ce099c35d0c342b66ac64ff", size = 753896, upload-time = "2025-11-05T06:52:56.451Z" }, ] [[package]] @@ -1288,27 +1304,42 @@ dependencies = [ { name = "marshmallow" }, { name = "typing-inspect" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227 } +sdist = { url = "https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227, upload-time = "2024-06-09T16:20:19.103Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686 }, + { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686, upload-time = "2024-06-09T16:20:16.715Z" }, +] + +[[package]] +name = "dateparser" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "regex" }, + { name = "tzlocal" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a9/30/064144f0df1749e7bb5faaa7f52b007d7c2d08ec08fed8411aba87207f68/dateparser-1.2.2.tar.gz", hash = "sha256:986316f17cb8cdc23ea8ce563027c5ef12fc725b6fb1d137c14ca08777c5ecf7", size = 329840, upload-time = "2025-06-26T09:29:23.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/22/f020c047ae1346613db9322638186468238bcfa8849b4668a22b97faad65/dateparser-1.2.2-py3-none-any.whl", hash = "sha256:5a5d7211a09013499867547023a2a0c91d5a27d15dd4dbcea676ea9fe66f2482", size = 315453, upload-time = "2025-06-26T09:29:21.412Z" }, ] [[package]] name = "decorator" version = "5.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711 } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } wheels = [ 
- { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190 }, + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, ] [[package]] name = "defusedxml" version = "0.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520 } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604 }, + { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, ] [[package]] @@ -1318,9 +1349,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/49/85/12f0a49a7c4ffb70572b6c2ef13c90c88fd190debda93b23f026b25f9634/deprecated-1.3.1.tar.gz", hash = "sha256:b1b50e0ff0c1fddaa5708a2c6b0a6588bb09b892825ab2b214ac9ea9d92a5223", size = 2932523 } +sdist = { url = "https://files.pythonhosted.org/packages/49/85/12f0a49a7c4ffb70572b6c2ef13c90c88fd190debda93b23f026b25f9634/deprecated-1.3.1.tar.gz", hash = "sha256:b1b50e0ff0c1fddaa5708a2c6b0a6588bb09b892825ab2b214ac9ea9d92a5223", size = 2932523, upload-time = "2025-10-30T08:19:02.757Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl", hash = "sha256:597bfef186b6f60181535a29fbe44865ce137a5079f295b479886c82729d5f3f", size = 11298 }, + { url = "https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl", hash = "sha256:597bfef186b6f60181535a29fbe44865ce137a5079f295b479886c82729d5f3f", size = 11298, upload-time = "2025-10-30T08:19:00.758Z" }, ] [[package]] @@ -1330,16 +1361,17 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/d3/8ae2869247df154b64c1884d7346d412fed0c49df84db635aab2d1c40e62/deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff", size = 173788 } +sdist = { url = "https://files.pythonhosted.org/packages/5a/d3/8ae2869247df154b64c1884d7346d412fed0c49df84db635aab2d1c40e62/deprecation-2.1.0.tar.gz", hash = 
"sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff", size = 173788, upload-time = "2020-04-20T14:23:38.738Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a", size = 11178 }, + { url = "https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a", size = 11178, upload-time = "2020-04-20T14:23:36.581Z" }, ] [[package]] name = "dify-api" -version = "1.11.1" +version = "1.11.2" source = { virtual = "." } dependencies = [ + { name = "aliyun-log-python-sdk" }, { name = "apscheduler" }, { name = "arize-phoenix-otel" }, { name = "azure-identity" }, @@ -1537,6 +1569,7 @@ vdb = [ [package.metadata] requires-dist = [ + { name = "aliyun-log-python-sdk", specifier = "~=0.9.37" }, { name = "apscheduler", specifier = ">=3.11.0" }, { name = "arize-phoenix-otel", specifier = "~=0.9.2" }, { name = "azure-identity", specifier = "==1.16.1" }, @@ -1603,7 +1636,7 @@ requires-dist = [ { name = "pydantic-extra-types", specifier = "~=2.10.3" }, { name = "pydantic-settings", specifier = "~=2.11.0" }, { name = "pyjwt", specifier = "~=2.10.1" }, - { name = "pypdfium2", specifier = "==4.30.0" }, + { name = "pypdfium2", specifier = "==5.2.0" }, { name = "python-docx", specifier = "~=1.1.0" }, { name = "python-dotenv", specifier = "==1.0.1" }, { name = "pyyaml", specifier = "~=6.0.1" }, @@ -1698,7 +1731,7 @@ storage = [ { name = "opendal", specifier = "~=0.46.0" }, { name = "oss2", specifier = "==2.18.5" }, { name = "supabase", specifier = "~=2.18.1" }, - { name = "tos", specifier = "~=2.7.1" }, + { name = "tos", specifier = "~=2.9.0" }, ] tools = [ { name = "cloudscraper", specifier = "~=1.2.71" }, @@ -1736,18 +1769,18 @@ vdb = [ name = "diskcache" version = "5.6.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916 } +sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916, upload-time = "2023-08-31T06:12:00.316Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550 }, + { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550, upload-time = "2023-08-31T06:11:58.822Z" }, ] [[package]] name = "distro" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 } +sdist = { 
url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 }, + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, ] [[package]] @@ -1759,18 +1792,18 @@ dependencies = [ { name = "requests" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 } +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 }, + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, ] [[package]] name = "docstring-parser" version = "0.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442 } +sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896 }, + { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, ] [[package]] @@ -1784,18 +1817,18 @@ dependencies = [ { name = "ply" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ef/fe/77e184ccc312f6263cbcc48a9579eec99f5c7ff72a9b1bd7812cafc22bbb/dotenv_linter-0.5.0.tar.gz", hash = 
"sha256:4862a8393e5ecdfb32982f1b32dbc006fff969a7b3c8608ba7db536108beeaea", size = 15346 } +sdist = { url = "https://files.pythonhosted.org/packages/ef/fe/77e184ccc312f6263cbcc48a9579eec99f5c7ff72a9b1bd7812cafc22bbb/dotenv_linter-0.5.0.tar.gz", hash = "sha256:4862a8393e5ecdfb32982f1b32dbc006fff969a7b3c8608ba7db536108beeaea", size = 15346, upload-time = "2024-03-13T11:52:10.52Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/01/62ed4374340e6cf17c5084828974d96db8085e4018439ac41dc3cbbbcab3/dotenv_linter-0.5.0-py3-none-any.whl", hash = "sha256:fd01cca7f2140cb1710f49cbc1bf0e62397a75a6f0522d26a8b9b2331143c8bd", size = 21770 }, + { url = "https://files.pythonhosted.org/packages/f0/01/62ed4374340e6cf17c5084828974d96db8085e4018439ac41dc3cbbbcab3/dotenv_linter-0.5.0-py3-none-any.whl", hash = "sha256:fd01cca7f2140cb1710f49cbc1bf0e62397a75a6f0522d26a8b9b2331143c8bd", size = 21770, upload-time = "2024-03-13T11:52:08.607Z" }, ] [[package]] name = "durationpy" version = "0.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9d/a4/e44218c2b394e31a6dd0d6b095c4e1f32d0be54c2a4b250032d717647bab/durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba", size = 3335 } +sdist = { url = "https://files.pythonhosted.org/packages/9d/a4/e44218c2b394e31a6dd0d6b095c4e1f32d0be54c2a4b250032d717647bab/durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba", size = 3335, upload-time = "2025-05-17T13:52:37.26Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922 }, + { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" }, ] [[package]] @@ -1806,9 +1839,9 @@ dependencies = [ { name = "certifi" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6a/54/d498a766ac8fa475f931da85a154666cc81a70f8eb4a780bc8e4e934e9ac/elastic_transport-8.17.1.tar.gz", hash = "sha256:5edef32ac864dca8e2f0a613ef63491ee8d6b8cfb52881fa7313ba9290cac6d2", size = 73425 } +sdist = { url = "https://files.pythonhosted.org/packages/6a/54/d498a766ac8fa475f931da85a154666cc81a70f8eb4a780bc8e4e934e9ac/elastic_transport-8.17.1.tar.gz", hash = "sha256:5edef32ac864dca8e2f0a613ef63491ee8d6b8cfb52881fa7313ba9290cac6d2", size = 73425, upload-time = "2025-03-13T07:28:30.776Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/cd/b71d5bc74cde7fc6fd9b2ff9389890f45d9762cbbbf81dc5e51fd7588c4a/elastic_transport-8.17.1-py3-none-any.whl", hash = "sha256:192718f498f1d10c5e9aa8b9cf32aed405e469a7f0e9d6a8923431dbb2c59fb8", size = 64969 }, + { url = "https://files.pythonhosted.org/packages/cf/cd/b71d5bc74cde7fc6fd9b2ff9389890f45d9762cbbbf81dc5e51fd7588c4a/elastic_transport-8.17.1-py3-none-any.whl", hash = "sha256:192718f498f1d10c5e9aa8b9cf32aed405e469a7f0e9d6a8923431dbb2c59fb8", size = 64969, upload-time = "2025-03-13T07:28:29.031Z" }, ] [[package]] @@ -1818,18 +1851,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "elastic-transport" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/36/63/8dc82cbf1bfbca2a2af8eeaa4a7eccc2cf7a87bf217130f6bc66d33b4d8f/elasticsearch-8.14.0.tar.gz", hash = "sha256:aa2490029dd96f4015b333c1827aa21fd6c0a4d223b00dfb0fe933b8d09a511b", size = 382506 } +sdist = { url = "https://files.pythonhosted.org/packages/36/63/8dc82cbf1bfbca2a2af8eeaa4a7eccc2cf7a87bf217130f6bc66d33b4d8f/elasticsearch-8.14.0.tar.gz", hash = "sha256:aa2490029dd96f4015b333c1827aa21fd6c0a4d223b00dfb0fe933b8d09a511b", size = 382506, upload-time = "2024-06-06T13:31:10.205Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/09/c9dec8bd95bff6aaa8fe29a834257a6606608d0b2ed9932a1857683f736f/elasticsearch-8.14.0-py3-none-any.whl", hash = "sha256:cef8ef70a81af027f3da74a4f7d9296b390c636903088439087b8262a468c130", size = 480236 }, + { url = "https://files.pythonhosted.org/packages/a2/09/c9dec8bd95bff6aaa8fe29a834257a6606608d0b2ed9932a1857683f736f/elasticsearch-8.14.0-py3-none-any.whl", hash = "sha256:cef8ef70a81af027f3da74a4f7d9296b390c636903088439087b8262a468c130", size = 480236, upload-time = "2024-06-06T13:31:00.987Z" }, ] [[package]] name = "emoji" version = "2.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/78/0d2db9382c92a163d7095fc08efff7800880f830a152cfced40161e7638d/emoji-2.15.0.tar.gz", hash = "sha256:eae4ab7d86456a70a00a985125a03263a5eac54cd55e51d7e184b1ed3b6757e4", size = 615483 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/78/0d2db9382c92a163d7095fc08efff7800880f830a152cfced40161e7638d/emoji-2.15.0.tar.gz", hash = "sha256:eae4ab7d86456a70a00a985125a03263a5eac54cd55e51d7e184b1ed3b6757e4", size = 615483, upload-time = "2025-09-21T12:13:02.755Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/5e/4b5aaaabddfacfe36ba7768817bd1f71a7a810a43705e531f3ae4c690767/emoji-2.15.0-py3-none-any.whl", hash = "sha256:205296793d66a89d88af4688fa57fd6496732eb48917a87175a023c8138995eb", size = 608433 }, + { url = "https://files.pythonhosted.org/packages/e1/5e/4b5aaaabddfacfe36ba7768817bd1f71a7a810a43705e531f3ae4c690767/emoji-2.15.0-py3-none-any.whl", hash = "sha256:205296793d66a89d88af4688fa57fd6496732eb48917a87175a023c8138995eb", size = 608433, upload-time = "2025-09-21T12:13:01.197Z" }, ] [[package]] @@ -1841,24 +1874,24 @@ dependencies = [ { name = "pycryptodome" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/40/99/52362d6e081a642d6de78f6ab53baa5e3f82f2386c48954e18ee7b4ab22b/esdk-obs-python-3.25.8.tar.gz", hash = "sha256:aeded00b27ecd5a25ffaec38a2cc9416b51923d48db96c663f1a735f859b5273", size = 96302 } +sdist = { url = "https://files.pythonhosted.org/packages/40/99/52362d6e081a642d6de78f6ab53baa5e3f82f2386c48954e18ee7b4ab22b/esdk-obs-python-3.25.8.tar.gz", hash = "sha256:aeded00b27ecd5a25ffaec38a2cc9416b51923d48db96c663f1a735f859b5273", size = 96302, upload-time = "2025-09-01T11:35:20.432Z" } [[package]] name = "et-xmlfile" version = "2.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234 } +sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234, upload-time = 
"2024-10-25T17:25:40.039Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059 }, + { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" }, ] [[package]] name = "eval-type-backport" version = "0.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/51/23/079e39571d6dd8d90d7a369ecb55ad766efb6bae4e77389629e14458c280/eval_type_backport-0.3.0.tar.gz", hash = "sha256:1638210401e184ff17f877e9a2fa076b60b5838790f4532a21761cc2be67aea1", size = 9272 } +sdist = { url = "https://files.pythonhosted.org/packages/51/23/079e39571d6dd8d90d7a369ecb55ad766efb6bae4e77389629e14458c280/eval_type_backport-0.3.0.tar.gz", hash = "sha256:1638210401e184ff17f877e9a2fa076b60b5838790f4532a21761cc2be67aea1", size = 9272, upload-time = "2025-11-13T20:56:50.845Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/d8/2a1c638d9e0aa7e269269a1a1bf423ddd94267f1a01bbe3ad03432b67dd4/eval_type_backport-0.3.0-py3-none-any.whl", hash = "sha256:975a10a0fe333c8b6260d7fdb637698c9a16c3a9e3b6eb943fee6a6f67a37fe8", size = 6061 }, + { url = "https://files.pythonhosted.org/packages/19/d8/2a1c638d9e0aa7e269269a1a1bf423ddd94267f1a01bbe3ad03432b67dd4/eval_type_backport-0.3.0-py3-none-any.whl", hash = "sha256:975a10a0fe333c8b6260d7fdb637698c9a16c3a9e3b6eb943fee6a6f67a37fe8", size = 6061, upload-time = "2025-11-13T20:56:49.499Z" }, ] [[package]] @@ -1868,9 +1901,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "tzdata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/64/27/022d4dbd4c20567b4c294f79a133cc2f05240ea61e0d515ead18c995c249/faker-38.2.0.tar.gz", hash = "sha256:20672803db9c7cb97f9b56c18c54b915b6f1d8991f63d1d673642dc43f5ce7ab", size = 1941469 } +sdist = { url = "https://files.pythonhosted.org/packages/64/27/022d4dbd4c20567b4c294f79a133cc2f05240ea61e0d515ead18c995c249/faker-38.2.0.tar.gz", hash = "sha256:20672803db9c7cb97f9b56c18c54b915b6f1d8991f63d1d673642dc43f5ce7ab", size = 1941469, upload-time = "2025-11-19T16:37:31.892Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/93/00c94d45f55c336434a15f98d906387e87ce28f9918e4444829a8fda432d/faker-38.2.0-py3-none-any.whl", hash = "sha256:35fe4a0a79dee0dc4103a6083ee9224941e7d3594811a50e3969e547b0d2ee65", size = 1980505 }, + { url = "https://files.pythonhosted.org/packages/17/93/00c94d45f55c336434a15f98d906387e87ce28f9918e4444829a8fda432d/faker-38.2.0-py3-none-any.whl", hash = "sha256:35fe4a0a79dee0dc4103a6083ee9224941e7d3594811a50e3969e547b0d2ee65", size = 1980505, upload-time = "2025-11-19T16:37:30.208Z" }, ] [[package]] @@ -1883,69 +1916,69 @@ dependencies = [ { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b2/de/3ee97a4f6ffef1fb70bf20561e4f88531633bb5045dc6cebc0f8471f764d/fastapi-0.122.0.tar.gz", hash = "sha256:cd9b5352031f93773228af8b4c443eedc2ac2aa74b27780387b853c3726fb94b", size = 346436 } +sdist = { url = "https://files.pythonhosted.org/packages/b2/de/3ee97a4f6ffef1fb70bf20561e4f88531633bb5045dc6cebc0f8471f764d/fastapi-0.122.0.tar.gz", hash = 
"sha256:cd9b5352031f93773228af8b4c443eedc2ac2aa74b27780387b853c3726fb94b", size = 346436, upload-time = "2025-11-24T19:17:47.95Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/93/aa8072af4ff37b795f6bbf43dcaf61115f40f49935c7dbb180c9afc3f421/fastapi-0.122.0-py3-none-any.whl", hash = "sha256:a456e8915dfc6c8914a50d9651133bd47ec96d331c5b44600baa635538a30d67", size = 110671 }, + { url = "https://files.pythonhosted.org/packages/7a/93/aa8072af4ff37b795f6bbf43dcaf61115f40f49935c7dbb180c9afc3f421/fastapi-0.122.0-py3-none-any.whl", hash = "sha256:a456e8915dfc6c8914a50d9651133bd47ec96d331c5b44600baa635538a30d67", size = 110671, upload-time = "2025-11-24T19:17:45.96Z" }, ] [[package]] name = "fastuuid" version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c3/7d/d9daedf0f2ebcacd20d599928f8913e9d2aea1d56d2d355a93bfa2b611d7/fastuuid-0.14.0.tar.gz", hash = "sha256:178947fc2f995b38497a74172adee64fdeb8b7ec18f2a5934d037641ba265d26", size = 18232 } +sdist = { url = "https://files.pythonhosted.org/packages/c3/7d/d9daedf0f2ebcacd20d599928f8913e9d2aea1d56d2d355a93bfa2b611d7/fastuuid-0.14.0.tar.gz", hash = "sha256:178947fc2f995b38497a74172adee64fdeb8b7ec18f2a5934d037641ba265d26", size = 18232, upload-time = "2025-10-19T22:19:22.402Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/f3/12481bda4e5b6d3e698fbf525df4443cc7dce746f246b86b6fcb2fba1844/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:73946cb950c8caf65127d4e9a325e2b6be0442a224fd51ba3b6ac44e1912ce34", size = 516386 }, - { url = "https://files.pythonhosted.org/packages/59/19/2fc58a1446e4d72b655648eb0879b04e88ed6fa70d474efcf550f640f6ec/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:12ac85024637586a5b69645e7ed986f7535106ed3013640a393a03e461740cb7", size = 264569 }, - { url = "https://files.pythonhosted.org/packages/78/29/3c74756e5b02c40cfcc8b1d8b5bac4edbd532b55917a6bcc9113550e99d1/fastuuid-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:05a8dde1f395e0c9b4be515b7a521403d1e8349443e7641761af07c7ad1624b1", size = 254366 }, - { url = "https://files.pythonhosted.org/packages/52/96/d761da3fccfa84f0f353ce6e3eb8b7f76b3aa21fd25e1b00a19f9c80a063/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09378a05020e3e4883dfdab438926f31fea15fd17604908f3d39cbeb22a0b4dc", size = 278978 }, - { url = "https://files.pythonhosted.org/packages/fc/c2/f84c90167cc7765cb82b3ff7808057608b21c14a38531845d933a4637307/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbb0c4b15d66b435d2538f3827f05e44e2baafcc003dd7d8472dc67807ab8fd8", size = 279692 }, - { url = "https://files.pythonhosted.org/packages/af/7b/4bacd03897b88c12348e7bd77943bac32ccf80ff98100598fcff74f75f2e/fastuuid-0.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd5a7f648d4365b41dbf0e38fe8da4884e57bed4e77c83598e076ac0c93995e7", size = 303384 }, - { url = "https://files.pythonhosted.org/packages/c0/a2/584f2c29641df8bd810d00c1f21d408c12e9ad0c0dafdb8b7b29e5ddf787/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c0a94245afae4d7af8c43b3159d5e3934c53f47140be0be624b96acd672ceb73", size = 460921 }, - { url = "https://files.pythonhosted.org/packages/24/68/c6b77443bb7764c760e211002c8638c0c7cce11cb584927e723215ba1398/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:2b29e23c97e77c3a9514d70ce343571e469098ac7f5a269320a0f0b3e193ab36", size = 480575 }, - { url = "https://files.pythonhosted.org/packages/5a/87/93f553111b33f9bb83145be12868c3c475bf8ea87c107063d01377cc0e8e/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1e690d48f923c253f28151b3a6b4e335f2b06bf669c68a02665bc150b7839e94", size = 452317 }, - { url = "https://files.pythonhosted.org/packages/9e/8c/a04d486ca55b5abb7eaa65b39df8d891b7b1635b22db2163734dc273579a/fastuuid-0.14.0-cp311-cp311-win32.whl", hash = "sha256:a6f46790d59ab38c6aa0e35c681c0484b50dc0acf9e2679c005d61e019313c24", size = 154804 }, - { url = "https://files.pythonhosted.org/packages/9c/b2/2d40bf00820de94b9280366a122cbaa60090c8cf59e89ac3938cf5d75895/fastuuid-0.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:e150eab56c95dc9e3fefc234a0eedb342fac433dacc273cd4d150a5b0871e1fa", size = 156099 }, - { url = "https://files.pythonhosted.org/packages/02/a2/e78fcc5df65467f0d207661b7ef86c5b7ac62eea337c0c0fcedbeee6fb13/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77e94728324b63660ebf8adb27055e92d2e4611645bf12ed9d88d30486471d0a", size = 510164 }, - { url = "https://files.pythonhosted.org/packages/2b/b3/c846f933f22f581f558ee63f81f29fa924acd971ce903dab1a9b6701816e/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:caa1f14d2102cb8d353096bc6ef6c13b2c81f347e6ab9d6fbd48b9dea41c153d", size = 261837 }, - { url = "https://files.pythonhosted.org/packages/54/ea/682551030f8c4fa9a769d9825570ad28c0c71e30cf34020b85c1f7ee7382/fastuuid-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23ef06f9e67163be38cece704170486715b177f6baae338110983f99a72c070", size = 251370 }, - { url = "https://files.pythonhosted.org/packages/14/dd/5927f0a523d8e6a76b70968e6004966ee7df30322f5fc9b6cdfb0276646a/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c9ec605ace243b6dbe3bd27ebdd5d33b00d8d1d3f580b39fdd15cd96fd71796", size = 277766 }, - { url = "https://files.pythonhosted.org/packages/16/6e/c0fb547eef61293153348f12e0f75a06abb322664b34a1573a7760501336/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:808527f2407f58a76c916d6aa15d58692a4a019fdf8d4c32ac7ff303b7d7af09", size = 278105 }, - { url = "https://files.pythonhosted.org/packages/2d/b1/b9c75e03b768f61cf2e84ee193dc18601aeaf89a4684b20f2f0e9f52b62c/fastuuid-0.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fb3c0d7fef6674bbeacdd6dbd386924a7b60b26de849266d1ff6602937675c8", size = 301564 }, - { url = "https://files.pythonhosted.org/packages/fc/fa/f7395fdac07c7a54f18f801744573707321ca0cee082e638e36452355a9d/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab3f5d36e4393e628a4df337c2c039069344db5f4b9d2a3c9cea48284f1dd741", size = 459659 }, - { url = "https://files.pythonhosted.org/packages/66/49/c9fd06a4a0b1f0f048aacb6599e7d96e5d6bc6fa680ed0d46bf111929d1b/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b9a0ca4f03b7e0b01425281ffd44e99d360e15c895f1907ca105854ed85e2057", size = 478430 }, - { url = "https://files.pythonhosted.org/packages/be/9c/909e8c95b494e8e140e8be6165d5fc3f61fdc46198c1554df7b3e1764471/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3acdf655684cc09e60fb7e4cf524e8f42ea760031945aa8086c7eae2eeeabeb8", size = 450894 }, - { url = 
"https://files.pythonhosted.org/packages/90/eb/d29d17521976e673c55ef7f210d4cdd72091a9ec6755d0fd4710d9b3c871/fastuuid-0.14.0-cp312-cp312-win32.whl", hash = "sha256:9579618be6280700ae36ac42c3efd157049fe4dd40ca49b021280481c78c3176", size = 154374 }, - { url = "https://files.pythonhosted.org/packages/cc/fc/f5c799a6ea6d877faec0472d0b27c079b47c86b1cdc577720a5386483b36/fastuuid-0.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:d9e4332dc4ba054434a9594cbfaf7823b57993d7d8e7267831c3e059857cf397", size = 156550 }, + { url = "https://files.pythonhosted.org/packages/98/f3/12481bda4e5b6d3e698fbf525df4443cc7dce746f246b86b6fcb2fba1844/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:73946cb950c8caf65127d4e9a325e2b6be0442a224fd51ba3b6ac44e1912ce34", size = 516386, upload-time = "2025-10-19T22:42:40.176Z" }, + { url = "https://files.pythonhosted.org/packages/59/19/2fc58a1446e4d72b655648eb0879b04e88ed6fa70d474efcf550f640f6ec/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:12ac85024637586a5b69645e7ed986f7535106ed3013640a393a03e461740cb7", size = 264569, upload-time = "2025-10-19T22:25:50.977Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/3c74756e5b02c40cfcc8b1d8b5bac4edbd532b55917a6bcc9113550e99d1/fastuuid-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:05a8dde1f395e0c9b4be515b7a521403d1e8349443e7641761af07c7ad1624b1", size = 254366, upload-time = "2025-10-19T22:29:49.166Z" }, + { url = "https://files.pythonhosted.org/packages/52/96/d761da3fccfa84f0f353ce6e3eb8b7f76b3aa21fd25e1b00a19f9c80a063/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09378a05020e3e4883dfdab438926f31fea15fd17604908f3d39cbeb22a0b4dc", size = 278978, upload-time = "2025-10-19T22:35:41.306Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c2/f84c90167cc7765cb82b3ff7808057608b21c14a38531845d933a4637307/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbb0c4b15d66b435d2538f3827f05e44e2baafcc003dd7d8472dc67807ab8fd8", size = 279692, upload-time = "2025-10-19T22:25:36.997Z" }, + { url = "https://files.pythonhosted.org/packages/af/7b/4bacd03897b88c12348e7bd77943bac32ccf80ff98100598fcff74f75f2e/fastuuid-0.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd5a7f648d4365b41dbf0e38fe8da4884e57bed4e77c83598e076ac0c93995e7", size = 303384, upload-time = "2025-10-19T22:29:46.578Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a2/584f2c29641df8bd810d00c1f21d408c12e9ad0c0dafdb8b7b29e5ddf787/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c0a94245afae4d7af8c43b3159d5e3934c53f47140be0be624b96acd672ceb73", size = 460921, upload-time = "2025-10-19T22:36:42.006Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/c6b77443bb7764c760e211002c8638c0c7cce11cb584927e723215ba1398/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b29e23c97e77c3a9514d70ce343571e469098ac7f5a269320a0f0b3e193ab36", size = 480575, upload-time = "2025-10-19T22:28:18.975Z" }, + { url = "https://files.pythonhosted.org/packages/5a/87/93f553111b33f9bb83145be12868c3c475bf8ea87c107063d01377cc0e8e/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1e690d48f923c253f28151b3a6b4e335f2b06bf669c68a02665bc150b7839e94", size = 452317, upload-time = "2025-10-19T22:25:32.75Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/8c/a04d486ca55b5abb7eaa65b39df8d891b7b1635b22db2163734dc273579a/fastuuid-0.14.0-cp311-cp311-win32.whl", hash = "sha256:a6f46790d59ab38c6aa0e35c681c0484b50dc0acf9e2679c005d61e019313c24", size = 154804, upload-time = "2025-10-19T22:24:15.615Z" }, + { url = "https://files.pythonhosted.org/packages/9c/b2/2d40bf00820de94b9280366a122cbaa60090c8cf59e89ac3938cf5d75895/fastuuid-0.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:e150eab56c95dc9e3fefc234a0eedb342fac433dacc273cd4d150a5b0871e1fa", size = 156099, upload-time = "2025-10-19T22:24:31.646Z" }, + { url = "https://files.pythonhosted.org/packages/02/a2/e78fcc5df65467f0d207661b7ef86c5b7ac62eea337c0c0fcedbeee6fb13/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77e94728324b63660ebf8adb27055e92d2e4611645bf12ed9d88d30486471d0a", size = 510164, upload-time = "2025-10-19T22:31:45.635Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b3/c846f933f22f581f558ee63f81f29fa924acd971ce903dab1a9b6701816e/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:caa1f14d2102cb8d353096bc6ef6c13b2c81f347e6ab9d6fbd48b9dea41c153d", size = 261837, upload-time = "2025-10-19T22:38:38.53Z" }, + { url = "https://files.pythonhosted.org/packages/54/ea/682551030f8c4fa9a769d9825570ad28c0c71e30cf34020b85c1f7ee7382/fastuuid-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23ef06f9e67163be38cece704170486715b177f6baae338110983f99a72c070", size = 251370, upload-time = "2025-10-19T22:40:26.07Z" }, + { url = "https://files.pythonhosted.org/packages/14/dd/5927f0a523d8e6a76b70968e6004966ee7df30322f5fc9b6cdfb0276646a/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c9ec605ace243b6dbe3bd27ebdd5d33b00d8d1d3f580b39fdd15cd96fd71796", size = 277766, upload-time = "2025-10-19T22:37:23.779Z" }, + { url = "https://files.pythonhosted.org/packages/16/6e/c0fb547eef61293153348f12e0f75a06abb322664b34a1573a7760501336/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:808527f2407f58a76c916d6aa15d58692a4a019fdf8d4c32ac7ff303b7d7af09", size = 278105, upload-time = "2025-10-19T22:26:56.821Z" }, + { url = "https://files.pythonhosted.org/packages/2d/b1/b9c75e03b768f61cf2e84ee193dc18601aeaf89a4684b20f2f0e9f52b62c/fastuuid-0.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fb3c0d7fef6674bbeacdd6dbd386924a7b60b26de849266d1ff6602937675c8", size = 301564, upload-time = "2025-10-19T22:30:31.604Z" }, + { url = "https://files.pythonhosted.org/packages/fc/fa/f7395fdac07c7a54f18f801744573707321ca0cee082e638e36452355a9d/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab3f5d36e4393e628a4df337c2c039069344db5f4b9d2a3c9cea48284f1dd741", size = 459659, upload-time = "2025-10-19T22:31:32.341Z" }, + { url = "https://files.pythonhosted.org/packages/66/49/c9fd06a4a0b1f0f048aacb6599e7d96e5d6bc6fa680ed0d46bf111929d1b/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b9a0ca4f03b7e0b01425281ffd44e99d360e15c895f1907ca105854ed85e2057", size = 478430, upload-time = "2025-10-19T22:26:22.962Z" }, + { url = "https://files.pythonhosted.org/packages/be/9c/909e8c95b494e8e140e8be6165d5fc3f61fdc46198c1554df7b3e1764471/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3acdf655684cc09e60fb7e4cf524e8f42ea760031945aa8086c7eae2eeeabeb8", size = 450894, upload-time = "2025-10-19T22:27:01.647Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/eb/d29d17521976e673c55ef7f210d4cdd72091a9ec6755d0fd4710d9b3c871/fastuuid-0.14.0-cp312-cp312-win32.whl", hash = "sha256:9579618be6280700ae36ac42c3efd157049fe4dd40ca49b021280481c78c3176", size = 154374, upload-time = "2025-10-19T22:29:19.879Z" }, + { url = "https://files.pythonhosted.org/packages/cc/fc/f5c799a6ea6d877faec0472d0b27c079b47c86b1cdc577720a5386483b36/fastuuid-0.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:d9e4332dc4ba054434a9594cbfaf7823b57993d7d8e7267831c3e059857cf397", size = 156550, upload-time = "2025-10-19T22:27:49.658Z" }, ] [[package]] name = "fickling" -version = "0.1.5" +version = "0.1.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "stdlib-list" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/41/94/0d0ce455952c036cfee235637f786c1d1d07d1b90f6a4dfb50e0eff929d6/fickling-0.1.5.tar.gz", hash = "sha256:92f9b49e717fa8dbc198b4b7b685587adb652d85aa9ede8131b3e44494efca05", size = 282462 } +sdist = { url = "https://files.pythonhosted.org/packages/07/ab/7571453f9365c17c047b5a7b7e82692a7f6be51203f295030886758fd57a/fickling-0.1.6.tar.gz", hash = "sha256:03cb5d7bd09f9169c7583d2079fad4b3b88b25f865ed0049172e5cb68582311d", size = 284033, upload-time = "2025-12-15T18:14:58.721Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/a7/d25912b2e3a5b0a37e6f460050bbc396042b5906a6563a1962c484abc3c6/fickling-0.1.5-py3-none-any.whl", hash = "sha256:6aed7270bfa276e188b0abe043a27b3a042129d28ec1fa6ff389bdcc5ad178bb", size = 46240 }, + { url = "https://files.pythonhosted.org/packages/76/99/cc04258dda421bc612cdfe4be8c253f45b922f1c7f268b5a0b9962d9cd12/fickling-0.1.6-py3-none-any.whl", hash = "sha256:465d0069548bfc731bdd75a583cb4cf5a4b2666739c0f76287807d724b147ed3", size = 47922, upload-time = "2025-12-15T18:14:57.526Z" }, ] [[package]] name = "filelock" version = "3.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922 } +sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054 }, + { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, ] [[package]] name = "filetype" version = "1.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/29/745f7d30d47fe0f251d3ad3dc2978a23141917661998763bebb6da007eb1/filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb", size = 998020 } +sdist = { url = "https://files.pythonhosted.org/packages/bb/29/745f7d30d47fe0f251d3ad3dc2978a23141917661998763bebb6da007eb1/filetype-1.2.0.tar.gz", hash = 
"sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb", size = 998020, upload-time = "2022-11-02T17:34:04.141Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/79/1b8fa1bb3568781e84c9200f951c735f3f157429f44be0495da55894d620/filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25", size = 19970 }, + { url = "https://files.pythonhosted.org/packages/18/79/1b8fa1bb3568781e84c9200f951c735f3f157429f44be0495da55894d620/filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25", size = 19970, upload-time = "2022-11-02T17:34:01.425Z" }, ] [[package]] @@ -1960,9 +1993,9 @@ dependencies = [ { name = "markupsafe" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dc/6d/cfe3c0fcc5e477df242b98bfe186a4c34357b4847e87ecaef04507332dab/flask-3.1.2.tar.gz", hash = "sha256:bf656c15c80190ed628ad08cdfd3aaa35beb087855e2f494910aa3774cc4fd87", size = 720160 } +sdist = { url = "https://files.pythonhosted.org/packages/dc/6d/cfe3c0fcc5e477df242b98bfe186a4c34357b4847e87ecaef04507332dab/flask-3.1.2.tar.gz", hash = "sha256:bf656c15c80190ed628ad08cdfd3aaa35beb087855e2f494910aa3774cc4fd87", size = 720160, upload-time = "2025-08-19T21:03:21.205Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/f9/7f9263c5695f4bd0023734af91bedb2ff8209e8de6ead162f35d8dc762fd/flask-3.1.2-py3-none-any.whl", hash = "sha256:ca1d8112ec8a6158cc29ea4858963350011b5c846a414cdb7a954aa9e967d03c", size = 103308 }, + { url = "https://files.pythonhosted.org/packages/ec/f9/7f9263c5695f4bd0023734af91bedb2ff8209e8de6ead162f35d8dc762fd/flask-3.1.2-py3-none-any.whl", hash = "sha256:ca1d8112ec8a6158cc29ea4858963350011b5c846a414cdb7a954aa9e967d03c", size = 103308, upload-time = "2025-08-19T21:03:19.499Z" }, ] [[package]] @@ -1976,9 +2009,9 @@ dependencies = [ { name = "zstandard" }, { name = "zstandard", marker = "platform_python_implementation == 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/1f/260db5a4517d59bfde7b4a0d71052df68fb84983bda9231100e3b80f5989/flask_compress-1.17.tar.gz", hash = "sha256:1ebb112b129ea7c9e7d6ee6d5cc0d64f226cbc50c4daddf1a58b9bd02253fbd8", size = 15733 } +sdist = { url = "https://files.pythonhosted.org/packages/cc/1f/260db5a4517d59bfde7b4a0d71052df68fb84983bda9231100e3b80f5989/flask_compress-1.17.tar.gz", hash = "sha256:1ebb112b129ea7c9e7d6ee6d5cc0d64f226cbc50c4daddf1a58b9bd02253fbd8", size = 15733, upload-time = "2024-10-14T08:13:33.196Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/54/ff08f947d07c0a8a5d8f1c8e57b142c97748ca912b259db6467ab35983cd/Flask_Compress-1.17-py3-none-any.whl", hash = "sha256:415131f197c41109f08e8fdfc3a6628d83d81680fb5ecd0b3a97410e02397b20", size = 8723 }, + { url = "https://files.pythonhosted.org/packages/f7/54/ff08f947d07c0a8a5d8f1c8e57b142c97748ca912b259db6467ab35983cd/Flask_Compress-1.17-py3-none-any.whl", hash = "sha256:415131f197c41109f08e8fdfc3a6628d83d81680fb5ecd0b3a97410e02397b20", size = 8723, upload-time = "2024-10-14T08:13:31.726Z" }, ] [[package]] @@ -1989,9 +2022,9 @@ dependencies = [ { name = "flask" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/37/bcfa6c7d5eec777c4c7cf45ce6b27631cebe5230caf88d85eadd63edd37a/flask_cors-6.0.1.tar.gz", hash = "sha256:d81bcb31f07b0985be7f48406247e9243aced229b7747219160a0559edd678db", size = 13463 } +sdist = { url = 
"https://files.pythonhosted.org/packages/76/37/bcfa6c7d5eec777c4c7cf45ce6b27631cebe5230caf88d85eadd63edd37a/flask_cors-6.0.1.tar.gz", hash = "sha256:d81bcb31f07b0985be7f48406247e9243aced229b7747219160a0559edd678db", size = 13463, upload-time = "2025-06-11T01:32:08.518Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/f8/01bf35a3afd734345528f98d0353f2a978a476528ad4d7e78b70c4d149dd/flask_cors-6.0.1-py3-none-any.whl", hash = "sha256:c7b2cbfb1a31aa0d2e5341eea03a6805349f7a61647daee1a15c46bbe981494c", size = 13244 }, + { url = "https://files.pythonhosted.org/packages/17/f8/01bf35a3afd734345528f98d0353f2a978a476528ad4d7e78b70c4d149dd/flask_cors-6.0.1-py3-none-any.whl", hash = "sha256:c7b2cbfb1a31aa0d2e5341eea03a6805349f7a61647daee1a15c46bbe981494c", size = 13244, upload-time = "2025-06-11T01:32:07.352Z" }, ] [[package]] @@ -2002,9 +2035,9 @@ dependencies = [ { name = "flask" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/6e/2f4e13e373bb49e68c02c51ceadd22d172715a06716f9299d9df01b6ddb2/Flask-Login-0.6.3.tar.gz", hash = "sha256:5e23d14a607ef12806c699590b89d0f0e0d67baeec599d75947bf9c147330333", size = 48834 } +sdist = { url = "https://files.pythonhosted.org/packages/c3/6e/2f4e13e373bb49e68c02c51ceadd22d172715a06716f9299d9df01b6ddb2/Flask-Login-0.6.3.tar.gz", hash = "sha256:5e23d14a607ef12806c699590b89d0f0e0d67baeec599d75947bf9c147330333", size = 48834, upload-time = "2023-10-30T14:53:21.151Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/59/f5/67e9cc5c2036f58115f9fe0f00d203cf6780c3ff8ae0e705e7a9d9e8ff9e/Flask_Login-0.6.3-py3-none-any.whl", hash = "sha256:849b25b82a436bf830a054e74214074af59097171562ab10bfa999e6b78aae5d", size = 17303 }, + { url = "https://files.pythonhosted.org/packages/59/f5/67e9cc5c2036f58115f9fe0f00d203cf6780c3ff8ae0e705e7a9d9e8ff9e/Flask_Login-0.6.3-py3-none-any.whl", hash = "sha256:849b25b82a436bf830a054e74214074af59097171562ab10bfa999e6b78aae5d", size = 17303, upload-time = "2023-10-30T14:53:19.636Z" }, ] [[package]] @@ -2016,9 +2049,9 @@ dependencies = [ { name = "flask" }, { name = "flask-sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3b/e2/4008fc0d298d7ce797021b194bbe151d4d12db670691648a226d4fc8aefc/Flask-Migrate-4.0.7.tar.gz", hash = "sha256:dff7dd25113c210b069af280ea713b883f3840c1e3455274745d7355778c8622", size = 21770 } +sdist = { url = "https://files.pythonhosted.org/packages/3b/e2/4008fc0d298d7ce797021b194bbe151d4d12db670691648a226d4fc8aefc/Flask-Migrate-4.0.7.tar.gz", hash = "sha256:dff7dd25113c210b069af280ea713b883f3840c1e3455274745d7355778c8622", size = 21770, upload-time = "2024-03-11T18:43:01.498Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/01/587023575286236f95d2ab8a826c320375ed5ea2102bb103ed89704ffa6b/Flask_Migrate-4.0.7-py3-none-any.whl", hash = "sha256:5c532be17e7b43a223b7500d620edae33795df27c75811ddf32560f7d48ec617", size = 21127 }, + { url = "https://files.pythonhosted.org/packages/93/01/587023575286236f95d2ab8a826c320375ed5ea2102bb103ed89704ffa6b/Flask_Migrate-4.0.7-py3-none-any.whl", hash = "sha256:5c532be17e7b43a223b7500d620edae33795df27c75811ddf32560f7d48ec617", size = 21127, upload-time = "2024-03-11T18:42:59.462Z" }, ] [[package]] @@ -2029,9 +2062,9 @@ dependencies = [ { name = "flask" }, { name = "orjson" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/49/575796f6ddca171d82dbb12762e33166c8b8f8616c946f0a6dfbb9bc3cd6/flask_orjson-2.0.0.tar.gz", hash = 
"sha256:6df6631437f9bc52cf9821735f896efa5583b5f80712f7d29d9ef69a79986a9c", size = 2974 } +sdist = { url = "https://files.pythonhosted.org/packages/a3/49/575796f6ddca171d82dbb12762e33166c8b8f8616c946f0a6dfbb9bc3cd6/flask_orjson-2.0.0.tar.gz", hash = "sha256:6df6631437f9bc52cf9821735f896efa5583b5f80712f7d29d9ef69a79986a9c", size = 2974, upload-time = "2024-01-15T00:03:22.236Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/ca/53e14be018a2284acf799830e8cd8e0b263c0fd3dff1ad7b35f8417e7067/flask_orjson-2.0.0-py3-none-any.whl", hash = "sha256:5d15f2ba94b8d6c02aee88fc156045016e83db9eda2c30545fabd640aebaec9d", size = 3622 }, + { url = "https://files.pythonhosted.org/packages/f3/ca/53e14be018a2284acf799830e8cd8e0b263c0fd3dff1ad7b35f8417e7067/flask_orjson-2.0.0-py3-none-any.whl", hash = "sha256:5d15f2ba94b8d6c02aee88fc156045016e83db9eda2c30545fabd640aebaec9d", size = 3622, upload-time = "2024-01-15T00:03:17.511Z" }, ] [[package]] @@ -2046,9 +2079,9 @@ dependencies = [ { name = "referencing" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/89/9b9ca58cbb8e9ec46f4a510ba93878e0c88d518bf03c350e3b1b7ad85cbe/flask-restx-1.3.2.tar.gz", hash = "sha256:0ae13d77e7d7e4dce513970cfa9db45364aef210e99022de26d2b73eb4dbced5", size = 2814719 } +sdist = { url = "https://files.pythonhosted.org/packages/43/89/9b9ca58cbb8e9ec46f4a510ba93878e0c88d518bf03c350e3b1b7ad85cbe/flask-restx-1.3.2.tar.gz", hash = "sha256:0ae13d77e7d7e4dce513970cfa9db45364aef210e99022de26d2b73eb4dbced5", size = 2814719, upload-time = "2025-09-23T20:34:25.21Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/3f/b82cd8e733a355db1abb8297afbf59ec972c00ef90bf8d4eed287958b204/flask_restx-1.3.2-py2.py3-none-any.whl", hash = "sha256:6e035496e8223668044fc45bf769e526352fd648d9e159bd631d94fd645a687b", size = 2799859 }, + { url = "https://files.pythonhosted.org/packages/7a/3f/b82cd8e733a355db1abb8297afbf59ec972c00ef90bf8d4eed287958b204/flask_restx-1.3.2-py2.py3-none-any.whl", hash = "sha256:6e035496e8223668044fc45bf769e526352fd648d9e159bd631d94fd645a687b", size = 2799859, upload-time = "2025-09-23T20:34:23.055Z" }, ] [[package]] @@ -2059,77 +2092,77 @@ dependencies = [ { name = "flask" }, { name = "sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/91/53/b0a9fcc1b1297f51e68b69ed3b7c3c40d8c45be1391d77ae198712914392/flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312", size = 81899 } +sdist = { url = "https://files.pythonhosted.org/packages/91/53/b0a9fcc1b1297f51e68b69ed3b7c3c40d8c45be1391d77ae198712914392/flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312", size = 81899, upload-time = "2023-09-11T21:42:36.147Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/6a/89963a5c6ecf166e8be29e0d1bf6806051ee8fe6c82e232842e3aeac9204/flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0", size = 25125 }, + { url = "https://files.pythonhosted.org/packages/1d/6a/89963a5c6ecf166e8be29e0d1bf6806051ee8fe6c82e232842e3aeac9204/flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0", size = 25125, upload-time = "2023-09-11T21:42:34.514Z" }, ] [[package]] name = "flatbuffers" version = "25.9.23" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/9d/1f/3ee70b0a55137442038f2a33469cc5fddd7e0ad2abf83d7497c18a2b6923/flatbuffers-25.9.23.tar.gz", hash = "sha256:676f9fa62750bb50cf531b42a0a2a118ad8f7f797a511eda12881c016f093b12", size = 22067 } +sdist = { url = "https://files.pythonhosted.org/packages/9d/1f/3ee70b0a55137442038f2a33469cc5fddd7e0ad2abf83d7497c18a2b6923/flatbuffers-25.9.23.tar.gz", hash = "sha256:676f9fa62750bb50cf531b42a0a2a118ad8f7f797a511eda12881c016f093b12", size = 22067, upload-time = "2025-09-24T05:25:30.106Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/1b/00a78aa2e8fbd63f9af08c9c19e6deb3d5d66b4dda677a0f61654680ee89/flatbuffers-25.9.23-py2.py3-none-any.whl", hash = "sha256:255538574d6cb6d0a79a17ec8bc0d30985913b87513a01cce8bcdb6b4c44d0e2", size = 30869 }, + { url = "https://files.pythonhosted.org/packages/ee/1b/00a78aa2e8fbd63f9af08c9c19e6deb3d5d66b4dda677a0f61654680ee89/flatbuffers-25.9.23-py2.py3-none-any.whl", hash = "sha256:255538574d6cb6d0a79a17ec8bc0d30985913b87513a01cce8bcdb6b4c44d0e2", size = 30869, upload-time = "2025-09-24T05:25:28.912Z" }, ] [[package]] name = "frozenlist" version = "1.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875 } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912 }, - { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046 }, - { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119 }, - { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067 }, - { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160 }, - { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544 }, - { url = 
"https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797 }, - { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923 }, - { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886 }, - { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731 }, - { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544 }, - { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806 }, - { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382 }, - { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647 }, - { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064 }, - { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937 }, - { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782 }, - { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594 }, - { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448 }, - { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411 }, - { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014 }, - { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909 }, - { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049 }, - { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485 }, - { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619 }, - { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320 }, - { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820 }, - { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518 }, - { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096 }, - { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985 }, - { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 
44591 }, - { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102 }, - { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409 }, + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = 
"2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 
44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] [[package]] name = "fsspec" version = "2025.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/7f/2747c0d332b9acfa75dc84447a066fdf812b5a6b8d30472b74d309bfe8cb/fsspec-2025.10.0.tar.gz", hash = "sha256:b6789427626f068f9a83ca4e8a3cc050850b6c0f71f99ddb4f542b8266a26a59", size = 309285 } +sdist = { url = "https://files.pythonhosted.org/packages/24/7f/2747c0d332b9acfa75dc84447a066fdf812b5a6b8d30472b74d309bfe8cb/fsspec-2025.10.0.tar.gz", hash = "sha256:b6789427626f068f9a83ca4e8a3cc050850b6c0f71f99ddb4f542b8266a26a59", size = 309285, upload-time = "2025-10-30T14:58:44.036Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/02/a6b21098b1d5d6249b7c5ab69dde30108a71e4e819d4a9778f1de1d5b70d/fsspec-2025.10.0-py3-none-any.whl", hash = "sha256:7c7712353ae7d875407f97715f0e1ffcc21e33d5b24556cb1e090ae9409ec61d", size = 200966 }, + { url = "https://files.pythonhosted.org/packages/eb/02/a6b21098b1d5d6249b7c5ab69dde30108a71e4e819d4a9778f1de1d5b70d/fsspec-2025.10.0-py3-none-any.whl", hash = "sha256:7c7712353ae7d875407f97715f0e1ffcc21e33d5b24556cb1e090ae9409ec61d", size = 200966, upload-time = "2025-10-30T14:58:42.53Z" }, ] [[package]] name = "future" version = "1.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/b2/4140c69c6a66432916b26158687e821ba631a4c9273c474343badf84d3ba/future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05", size = 1228490 } +sdist = { url = "https://files.pythonhosted.org/packages/a7/b2/4140c69c6a66432916b26158687e821ba631a4c9273c474343badf84d3ba/future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05", size = 1228490, upload-time = "2024-02-21T11:52:38.461Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/da/71/ae30dadffc90b9006d77af76b393cb9dfbfc9629f339fc1574a1c52e6806/future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216", size = 491326 }, + { url = "https://files.pythonhosted.org/packages/da/71/ae30dadffc90b9006d77af76b393cb9dfbfc9629f339fc1574a1c52e6806/future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216", size = 491326, upload-time = "2024-02-21T11:52:35.956Z" }, ] [[package]] @@ -2142,23 +2175,23 @@ dependencies = [ { name = "zope-event" }, { name = "zope-interface" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9e/48/b3ef2673ffb940f980966694e40d6d32560f3ffa284ecaeb5ea3a90a6d3f/gevent-25.9.1.tar.gz", hash = "sha256:adf9cd552de44a4e6754c51ff2e78d9193b7fa6eab123db9578a210e657235dd", size = 5059025 } +sdist = { url = "https://files.pythonhosted.org/packages/9e/48/b3ef2673ffb940f980966694e40d6d32560f3ffa284ecaeb5ea3a90a6d3f/gevent-25.9.1.tar.gz", hash = 
"sha256:adf9cd552de44a4e6754c51ff2e78d9193b7fa6eab123db9578a210e657235dd", size = 5059025, upload-time = "2025-09-17T16:15:34.528Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/86/03f8db0704fed41b0fa830425845f1eb4e20c92efa3f18751ee17809e9c6/gevent-25.9.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5aff9e8342dc954adb9c9c524db56c2f3557999463445ba3d9cbe3dada7b7", size = 1792418 }, - { url = "https://files.pythonhosted.org/packages/5f/35/f6b3a31f0849a62cfa2c64574bcc68a781d5499c3195e296e892a121a3cf/gevent-25.9.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1cdf6db28f050ee103441caa8b0448ace545364f775059d5e2de089da975c457", size = 1875700 }, - { url = "https://files.pythonhosted.org/packages/66/1e/75055950aa9b48f553e061afa9e3728061b5ccecca358cef19166e4ab74a/gevent-25.9.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:812debe235a8295be3b2a63b136c2474241fa5c58af55e6a0f8cfc29d4936235", size = 1831365 }, - { url = "https://files.pythonhosted.org/packages/31/e8/5c1f6968e5547e501cfa03dcb0239dff55e44c3660a37ec534e32a0c008f/gevent-25.9.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b28b61ff9216a3d73fe8f35669eefcafa957f143ac534faf77e8a19eb9e6883a", size = 2122087 }, - { url = "https://files.pythonhosted.org/packages/c0/2c/ebc5d38a7542af9fb7657bfe10932a558bb98c8a94e4748e827d3823fced/gevent-25.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5e4b6278b37373306fc6b1e5f0f1cf56339a1377f67c35972775143d8d7776ff", size = 1808776 }, - { url = "https://files.pythonhosted.org/packages/e6/26/e1d7d6c8ffbf76fe1fbb4e77bdb7f47d419206adc391ec40a8ace6ebbbf0/gevent-25.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d99f0cb2ce43c2e8305bf75bee61a8bde06619d21b9d0316ea190fc7a0620a56", size = 2179141 }, - { url = "https://files.pythonhosted.org/packages/1d/6c/bb21fd9c095506aeeaa616579a356aa50935165cc0f1e250e1e0575620a7/gevent-25.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:72152517ecf548e2f838c61b4be76637d99279dbaa7e01b3924df040aa996586", size = 1677941 }, - { url = "https://files.pythonhosted.org/packages/f7/49/e55930ba5259629eb28ac7ee1abbca971996a9165f902f0249b561602f24/gevent-25.9.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:46b188248c84ffdec18a686fcac5dbb32365d76912e14fda350db5dc0bfd4f86", size = 2955991 }, - { url = "https://files.pythonhosted.org/packages/aa/88/63dc9e903980e1da1e16541ec5c70f2b224ec0a8e34088cb42794f1c7f52/gevent-25.9.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f2b54ea3ca6f0c763281cd3f96010ac7e98c2e267feb1221b5a26e2ca0b9a692", size = 1808503 }, - { url = "https://files.pythonhosted.org/packages/7a/8d/7236c3a8f6ef7e94c22e658397009596fa90f24c7d19da11ad7ab3a9248e/gevent-25.9.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7a834804ac00ed8a92a69d3826342c677be651b1c3cd66cc35df8bc711057aa2", size = 1890001 }, - { url = "https://files.pythonhosted.org/packages/4f/63/0d7f38c4a2085ecce26b50492fc6161aa67250d381e26d6a7322c309b00f/gevent-25.9.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:323a27192ec4da6b22a9e51c3d9d896ff20bc53fdc9e45e56eaab76d1c39dd74", size = 1855335 }, - { url = "https://files.pythonhosted.org/packages/95/18/da5211dfc54c7a57e7432fd9a6ffeae1ce36fe5a313fa782b1c96529ea3d/gevent-25.9.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6ea78b39a2c51d47ff0f130f4c755a9a4bbb2dd9721149420ad4712743911a51", 
size = 2109046 }, - { url = "https://files.pythonhosted.org/packages/a6/5a/7bb5ec8e43a2c6444853c4a9f955f3e72f479d7c24ea86c95fb264a2de65/gevent-25.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:dc45cd3e1cc07514a419960af932a62eb8515552ed004e56755e4bf20bad30c5", size = 1827099 }, - { url = "https://files.pythonhosted.org/packages/ca/d4/b63a0a60635470d7d986ef19897e893c15326dd69e8fb342c76a4f07fe9e/gevent-25.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34e01e50c71eaf67e92c186ee0196a039d6e4f4b35670396baed4a2d8f1b347f", size = 2172623 }, - { url = "https://files.pythonhosted.org/packages/d5/98/caf06d5d22a7c129c1fb2fc1477306902a2c8ddfd399cd26bbbd4caf2141/gevent-25.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acd6bcd5feabf22c7c5174bd3b9535ee9f088d2bbce789f740ad8d6554b18f3", size = 1682837 }, + { url = "https://files.pythonhosted.org/packages/81/86/03f8db0704fed41b0fa830425845f1eb4e20c92efa3f18751ee17809e9c6/gevent-25.9.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5aff9e8342dc954adb9c9c524db56c2f3557999463445ba3d9cbe3dada7b7", size = 1792418, upload-time = "2025-09-17T15:41:24.384Z" }, + { url = "https://files.pythonhosted.org/packages/5f/35/f6b3a31f0849a62cfa2c64574bcc68a781d5499c3195e296e892a121a3cf/gevent-25.9.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1cdf6db28f050ee103441caa8b0448ace545364f775059d5e2de089da975c457", size = 1875700, upload-time = "2025-09-17T15:48:59.652Z" }, + { url = "https://files.pythonhosted.org/packages/66/1e/75055950aa9b48f553e061afa9e3728061b5ccecca358cef19166e4ab74a/gevent-25.9.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:812debe235a8295be3b2a63b136c2474241fa5c58af55e6a0f8cfc29d4936235", size = 1831365, upload-time = "2025-09-17T15:49:19.426Z" }, + { url = "https://files.pythonhosted.org/packages/31/e8/5c1f6968e5547e501cfa03dcb0239dff55e44c3660a37ec534e32a0c008f/gevent-25.9.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b28b61ff9216a3d73fe8f35669eefcafa957f143ac534faf77e8a19eb9e6883a", size = 2122087, upload-time = "2025-09-17T15:15:12.329Z" }, + { url = "https://files.pythonhosted.org/packages/c0/2c/ebc5d38a7542af9fb7657bfe10932a558bb98c8a94e4748e827d3823fced/gevent-25.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5e4b6278b37373306fc6b1e5f0f1cf56339a1377f67c35972775143d8d7776ff", size = 1808776, upload-time = "2025-09-17T15:52:40.16Z" }, + { url = "https://files.pythonhosted.org/packages/e6/26/e1d7d6c8ffbf76fe1fbb4e77bdb7f47d419206adc391ec40a8ace6ebbbf0/gevent-25.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d99f0cb2ce43c2e8305bf75bee61a8bde06619d21b9d0316ea190fc7a0620a56", size = 2179141, upload-time = "2025-09-17T15:24:09.895Z" }, + { url = "https://files.pythonhosted.org/packages/1d/6c/bb21fd9c095506aeeaa616579a356aa50935165cc0f1e250e1e0575620a7/gevent-25.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:72152517ecf548e2f838c61b4be76637d99279dbaa7e01b3924df040aa996586", size = 1677941, upload-time = "2025-09-17T19:59:50.185Z" }, + { url = "https://files.pythonhosted.org/packages/f7/49/e55930ba5259629eb28ac7ee1abbca971996a9165f902f0249b561602f24/gevent-25.9.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:46b188248c84ffdec18a686fcac5dbb32365d76912e14fda350db5dc0bfd4f86", size = 2955991, upload-time = "2025-09-17T14:52:30.568Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/88/63dc9e903980e1da1e16541ec5c70f2b224ec0a8e34088cb42794f1c7f52/gevent-25.9.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f2b54ea3ca6f0c763281cd3f96010ac7e98c2e267feb1221b5a26e2ca0b9a692", size = 1808503, upload-time = "2025-09-17T15:41:25.59Z" }, + { url = "https://files.pythonhosted.org/packages/7a/8d/7236c3a8f6ef7e94c22e658397009596fa90f24c7d19da11ad7ab3a9248e/gevent-25.9.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7a834804ac00ed8a92a69d3826342c677be651b1c3cd66cc35df8bc711057aa2", size = 1890001, upload-time = "2025-09-17T15:49:01.227Z" }, + { url = "https://files.pythonhosted.org/packages/4f/63/0d7f38c4a2085ecce26b50492fc6161aa67250d381e26d6a7322c309b00f/gevent-25.9.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:323a27192ec4da6b22a9e51c3d9d896ff20bc53fdc9e45e56eaab76d1c39dd74", size = 1855335, upload-time = "2025-09-17T15:49:20.582Z" }, + { url = "https://files.pythonhosted.org/packages/95/18/da5211dfc54c7a57e7432fd9a6ffeae1ce36fe5a313fa782b1c96529ea3d/gevent-25.9.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6ea78b39a2c51d47ff0f130f4c755a9a4bbb2dd9721149420ad4712743911a51", size = 2109046, upload-time = "2025-09-17T15:15:13.817Z" }, + { url = "https://files.pythonhosted.org/packages/a6/5a/7bb5ec8e43a2c6444853c4a9f955f3e72f479d7c24ea86c95fb264a2de65/gevent-25.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:dc45cd3e1cc07514a419960af932a62eb8515552ed004e56755e4bf20bad30c5", size = 1827099, upload-time = "2025-09-17T15:52:41.384Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d4/b63a0a60635470d7d986ef19897e893c15326dd69e8fb342c76a4f07fe9e/gevent-25.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34e01e50c71eaf67e92c186ee0196a039d6e4f4b35670396baed4a2d8f1b347f", size = 2172623, upload-time = "2025-09-17T15:24:12.03Z" }, + { url = "https://files.pythonhosted.org/packages/d5/98/caf06d5d22a7c129c1fb2fc1477306902a2c8ddfd399cd26bbbd4caf2141/gevent-25.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acd6bcd5feabf22c7c5174bd3b9535ee9f088d2bbce789f740ad8d6554b18f3", size = 1682837, upload-time = "2025-09-17T19:48:47.318Z" }, ] [[package]] @@ -2168,9 +2201,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "smmap" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684 } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684, upload-time = "2025-01-02T07:20:46.413Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794 }, + { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794, upload-time = "2025-01-02T07:20:43.624Z" }, ] [[package]] @@ -2180,31 +2213,31 @@ source = { registry = "https://pypi.org/simple" } 
dependencies = [ { name = "gitdb" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/c8/dd58967d119baab745caec2f9d853297cec1989ec1d63f677d3880632b88/gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c", size = 215076 } +sdist = { url = "https://files.pythonhosted.org/packages/9a/c8/dd58967d119baab745caec2f9d853297cec1989ec1d63f677d3880632b88/gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c", size = 215076, upload-time = "2025-07-24T03:45:54.871Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/01/61/d4b89fec821f72385526e1b9d9a3a0385dda4a72b206d28049e2c7cd39b8/gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77", size = 208168 }, + { url = "https://files.pythonhosted.org/packages/01/61/d4b89fec821f72385526e1b9d9a3a0385dda4a72b206d28049e2c7cd39b8/gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77", size = 208168, upload-time = "2025-07-24T03:45:52.517Z" }, ] [[package]] name = "gmpy2" version = "2.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/07/bd/c6c154ce734a3e6187871b323297d8e5f3bdf9feaafc5212381538bc19e4/gmpy2-2.2.1.tar.gz", hash = "sha256:e83e07567441b78cb87544910cb3cc4fe94e7da987e93ef7622e76fb96650432", size = 234228 } +sdist = { url = "https://files.pythonhosted.org/packages/07/bd/c6c154ce734a3e6187871b323297d8e5f3bdf9feaafc5212381538bc19e4/gmpy2-2.2.1.tar.gz", hash = "sha256:e83e07567441b78cb87544910cb3cc4fe94e7da987e93ef7622e76fb96650432", size = 234228, upload-time = "2024-07-21T05:33:00.715Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/ec/ab67751ac0c4088ed21cf9a2a7f9966bf702ca8ebfc3204879cf58c90179/gmpy2-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98e947491c67523d3147a500f377bb64d0b115e4ab8a12d628fb324bb0e142bf", size = 880346 }, - { url = "https://files.pythonhosted.org/packages/97/7c/bdc4a7a2b0e543787a9354e80fdcf846c4e9945685218cef4ca938d25594/gmpy2-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ccd319a3a87529484167ae1391f937ac4a8724169fd5822bbb541d1eab612b0", size = 694518 }, - { url = "https://files.pythonhosted.org/packages/fc/44/ea903003bb4c3af004912fb0d6488e346bd76968f11a7472a1e60dee7dd7/gmpy2-2.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:827bcd433e5d62f1b732f45e6949419da4a53915d6c80a3c7a5a03d5a783a03a", size = 1653491 }, - { url = "https://files.pythonhosted.org/packages/c9/70/5bce281b7cd664c04f1c9d47a37087db37b2be887bce738340e912ad86c8/gmpy2-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7131231fc96f57272066295c81cbf11b3233a9471659bca29ddc90a7bde9bfa", size = 1706487 }, - { url = "https://files.pythonhosted.org/packages/2a/52/1f773571f21cf0319fc33218a1b384f29de43053965c05ed32f7e6729115/gmpy2-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1cc6f2bb68ee00c20aae554e111dc781a76140e00c31e4eda5c8f2d4168ed06c", size = 1637415 }, - { url = "https://files.pythonhosted.org/packages/99/4c/390daf67c221b3f4f10b5b7d9293e61e4dbd48956a38947679c5a701af27/gmpy2-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae388fe46e3d20af4675451a4b6c12fc1bb08e6e0e69ee47072638be21bf42d8", size = 1657781 }, - { url = 
"https://files.pythonhosted.org/packages/61/cd/86e47bccb3636389e29c4654a0e5ac52926d832897f2f64632639b63ffc1/gmpy2-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:8b472ee3c123b77979374da2293ebf2c170b88212e173d64213104956d4678fb", size = 1203346 }, - { url = "https://files.pythonhosted.org/packages/9a/ee/8f9f65e2bac334cfe13b3fc3f8962d5fc2858ebcf4517690d2d24afa6d0e/gmpy2-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90d03a1be1b1ad3944013fae5250316c3f4e6aec45ecdf189a5c7422d640004d", size = 885231 }, - { url = "https://files.pythonhosted.org/packages/07/1c/bf29f6bf8acd72c3cf85d04e7db1bb26dd5507ee2387770bb787bc54e2a5/gmpy2-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd09dd43d199908c1d1d501c5de842b3bf754f99b94af5b5ef0e26e3b716d2d5", size = 696569 }, - { url = "https://files.pythonhosted.org/packages/7c/cc/38d33eadeccd81b604a95b67d43c71b246793b7c441f1d7c3b41978cd1cf/gmpy2-2.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3232859fda3e96fd1aecd6235ae20476ed4506562bcdef6796a629b78bb96acd", size = 1655776 }, - { url = "https://files.pythonhosted.org/packages/96/8d/d017599d6db8e9b96d6e84ea5102c33525cb71c82876b1813a2ece5d94ec/gmpy2-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30fba6f7cf43fb7f8474216701b5aaddfa5e6a06d560e88a67f814062934e863", size = 1707529 }, - { url = "https://files.pythonhosted.org/packages/d0/93/91b4a0af23ae4216fd7ebcfd955dcbe152c5ef170598aee421310834de0a/gmpy2-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9b33cae533ede8173bc7d4bb855b388c5b636ca9f22a32c949f2eb7e0cc531b2", size = 1634195 }, - { url = "https://files.pythonhosted.org/packages/d7/ba/08ee99f19424cd33d5f0f17b2184e34d2fa886eebafcd3e164ccba15d9f2/gmpy2-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:954e7e1936c26e370ca31bbd49729ebeeb2006a8f9866b1e778ebb89add2e941", size = 1656779 }, - { url = "https://files.pythonhosted.org/packages/14/e1/7b32ae2b23c8363d87b7f4bbac9abe9a1f820c2417d2e99ca3b4afd9379b/gmpy2-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c929870137b20d9c3f7dd97f43615b2d2c1a2470e50bafd9a5eea2e844f462e9", size = 1204668 }, + { url = "https://files.pythonhosted.org/packages/ac/ec/ab67751ac0c4088ed21cf9a2a7f9966bf702ca8ebfc3204879cf58c90179/gmpy2-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98e947491c67523d3147a500f377bb64d0b115e4ab8a12d628fb324bb0e142bf", size = 880346, upload-time = "2024-07-21T05:31:25.531Z" }, + { url = "https://files.pythonhosted.org/packages/97/7c/bdc4a7a2b0e543787a9354e80fdcf846c4e9945685218cef4ca938d25594/gmpy2-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ccd319a3a87529484167ae1391f937ac4a8724169fd5822bbb541d1eab612b0", size = 694518, upload-time = "2024-07-21T05:31:27.78Z" }, + { url = "https://files.pythonhosted.org/packages/fc/44/ea903003bb4c3af004912fb0d6488e346bd76968f11a7472a1e60dee7dd7/gmpy2-2.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:827bcd433e5d62f1b732f45e6949419da4a53915d6c80a3c7a5a03d5a783a03a", size = 1653491, upload-time = "2024-07-21T05:31:29.968Z" }, + { url = "https://files.pythonhosted.org/packages/c9/70/5bce281b7cd664c04f1c9d47a37087db37b2be887bce738340e912ad86c8/gmpy2-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7131231fc96f57272066295c81cbf11b3233a9471659bca29ddc90a7bde9bfa", size = 1706487, upload-time = "2024-07-21T05:31:32.476Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/52/1f773571f21cf0319fc33218a1b384f29de43053965c05ed32f7e6729115/gmpy2-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1cc6f2bb68ee00c20aae554e111dc781a76140e00c31e4eda5c8f2d4168ed06c", size = 1637415, upload-time = "2024-07-21T05:31:34.591Z" }, + { url = "https://files.pythonhosted.org/packages/99/4c/390daf67c221b3f4f10b5b7d9293e61e4dbd48956a38947679c5a701af27/gmpy2-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae388fe46e3d20af4675451a4b6c12fc1bb08e6e0e69ee47072638be21bf42d8", size = 1657781, upload-time = "2024-07-21T05:31:36.81Z" }, + { url = "https://files.pythonhosted.org/packages/61/cd/86e47bccb3636389e29c4654a0e5ac52926d832897f2f64632639b63ffc1/gmpy2-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:8b472ee3c123b77979374da2293ebf2c170b88212e173d64213104956d4678fb", size = 1203346, upload-time = "2024-07-21T05:31:39.344Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ee/8f9f65e2bac334cfe13b3fc3f8962d5fc2858ebcf4517690d2d24afa6d0e/gmpy2-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90d03a1be1b1ad3944013fae5250316c3f4e6aec45ecdf189a5c7422d640004d", size = 885231, upload-time = "2024-07-21T05:31:41.471Z" }, + { url = "https://files.pythonhosted.org/packages/07/1c/bf29f6bf8acd72c3cf85d04e7db1bb26dd5507ee2387770bb787bc54e2a5/gmpy2-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd09dd43d199908c1d1d501c5de842b3bf754f99b94af5b5ef0e26e3b716d2d5", size = 696569, upload-time = "2024-07-21T05:31:43.768Z" }, + { url = "https://files.pythonhosted.org/packages/7c/cc/38d33eadeccd81b604a95b67d43c71b246793b7c441f1d7c3b41978cd1cf/gmpy2-2.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3232859fda3e96fd1aecd6235ae20476ed4506562bcdef6796a629b78bb96acd", size = 1655776, upload-time = "2024-07-21T05:31:46.272Z" }, + { url = "https://files.pythonhosted.org/packages/96/8d/d017599d6db8e9b96d6e84ea5102c33525cb71c82876b1813a2ece5d94ec/gmpy2-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30fba6f7cf43fb7f8474216701b5aaddfa5e6a06d560e88a67f814062934e863", size = 1707529, upload-time = "2024-07-21T05:31:48.732Z" }, + { url = "https://files.pythonhosted.org/packages/d0/93/91b4a0af23ae4216fd7ebcfd955dcbe152c5ef170598aee421310834de0a/gmpy2-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9b33cae533ede8173bc7d4bb855b388c5b636ca9f22a32c949f2eb7e0cc531b2", size = 1634195, upload-time = "2024-07-21T05:31:50.99Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ba/08ee99f19424cd33d5f0f17b2184e34d2fa886eebafcd3e164ccba15d9f2/gmpy2-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:954e7e1936c26e370ca31bbd49729ebeeb2006a8f9866b1e778ebb89add2e941", size = 1656779, upload-time = "2024-07-21T05:31:53.657Z" }, + { url = "https://files.pythonhosted.org/packages/14/e1/7b32ae2b23c8363d87b7f4bbac9abe9a1f820c2417d2e99ca3b4afd9379b/gmpy2-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c929870137b20d9c3f7dd97f43615b2d2c1a2470e50bafd9a5eea2e844f462e9", size = 1204668, upload-time = "2024-07-21T05:31:56.264Z" }, ] [[package]] @@ -2214,9 +2247,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beautifulsoup4" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/89/97/b49c69893cddea912c7a660a4b6102c6b02cd268f8c7162dd70b7c16f753/google-3.0.0.tar.gz", hash = "sha256:143530122ee5130509ad5e989f0512f7cb218b2d4eddbafbad40fd10e8d8ccbe", size = 44978 } +sdist = { url = 
"https://files.pythonhosted.org/packages/89/97/b49c69893cddea912c7a660a4b6102c6b02cd268f8c7162dd70b7c16f753/google-3.0.0.tar.gz", hash = "sha256:143530122ee5130509ad5e989f0512f7cb218b2d4eddbafbad40fd10e8d8ccbe", size = 44978, upload-time = "2020-07-11T14:50:45.678Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/35/17c9141c4ae21e9a29a43acdfd848e3e468a810517f862cad07977bf8fe9/google-3.0.0-py2.py3-none-any.whl", hash = "sha256:889cf695f84e4ae2c55fbc0cfdaf4c1e729417fa52ab1db0485202ba173e4935", size = 45258 }, + { url = "https://files.pythonhosted.org/packages/ac/35/17c9141c4ae21e9a29a43acdfd848e3e468a810517f862cad07977bf8fe9/google-3.0.0-py2.py3-none-any.whl", hash = "sha256:889cf695f84e4ae2c55fbc0cfdaf4c1e729417fa52ab1db0485202ba173e4935", size = 45258, upload-time = "2020-07-11T14:49:58.287Z" }, ] [[package]] @@ -2230,9 +2263,9 @@ dependencies = [ { name = "protobuf" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b2/8f/ecd68579bd2bf5e9321df60dcdee6e575adf77fedacb1d8378760b2b16b6/google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9", size = 148047 } +sdist = { url = "https://files.pythonhosted.org/packages/b2/8f/ecd68579bd2bf5e9321df60dcdee6e575adf77fedacb1d8378760b2b16b6/google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9", size = 148047, upload-time = "2024-03-21T20:16:56.269Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/75/59a3ad90d9b4ff5b3e0537611dbe885aeb96124521c9d35aa079f1e0f2c9/google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6", size = 138293 }, + { url = "https://files.pythonhosted.org/packages/86/75/59a3ad90d9b4ff5b3e0537611dbe885aeb96124521c9d35aa079f1e0f2c9/google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6", size = 138293, upload-time = "2024-03-21T20:16:53.645Z" }, ] [package.optional-dependencies] @@ -2252,9 +2285,9 @@ dependencies = [ { name = "httplib2" }, { name = "uritemplate" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/35/8b/d990f947c261304a5c1599d45717d02c27d46af5f23e1fee5dc19c8fa79d/google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03", size = 10891311 } +sdist = { url = "https://files.pythonhosted.org/packages/35/8b/d990f947c261304a5c1599d45717d02c27d46af5f23e1fee5dc19c8fa79d/google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03", size = 10891311, upload-time = "2023-06-20T16:29:25.008Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/03/209b5c36a621ae644dc7d4743746cd3b38b18e133f8779ecaf6b95cc01ce/google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913", size = 11379891 }, + { url = "https://files.pythonhosted.org/packages/39/03/209b5c36a621ae644dc7d4743746cd3b38b18e133f8779ecaf6b95cc01ce/google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913", size = 11379891, upload-time = "2023-06-20T16:29:19.532Z" }, ] [[package]] @@ -2266,9 +2299,9 @@ dependencies = [ { name = "pyasn1-modules" }, { name = "rsa" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/18/b2/f14129111cfd61793609643a07ecb03651a71dd65c6974f63b0310ff4b45/google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360", size = 244326 } +sdist = { url = "https://files.pythonhosted.org/packages/18/b2/f14129111cfd61793609643a07ecb03651a71dd65c6974f63b0310ff4b45/google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360", size = 244326, upload-time = "2024-03-20T17:24:27.72Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/8d/ddbcf81ec751d8ee5fd18ac11ff38a0e110f39dfbf105e6d9db69d556dd0/google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415", size = 189186 }, + { url = "https://files.pythonhosted.org/packages/9e/8d/ddbcf81ec751d8ee5fd18ac11ff38a0e110f39dfbf105e6d9db69d556dd0/google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415", size = 189186, upload-time = "2024-03-20T17:24:24.292Z" }, ] [[package]] @@ -2279,9 +2312,9 @@ dependencies = [ { name = "google-auth" }, { name = "httplib2" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/56/be/217a598a818567b28e859ff087f347475c807a5649296fb5a817c58dacef/google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05", size = 10842 } +sdist = { url = "https://files.pythonhosted.org/packages/56/be/217a598a818567b28e859ff087f347475c807a5649296fb5a817c58dacef/google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05", size = 10842, upload-time = "2023-12-12T17:40:30.722Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/be/8a/fe34d2f3f9470a27b01c9e76226965863f153d5fbe276f83608562e49c04/google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d", size = 9253 }, + { url = "https://files.pythonhosted.org/packages/be/8a/fe34d2f3f9470a27b01c9e76226965863f153d5fbe276f83608562e49c04/google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d", size = 9253, upload-time = "2023-12-12T17:40:13.055Z" }, ] [[package]] @@ -2301,9 +2334,9 @@ dependencies = [ { name = "pydantic" }, { name = "shapely" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/47/21/5930a1420f82bec246ae09e1b7cc8458544f3befe669193b33a7b5c0691c/google-cloud-aiplatform-1.49.0.tar.gz", hash = "sha256:e6e6d01079bb5def49e4be4db4d12b13c624b5c661079c869c13c855e5807429", size = 5766450 } +sdist = { url = "https://files.pythonhosted.org/packages/47/21/5930a1420f82bec246ae09e1b7cc8458544f3befe669193b33a7b5c0691c/google-cloud-aiplatform-1.49.0.tar.gz", hash = "sha256:e6e6d01079bb5def49e4be4db4d12b13c624b5c661079c869c13c855e5807429", size = 5766450, upload-time = "2024-04-29T17:25:31.646Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/6a/7d9e1c03c814e760361fe8b0ffd373ead4124ace66ed33bb16d526ae1ecf/google_cloud_aiplatform-1.49.0-py2.py3-none-any.whl", hash = "sha256:8072d9e0c18d8942c704233d1a93b8d6312fc7b278786a283247950e28ae98df", size = 4914049 }, + { url = "https://files.pythonhosted.org/packages/39/6a/7d9e1c03c814e760361fe8b0ffd373ead4124ace66ed33bb16d526ae1ecf/google_cloud_aiplatform-1.49.0-py2.py3-none-any.whl", hash = "sha256:8072d9e0c18d8942c704233d1a93b8d6312fc7b278786a283247950e28ae98df", size 
= 4914049, upload-time = "2024-04-29T17:25:27.625Z" }, ] [[package]] @@ -2319,9 +2352,9 @@ dependencies = [ { name = "python-dateutil" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/2f/3dda76b3ec029578838b1fe6396e6b86eb574200352240e23dea49265bb7/google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6", size = 474389 } +sdist = { url = "https://files.pythonhosted.org/packages/f1/2f/3dda76b3ec029578838b1fe6396e6b86eb574200352240e23dea49265bb7/google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6", size = 474389, upload-time = "2025-02-27T18:49:45.416Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/6d/856a6ca55c1d9d99129786c929a27dd9d31992628ebbff7f5d333352981f/google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877", size = 247885 }, + { url = "https://files.pythonhosted.org/packages/0c/6d/856a6ca55c1d9d99129786c929a27dd9d31992628ebbff7f5d333352981f/google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877", size = 247885, upload-time = "2025-02-27T18:49:43.454Z" }, ] [[package]] @@ -2332,9 +2365,9 @@ dependencies = [ { name = "google-api-core" }, { name = "google-auth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a6/03/ef0bc99d0e0faf4fdbe67ac445e18cdaa74824fd93cd069e7bb6548cb52d/google_cloud_core-2.5.0.tar.gz", hash = "sha256:7c1b7ef5c92311717bd05301aa1a91ffbc565673d3b0b4163a52d8413a186963", size = 36027 } +sdist = { url = "https://files.pythonhosted.org/packages/a6/03/ef0bc99d0e0faf4fdbe67ac445e18cdaa74824fd93cd069e7bb6548cb52d/google_cloud_core-2.5.0.tar.gz", hash = "sha256:7c1b7ef5c92311717bd05301aa1a91ffbc565673d3b0b4163a52d8413a186963", size = 36027, upload-time = "2025-10-29T23:17:39.513Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl", hash = "sha256:67d977b41ae6c7211ee830c7912e41003ea8194bff15ae7d72fd6f51e57acabc", size = 29469 }, + { url = "https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl", hash = "sha256:67d977b41ae6c7211ee830c7912e41003ea8194bff15ae7d72fd6f51e57acabc", size = 29469, upload-time = "2025-10-29T23:17:38.548Z" }, ] [[package]] @@ -2349,9 +2382,9 @@ dependencies = [ { name = "proto-plus" }, { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/19/b95d0e8814ce42522e434cdd85c0cb6236d874d9adf6685fc8e6d1fda9d1/google_cloud_resource_manager-1.15.0.tar.gz", hash = "sha256:3d0b78c3daa713f956d24e525b35e9e9a76d597c438837171304d431084cedaf", size = 449227 } +sdist = { url = "https://files.pythonhosted.org/packages/fc/19/b95d0e8814ce42522e434cdd85c0cb6236d874d9adf6685fc8e6d1fda9d1/google_cloud_resource_manager-1.15.0.tar.gz", hash = "sha256:3d0b78c3daa713f956d24e525b35e9e9a76d597c438837171304d431084cedaf", size = 449227, upload-time = "2025-10-20T14:57:01.108Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/93/5aef41a5f146ad4559dd7040ae5fa8e7ddcab4dfadbef6cb4b66d775e690/google_cloud_resource_manager-1.15.0-py3-none-any.whl", hash = "sha256:0ccde5db644b269ddfdf7b407a2c7b60bdbf459f8e666344a5285601d00c7f6d", size = 397151 }, + { url = 
"https://files.pythonhosted.org/packages/8c/93/5aef41a5f146ad4559dd7040ae5fa8e7ddcab4dfadbef6cb4b66d775e690/google_cloud_resource_manager-1.15.0-py3-none-any.whl", hash = "sha256:0ccde5db644b269ddfdf7b407a2c7b60bdbf459f8e666344a5285601d00c7f6d", size = 397151, upload-time = "2025-10-20T14:53:45.409Z" }, ] [[package]] @@ -2366,29 +2399,29 @@ dependencies = [ { name = "google-resumable-media" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/17/c5/0bc3f97cf4c14a731ecc5a95c5cde6883aec7289dc74817f9b41f866f77e/google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f", size = 5525307 } +sdist = { url = "https://files.pythonhosted.org/packages/17/c5/0bc3f97cf4c14a731ecc5a95c5cde6883aec7289dc74817f9b41f866f77e/google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f", size = 5525307, upload-time = "2024-03-18T23:55:37.102Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/e5/7d045d188f4ef85d94b9e3ae1bf876170c6b9f4c9a950124978efc36f680/google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852", size = 125604 }, + { url = "https://files.pythonhosted.org/packages/cb/e5/7d045d188f4ef85d94b9e3ae1bf876170c6b9f4c9a950124978efc36f680/google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852", size = 125604, upload-time = "2024-03-18T23:55:33.987Z" }, ] [[package]] name = "google-crc32c" version = "1.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/19/ae/87802e6d9f9d69adfaedfcfd599266bf386a54d0be058b532d04c794f76d/google_crc32c-1.7.1.tar.gz", hash = "sha256:2bff2305f98846f3e825dbeec9ee406f89da7962accdb29356e4eadc251bd472", size = 14495 } +sdist = { url = "https://files.pythonhosted.org/packages/19/ae/87802e6d9f9d69adfaedfcfd599266bf386a54d0be058b532d04c794f76d/google_crc32c-1.7.1.tar.gz", hash = "sha256:2bff2305f98846f3e825dbeec9ee406f89da7962accdb29356e4eadc251bd472", size = 14495, upload-time = "2025-03-26T14:29:13.32Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/94/220139ea87822b6fdfdab4fb9ba81b3fff7ea2c82e2af34adc726085bffc/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6fbab4b935989e2c3610371963ba1b86afb09537fd0c633049be82afe153ac06", size = 30468 }, - { url = "https://files.pythonhosted.org/packages/94/97/789b23bdeeb9d15dc2904660463ad539d0318286d7633fe2760c10ed0c1c/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:ed66cbe1ed9cbaaad9392b5259b3eba4a9e565420d734e6238813c428c3336c9", size = 30313 }, - { url = "https://files.pythonhosted.org/packages/81/b8/976a2b843610c211e7ccb3e248996a61e87dbb2c09b1499847e295080aec/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee6547b657621b6cbed3562ea7826c3e11cab01cd33b74e1f677690652883e77", size = 33048 }, - { url = "https://files.pythonhosted.org/packages/c9/16/a3842c2cf591093b111d4a5e2bfb478ac6692d02f1b386d2a33283a19dc9/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d68e17bad8f7dd9a49181a1f5a8f4b251c6dbc8cc96fb79f1d321dfd57d66f53", size = 32669 }, - { url = "https://files.pythonhosted.org/packages/04/17/ed9aba495916fcf5fe4ecb2267ceb851fc5f273c4e4625ae453350cfd564/google_crc32c-1.7.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:6335de12921f06e1f774d0dd1fbea6bf610abe0887a1638f64d694013138be5d", size = 33476 }, - { url = "https://files.pythonhosted.org/packages/dd/b7/787e2453cf8639c94b3d06c9d61f512234a82e1d12d13d18584bd3049904/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2d73a68a653c57281401871dd4aeebbb6af3191dcac751a76ce430df4d403194", size = 30470 }, - { url = "https://files.pythonhosted.org/packages/ed/b4/6042c2b0cbac3ec3a69bb4c49b28d2f517b7a0f4a0232603c42c58e22b44/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:22beacf83baaf59f9d3ab2bbb4db0fb018da8e5aebdce07ef9f09fce8220285e", size = 30315 }, - { url = "https://files.pythonhosted.org/packages/29/ad/01e7a61a5d059bc57b702d9ff6a18b2585ad97f720bd0a0dbe215df1ab0e/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19eafa0e4af11b0a4eb3974483d55d2d77ad1911e6cf6f832e1574f6781fd337", size = 33180 }, - { url = "https://files.pythonhosted.org/packages/3b/a5/7279055cf004561894ed3a7bfdf5bf90a53f28fadd01af7cd166e88ddf16/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d86616faaea68101195c6bdc40c494e4d76f41e07a37ffdef270879c15fb65", size = 32794 }, - { url = "https://files.pythonhosted.org/packages/0f/d6/77060dbd140c624e42ae3ece3df53b9d811000729a5c821b9fd671ceaac6/google_crc32c-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:b7491bdc0c7564fcf48c0179d2048ab2f7c7ba36b84ccd3a3e1c3f7a72d3bba6", size = 33477 }, - { url = "https://files.pythonhosted.org/packages/16/1b/1693372bf423ada422f80fd88260dbfd140754adb15cbc4d7e9a68b1cb8e/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85fef7fae11494e747c9fd1359a527e5970fc9603c90764843caabd3a16a0a48", size = 28241 }, - { url = "https://files.pythonhosted.org/packages/fd/3c/2a19a60a473de48717b4efb19398c3f914795b64a96cf3fbe82588044f78/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82", size = 28048 }, + { url = "https://files.pythonhosted.org/packages/f7/94/220139ea87822b6fdfdab4fb9ba81b3fff7ea2c82e2af34adc726085bffc/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6fbab4b935989e2c3610371963ba1b86afb09537fd0c633049be82afe153ac06", size = 30468, upload-time = "2025-03-26T14:32:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/94/97/789b23bdeeb9d15dc2904660463ad539d0318286d7633fe2760c10ed0c1c/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:ed66cbe1ed9cbaaad9392b5259b3eba4a9e565420d734e6238813c428c3336c9", size = 30313, upload-time = "2025-03-26T14:57:38.758Z" }, + { url = "https://files.pythonhosted.org/packages/81/b8/976a2b843610c211e7ccb3e248996a61e87dbb2c09b1499847e295080aec/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee6547b657621b6cbed3562ea7826c3e11cab01cd33b74e1f677690652883e77", size = 33048, upload-time = "2025-03-26T14:41:30.679Z" }, + { url = "https://files.pythonhosted.org/packages/c9/16/a3842c2cf591093b111d4a5e2bfb478ac6692d02f1b386d2a33283a19dc9/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d68e17bad8f7dd9a49181a1f5a8f4b251c6dbc8cc96fb79f1d321dfd57d66f53", size = 32669, upload-time = "2025-03-26T14:41:31.432Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/17/ed9aba495916fcf5fe4ecb2267ceb851fc5f273c4e4625ae453350cfd564/google_crc32c-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:6335de12921f06e1f774d0dd1fbea6bf610abe0887a1638f64d694013138be5d", size = 33476, upload-time = "2025-03-26T14:29:10.211Z" }, + { url = "https://files.pythonhosted.org/packages/dd/b7/787e2453cf8639c94b3d06c9d61f512234a82e1d12d13d18584bd3049904/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2d73a68a653c57281401871dd4aeebbb6af3191dcac751a76ce430df4d403194", size = 30470, upload-time = "2025-03-26T14:34:31.655Z" }, + { url = "https://files.pythonhosted.org/packages/ed/b4/6042c2b0cbac3ec3a69bb4c49b28d2f517b7a0f4a0232603c42c58e22b44/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:22beacf83baaf59f9d3ab2bbb4db0fb018da8e5aebdce07ef9f09fce8220285e", size = 30315, upload-time = "2025-03-26T15:01:54.634Z" }, + { url = "https://files.pythonhosted.org/packages/29/ad/01e7a61a5d059bc57b702d9ff6a18b2585ad97f720bd0a0dbe215df1ab0e/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19eafa0e4af11b0a4eb3974483d55d2d77ad1911e6cf6f832e1574f6781fd337", size = 33180, upload-time = "2025-03-26T14:41:32.168Z" }, + { url = "https://files.pythonhosted.org/packages/3b/a5/7279055cf004561894ed3a7bfdf5bf90a53f28fadd01af7cd166e88ddf16/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d86616faaea68101195c6bdc40c494e4d76f41e07a37ffdef270879c15fb65", size = 32794, upload-time = "2025-03-26T14:41:33.264Z" }, + { url = "https://files.pythonhosted.org/packages/0f/d6/77060dbd140c624e42ae3ece3df53b9d811000729a5c821b9fd671ceaac6/google_crc32c-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:b7491bdc0c7564fcf48c0179d2048ab2f7c7ba36b84ccd3a3e1c3f7a72d3bba6", size = 33477, upload-time = "2025-03-26T14:29:10.94Z" }, + { url = "https://files.pythonhosted.org/packages/16/1b/1693372bf423ada422f80fd88260dbfd140754adb15cbc4d7e9a68b1cb8e/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85fef7fae11494e747c9fd1359a527e5970fc9603c90764843caabd3a16a0a48", size = 28241, upload-time = "2025-03-26T14:41:45.898Z" }, + { url = "https://files.pythonhosted.org/packages/fd/3c/2a19a60a473de48717b4efb19398c3f914795b64a96cf3fbe82588044f78/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82", size = 28048, upload-time = "2025-03-26T14:41:46.696Z" }, ] [[package]] @@ -2398,9 +2431,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-crc32c" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/64/d7/520b62a35b23038ff005e334dba3ffc75fcf583bee26723f1fd8fd4b6919/google_resumable_media-2.8.0.tar.gz", hash = "sha256:f1157ed8b46994d60a1bc432544db62352043113684d4e030ee02e77ebe9a1ae", size = 2163265 } +sdist = { url = "https://files.pythonhosted.org/packages/64/d7/520b62a35b23038ff005e334dba3ffc75fcf583bee26723f1fd8fd4b6919/google_resumable_media-2.8.0.tar.gz", hash = "sha256:f1157ed8b46994d60a1bc432544db62352043113684d4e030ee02e77ebe9a1ae", size = 2163265, upload-time = "2025-11-17T15:38:06.659Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1f/0b/93afde9cfe012260e9fe1522f35c9b72d6ee222f316586b1f23ecf44d518/google_resumable_media-2.8.0-py3-none-any.whl", hash = 
"sha256:dd14a116af303845a8d932ddae161a26e86cc229645bc98b39f026f9b1717582", size = 81340 }, + { url = "https://files.pythonhosted.org/packages/1f/0b/93afde9cfe012260e9fe1522f35c9b72d6ee222f316586b1f23ecf44d518/google_resumable_media-2.8.0-py3-none-any.whl", hash = "sha256:dd14a116af303845a8d932ddae161a26e86cc229645bc98b39f026f9b1717582", size = 81340, upload-time = "2025-11-17T15:38:05.594Z" }, ] [[package]] @@ -2410,9 +2443,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d2/dc/291cebf3c73e108ef8210f19cb83d671691354f4f7dd956445560d778715/googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e", size = 121646 } +sdist = { url = "https://files.pythonhosted.org/packages/d2/dc/291cebf3c73e108ef8210f19cb83d671691354f4f7dd956445560d778715/googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e", size = 121646, upload-time = "2024-03-11T12:33:15.765Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/a6/12a0c976140511d8bc8a16ad15793b2aef29ac927baa0786ccb7ddbb6e1c/googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632", size = 229141 }, + { url = "https://files.pythonhosted.org/packages/dc/a6/12a0c976140511d8bc8a16ad15793b2aef29ac927baa0786ccb7ddbb6e1c/googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632", size = 229141, upload-time = "2024-03-11T12:33:14.052Z" }, ] [package.optional-dependencies] @@ -2430,9 +2463,9 @@ dependencies = [ { name = "graphql-core" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/9f/cf224a88ed71eb223b7aa0b9ff0aa10d7ecc9a4acdca2279eb046c26d5dc/gql-4.0.0.tar.gz", hash = "sha256:f22980844eb6a7c0266ffc70f111b9c7e7c7c13da38c3b439afc7eab3d7c9c8e", size = 215644 } +sdist = { url = "https://files.pythonhosted.org/packages/06/9f/cf224a88ed71eb223b7aa0b9ff0aa10d7ecc9a4acdca2279eb046c26d5dc/gql-4.0.0.tar.gz", hash = "sha256:f22980844eb6a7c0266ffc70f111b9c7e7c7c13da38c3b439afc7eab3d7c9c8e", size = 215644, upload-time = "2025-08-17T14:32:35.397Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/94/30bbd09e8d45339fa77a48f5778d74d47e9242c11b3cd1093b3d994770a5/gql-4.0.0-py3-none-any.whl", hash = "sha256:f3beed7c531218eb24d97cb7df031b4a84fdb462f4a2beb86e2633d395937479", size = 89900 }, + { url = "https://files.pythonhosted.org/packages/ac/94/30bbd09e8d45339fa77a48f5778d74d47e9242c11b3cd1093b3d994770a5/gql-4.0.0-py3-none-any.whl", hash = "sha256:f3beed7c531218eb24d97cb7df031b4a84fdb462f4a2beb86e2633d395937479", size = 89900, upload-time = "2025-08-17T14:32:34.029Z" }, ] [package.optional-dependencies] @@ -2448,48 +2481,48 @@ requests = [ name = "graphql-core" version = "3.2.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ac/9b/037a640a2983b09aed4a823f9cf1729e6d780b0671f854efa4727a7affbe/graphql_core-3.2.7.tar.gz", hash = "sha256:27b6904bdd3b43f2a0556dad5d579bdfdeab1f38e8e8788e555bdcb586a6f62c", size = 513484 } +sdist = { url = "https://files.pythonhosted.org/packages/ac/9b/037a640a2983b09aed4a823f9cf1729e6d780b0671f854efa4727a7affbe/graphql_core-3.2.7.tar.gz", hash = "sha256:27b6904bdd3b43f2a0556dad5d579bdfdeab1f38e8e8788e555bdcb586a6f62c", size = 513484, 
upload-time = "2025-11-01T22:30:40.436Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/14/933037032608787fb92e365883ad6a741c235e0ff992865ec5d904a38f1e/graphql_core-3.2.7-py3-none-any.whl", hash = "sha256:17fc8f3ca4a42913d8e24d9ac9f08deddf0a0b2483076575757f6c412ead2ec0", size = 207262 }, + { url = "https://files.pythonhosted.org/packages/0a/14/933037032608787fb92e365883ad6a741c235e0ff992865ec5d904a38f1e/graphql_core-3.2.7-py3-none-any.whl", hash = "sha256:17fc8f3ca4a42913d8e24d9ac9f08deddf0a0b2483076575757f6c412ead2ec0", size = 207262, upload-time = "2025-11-01T22:30:38.912Z" }, ] [[package]] name = "graphviz" version = "0.21" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/b3/3ac91e9be6b761a4b30d66ff165e54439dcd48b83f4e20d644867215f6ca/graphviz-0.21.tar.gz", hash = "sha256:20743e7183be82aaaa8ad6c93f8893c923bd6658a04c32ee115edb3c8a835f78", size = 200434 } +sdist = { url = "https://files.pythonhosted.org/packages/f8/b3/3ac91e9be6b761a4b30d66ff165e54439dcd48b83f4e20d644867215f6ca/graphviz-0.21.tar.gz", hash = "sha256:20743e7183be82aaaa8ad6c93f8893c923bd6658a04c32ee115edb3c8a835f78", size = 200434, upload-time = "2025-06-15T09:35:05.824Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/4c/e0ce1ef95d4000ebc1c11801f9b944fa5910ecc15b5e351865763d8657f8/graphviz-0.21-py3-none-any.whl", hash = "sha256:54f33de9f4f911d7e84e4191749cac8cc5653f815b06738c54db9a15ab8b1e42", size = 47300 }, + { url = "https://files.pythonhosted.org/packages/91/4c/e0ce1ef95d4000ebc1c11801f9b944fa5910ecc15b5e351865763d8657f8/graphviz-0.21-py3-none-any.whl", hash = "sha256:54f33de9f4f911d7e84e4191749cac8cc5653f815b06738c54db9a15ab8b1e42", size = 47300, upload-time = "2025-06-15T09:35:04.433Z" }, ] [[package]] name = "greenlet" version = "3.2.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260 } +sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305 }, - { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472 }, - { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646 }, - { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = 
"sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519 }, - { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707 }, - { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684 }, - { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647 }, - { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073 }, - { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385 }, - { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329 }, - { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100 }, - { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079 }, - { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997 }, - { url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185 }, - { url = "https://files.pythonhosted.org/packages/31/da/0386695eef69ffae1ad726881571dfe28b41970173947e7c558d9998de0f/greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9", size = 649926 }, - { url = "https://files.pythonhosted.org/packages/68/88/69bf19fd4dc19981928ceacbc5fd4bb6bc2215d53199e367832e98d1d8fe/greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6", size = 651839 }, - { url = 
"https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586 }, - { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281 }, - { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142 }, - { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0", size = 1564846 }, - { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d", size = 1633814 }, - { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899 }, + { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, + { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = "2025-08-07T13:45:26.523Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" }, + { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, + { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, + { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385, upload-time = "2025-11-04T12:42:11.067Z" }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329, upload-time = "2025-11-04T12:42:12.928Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, + { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, + { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, + { url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185, upload-time = "2025-08-07T13:45:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/31/da/0386695eef69ffae1ad726881571dfe28b41970173947e7c558d9998de0f/greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9", size = 649926, upload-time = "2025-08-07T13:53:15.251Z" }, + { url = "https://files.pythonhosted.org/packages/68/88/69bf19fd4dc19981928ceacbc5fd4bb6bc2215d53199e367832e98d1d8fe/greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6", size = 651839, upload-time = "2025-08-07T13:18:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0", size = 1564846, upload-time = "2025-11-04T12:42:15.191Z" }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d", size = 1633814, upload-time = "2025-11-04T12:42:17.175Z" }, + { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, ] [[package]] @@ -2499,46 +2532,46 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/80/b3/ff0d704cdc5cf399d74aabd2bf1694d4c4c3231d4d74b011b8f39f686a86/grimp-3.13.tar.gz", hash = "sha256:759bf6e05186e6473ee71af4119ec181855b2b324f4fcdd78dee9e5b59d87874", size = 847508 } +sdist = { url = "https://files.pythonhosted.org/packages/80/b3/ff0d704cdc5cf399d74aabd2bf1694d4c4c3231d4d74b011b8f39f686a86/grimp-3.13.tar.gz", hash = "sha256:759bf6e05186e6473ee71af4119ec181855b2b324f4fcdd78dee9e5b59d87874", size = 847508, upload-time = "2025-10-29T13:04:57.704Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/cc/d272cf87728a7e6ddb44d3c57c1d3cbe7daf2ffe4dc76e3dc9b953b69ab1/grimp-3.13-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:57745996698932768274a2ed9ba3e5c424f60996c53ecaf1c82b75be9e819ee9", size = 2074518 }, - { url = "https://files.pythonhosted.org/packages/06/11/31dc622c5a0d1615b20532af2083f4bba2573aebbba5b9d6911dfd60a37d/grimp-3.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ca29f09710342b94fa6441f4d1102a0e49f0b463b1d91e43223baa949c5e9337", size = 1988182 }, - { url = "https://files.pythonhosted.org/packages/aa/83/a0e19beb5c42df09e9a60711b227b4f910ba57f46bea258a9e1df883976c/grimp-3.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adda25aa158e11d96dd27166300b955c8ec0c76ce2fd1a13597e9af012aada06", size = 2145832 }, - { url = "https://files.pythonhosted.org/packages/bc/f5/13752205e290588e970fdc019b4ab2c063ca8da352295c332e34df5d5842/grimp-3.13-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03e17029d75500a5282b40cb15cdae030bf14df9dfaa6a2b983f08898dfe74b6", size = 2106762 }, - { url = 
"https://files.pythonhosted.org/packages/ff/30/c4d62543beda4b9a483a6cd5b7dd5e4794aafb511f144d21a452467989a1/grimp-3.13-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cbfc9d2d0ebc0631fb4012a002f3d8f4e3acb8325be34db525c0392674433b8", size = 2256674 }, - { url = "https://files.pythonhosted.org/packages/9b/ea/d07ed41b7121719c3f7bf30c9881dbde69efeacfc2daf4e4a628efe5f123/grimp-3.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:161449751a085484608c5b9f863e41e8fb2a98e93f7312ead5d831e487a94518", size = 2442699 }, - { url = "https://files.pythonhosted.org/packages/fe/a0/1923f0480756effb53c7e6cef02a3918bb519a86715992720838d44f0329/grimp-3.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:119628fbe7f941d1e784edac98e8ced7e78a0b966a4ff2c449e436ee860bd507", size = 2317145 }, - { url = "https://files.pythonhosted.org/packages/0d/d9/aef4c8350090653e34bc755a5d9e39cc300f5c46c651c1d50195f69bf9ab/grimp-3.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ca1ac776baf1fa105342b23c72f2e7fdd6771d4cce8d2903d28f92fd34a9e8f", size = 2180288 }, - { url = "https://files.pythonhosted.org/packages/9f/2e/a206f76eccffa56310a1c5d5950ed34923a34ae360cb38e297604a288837/grimp-3.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:941ff414cc66458f56e6af93c618266091ea70bfdabe7a84039be31d937051ee", size = 2328696 }, - { url = "https://files.pythonhosted.org/packages/40/3b/88ff1554409b58faf2673854770e6fc6e90167a182f5166147b7618767d7/grimp-3.13-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:87ad9bcd1caaa2f77c369d61a04b9f2f1b87f4c3b23ae6891b2c943193c4ec62", size = 2367574 }, - { url = "https://files.pythonhosted.org/packages/b6/b3/e9c99ecd94567465a0926ae7136e589aed336f6979a4cddcb8dfba16d27c/grimp-3.13-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:751fe37104a4f023d5c6556558b723d843d44361245c20f51a5d196de00e4774", size = 2358842 }, - { url = "https://files.pythonhosted.org/packages/74/65/a5fffeeb9273e06dfbe962c8096331ba181ca8415c5f9d110b347f2c0c34/grimp-3.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9b561f79ec0b3a4156937709737191ad57520f2d58fa1fc43cd79f67839a3cd7", size = 2382268 }, - { url = "https://files.pythonhosted.org/packages/d9/79/2f3b4323184329b26b46de2b6d1bd64ba1c26e0a9c3cfa0aaecec237b75e/grimp-3.13-cp311-cp311-win32.whl", hash = "sha256:52405ea8c8f20cf5d2d1866c80ee3f0243a38af82bd49d1464c5e254bf2e1f8f", size = 1759345 }, - { url = "https://files.pythonhosted.org/packages/b6/ce/e86cf73e412a6bf531cbfa5c733f8ca48b28ebea23a037338be763f24849/grimp-3.13-cp311-cp311-win_amd64.whl", hash = "sha256:6a45d1d3beeefad69717b3718e53680fb3579fe67696b86349d6f39b75e850bf", size = 1859382 }, - { url = "https://files.pythonhosted.org/packages/1d/06/ff7e3d72839f46f0fccdc79e1afe332318986751e20f65d7211a5e51366c/grimp-3.13-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3e715c56ffdd055e5c84d27b4c02d83369b733e6a24579d42bbbc284bd0664a9", size = 2070161 }, - { url = "https://files.pythonhosted.org/packages/58/2f/a95bdf8996db9400fd7e288f32628b2177b8840fe5f6b7cd96247b5fa173/grimp-3.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f794dea35a4728b948ab8fec970ffbdf2589b34209f3ab902cf8a9148cf1eaad", size = 1984365 }, - { url = "https://files.pythonhosted.org/packages/1f/45/cc3d7f3b7b4d93e0b9d747dc45ed73a96203ba083dc857f24159eb6966b4/grimp-3.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69571270f2c27e8a64b968195aa7ecc126797112a9bf1e804ff39ba9f42d6f6d", size = 2145486 }, - { 
url = "https://files.pythonhosted.org/packages/16/92/a6e493b71cb5a9145ad414cc4790c3779853372b840a320f052b22879606/grimp-3.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f7b226398ae476762ef0afb5ef8f838d39c8e0e2f6d1a4378ce47059b221a4a", size = 2106747 }, - { url = "https://files.pythonhosted.org/packages/db/8d/36a09f39fe14ad8843ef3ff81090ef23abbd02984c1fcc1cef30e5713d82/grimp-3.13-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5498aeac4df0131a1787fcbe9bb460b52fc9b781ec6bba607fd6a7d6d3ea6fce", size = 2257027 }, - { url = "https://files.pythonhosted.org/packages/a1/7a/90f78787f80504caeef501f1bff47e8b9f6058d45995f1d4c921df17bfef/grimp-3.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4be702bb2b5c001a6baf709c452358470881e15e3e074cfc5308903603485dcb", size = 2441208 }, - { url = "https://files.pythonhosted.org/packages/61/71/0fbd3a3e914512b9602fa24c8ebc85a8925b101f04f8a8c1d1e220e0a717/grimp-3.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fcf988f3e3d272a88f7be68f0c1d3719fee8624d902e9c0346b9015a0ea6a65", size = 2318758 }, - { url = "https://files.pythonhosted.org/packages/34/e9/29c685e88b3b0688f0a2e30c0825e02076ecdf22bc0e37b1468562eaa09a/grimp-3.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ede36d104ff88c208140f978de3345f439345f35b8ef2b4390c59ef6984deba", size = 2180523 }, - { url = "https://files.pythonhosted.org/packages/86/bc/7cc09574b287b8850a45051e73272f365259d9b6ca58d7b8773265c6fe35/grimp-3.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b35e44bb8dc80e0bd909a64387f722395453593a1884caca9dc0748efea33764", size = 2328855 }, - { url = "https://files.pythonhosted.org/packages/34/86/3b0845900c8f984a57c6afe3409b20638065462d48b6afec0fd409fd6118/grimp-3.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:becb88e9405fc40896acd6e2b9bbf6f242a5ae2fd43a1ec0a32319ab6c10a227", size = 2367756 }, - { url = "https://files.pythonhosted.org/packages/06/2d/4e70e8c06542db92c3fffaecb43ebfc4114a411505bff574d4da7d82c7db/grimp-3.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a66585b4af46c3fbadbef495483514bee037e8c3075ed179ba4f13e494eb7899", size = 2358595 }, - { url = "https://files.pythonhosted.org/packages/dd/06/c511d39eb6c73069af277f4e74991f1f29a05d90cab61f5416b9fc43932f/grimp-3.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:29f68c6e2ff70d782ca0e989ec4ec44df73ba847937bcbb6191499224a2f84e2", size = 2381464 }, - { url = "https://files.pythonhosted.org/packages/86/f5/42197d69e4c9e2e7eed091d06493da3824e07c37324155569aa895c3b5f7/grimp-3.13-cp312-cp312-win32.whl", hash = "sha256:cc996dcd1a44ae52d257b9a3e98838f8ecfdc42f7c62c8c82c2fcd3828155c98", size = 1758510 }, - { url = "https://files.pythonhosted.org/packages/30/dd/59c5f19f51e25f3dbf1c9e88067a88165f649ba1b8e4174dbaf1c950f78b/grimp-3.13-cp312-cp312-win_amd64.whl", hash = "sha256:e2966435947e45b11568f04a65863dcf836343c11ae44aeefdaa7f07eb1a0576", size = 1859530 }, - { url = "https://files.pythonhosted.org/packages/e5/81/82de1b5d82701214b1f8e32b2e71fde8e1edbb4f2cdca9beb22ee6c8796d/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a6a3c76525b018c85c0e3a632d94d72be02225f8ada56670f3f213cf0762be4", size = 2145955 }, - { url = "https://files.pythonhosted.org/packages/8c/ae/ada18cb73bdf97094af1c60070a5b85549482a57c509ee9a23fdceed4fc3/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:239e9b347af4da4cf69465bfa7b2901127f6057bc73416ba8187fb1eabafc6ea", size = 2107150 }, - { url = "https://files.pythonhosted.org/packages/10/5e/6d8c65643ad5a1b6e00cc2cd8f56fc063923485f07c59a756fa61eefe7f2/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6db85ce2dc2f804a2edd1c1e9eaa46d282e1f0051752a83ca08ca8b87f87376", size = 2257515 }, - { url = "https://files.pythonhosted.org/packages/b2/62/72cbfd7d0f2b95a53edd01d5f6b0d02bde38db739a727e35b76c13e0d0a8/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e000f3590bcc6ff7c781ebbc1ac4eb919f97180f13cc4002c868822167bd9aed", size = 2441262 }, - { url = "https://files.pythonhosted.org/packages/18/00/b9209ab385567c3bddffb5d9eeecf9cb432b05c30ca8f35904b06e206a89/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2374c217c862c1af933a430192d6a7c6723ed1d90303f1abbc26f709bbb9263", size = 2318557 }, - { url = "https://files.pythonhosted.org/packages/11/4d/a3d73c11d09da00a53ceafe2884a71c78f5a76186af6d633cadd6c85d850/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ed0ff17d559ff2e7fa1be8ae086bc4fedcace5d7b12017f60164db8d9a8d806", size = 2180811 }, - { url = "https://files.pythonhosted.org/packages/c1/9a/1cdfaa7d7beefd8859b190dfeba11d5ec074e8702b2903e9f182d662ed63/grimp-3.13-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:43960234aabce018c8d796ec8b77c484a1c9cbb6a3bc036a0d307c8dade9874c", size = 2329205 }, - { url = "https://files.pythonhosted.org/packages/86/73/b36f86ef98df96e7e8a6166dfa60c8db5d597f051e613a3112f39a870b4c/grimp-3.13-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:44420b638b3e303f32314bd4d309f15de1666629035acd1cdd3720c15917ac85", size = 2368745 }, - { url = "https://files.pythonhosted.org/packages/02/2f/0ce37872fad5c4b82d727f6e435fd5bc76f701279bddc9666710318940cf/grimp-3.13-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:f6127fdb982cf135612504d34aa16b841f421e54751fcd54f80b9531decb2b3f", size = 2358753 }, - { url = "https://files.pythonhosted.org/packages/bb/23/935c888ac9ee71184fe5adf5ea86648746739be23c85932857ac19fc1d17/grimp-3.13-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:69893a9ef1edea25226ed17e8e8981e32900c59703972e0780c0e927ce624f75", size = 2383066 }, + { url = "https://files.pythonhosted.org/packages/45/cc/d272cf87728a7e6ddb44d3c57c1d3cbe7daf2ffe4dc76e3dc9b953b69ab1/grimp-3.13-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:57745996698932768274a2ed9ba3e5c424f60996c53ecaf1c82b75be9e819ee9", size = 2074518, upload-time = "2025-10-29T13:03:58.51Z" }, + { url = "https://files.pythonhosted.org/packages/06/11/31dc622c5a0d1615b20532af2083f4bba2573aebbba5b9d6911dfd60a37d/grimp-3.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ca29f09710342b94fa6441f4d1102a0e49f0b463b1d91e43223baa949c5e9337", size = 1988182, upload-time = "2025-10-29T13:03:50.129Z" }, + { url = "https://files.pythonhosted.org/packages/aa/83/a0e19beb5c42df09e9a60711b227b4f910ba57f46bea258a9e1df883976c/grimp-3.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adda25aa158e11d96dd27166300b955c8ec0c76ce2fd1a13597e9af012aada06", size = 2145832, upload-time = "2025-10-29T13:02:35.218Z" }, + { url = "https://files.pythonhosted.org/packages/bc/f5/13752205e290588e970fdc019b4ab2c063ca8da352295c332e34df5d5842/grimp-3.13-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:03e17029d75500a5282b40cb15cdae030bf14df9dfaa6a2b983f08898dfe74b6", size = 2106762, upload-time = "2025-10-29T13:02:51.681Z" }, + { url = "https://files.pythonhosted.org/packages/ff/30/c4d62543beda4b9a483a6cd5b7dd5e4794aafb511f144d21a452467989a1/grimp-3.13-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cbfc9d2d0ebc0631fb4012a002f3d8f4e3acb8325be34db525c0392674433b8", size = 2256674, upload-time = "2025-10-29T13:03:27.923Z" }, + { url = "https://files.pythonhosted.org/packages/9b/ea/d07ed41b7121719c3f7bf30c9881dbde69efeacfc2daf4e4a628efe5f123/grimp-3.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:161449751a085484608c5b9f863e41e8fb2a98e93f7312ead5d831e487a94518", size = 2442699, upload-time = "2025-10-29T13:03:04.451Z" }, + { url = "https://files.pythonhosted.org/packages/fe/a0/1923f0480756effb53c7e6cef02a3918bb519a86715992720838d44f0329/grimp-3.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:119628fbe7f941d1e784edac98e8ced7e78a0b966a4ff2c449e436ee860bd507", size = 2317145, upload-time = "2025-10-29T13:03:15.941Z" }, + { url = "https://files.pythonhosted.org/packages/0d/d9/aef4c8350090653e34bc755a5d9e39cc300f5c46c651c1d50195f69bf9ab/grimp-3.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ca1ac776baf1fa105342b23c72f2e7fdd6771d4cce8d2903d28f92fd34a9e8f", size = 2180288, upload-time = "2025-10-29T13:03:41.023Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2e/a206f76eccffa56310a1c5d5950ed34923a34ae360cb38e297604a288837/grimp-3.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:941ff414cc66458f56e6af93c618266091ea70bfdabe7a84039be31d937051ee", size = 2328696, upload-time = "2025-10-29T13:04:06.888Z" }, + { url = "https://files.pythonhosted.org/packages/40/3b/88ff1554409b58faf2673854770e6fc6e90167a182f5166147b7618767d7/grimp-3.13-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:87ad9bcd1caaa2f77c369d61a04b9f2f1b87f4c3b23ae6891b2c943193c4ec62", size = 2367574, upload-time = "2025-10-29T13:04:21.404Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b3/e9c99ecd94567465a0926ae7136e589aed336f6979a4cddcb8dfba16d27c/grimp-3.13-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:751fe37104a4f023d5c6556558b723d843d44361245c20f51a5d196de00e4774", size = 2358842, upload-time = "2025-10-29T13:04:34.26Z" }, + { url = "https://files.pythonhosted.org/packages/74/65/a5fffeeb9273e06dfbe962c8096331ba181ca8415c5f9d110b347f2c0c34/grimp-3.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9b561f79ec0b3a4156937709737191ad57520f2d58fa1fc43cd79f67839a3cd7", size = 2382268, upload-time = "2025-10-29T13:04:46.864Z" }, + { url = "https://files.pythonhosted.org/packages/d9/79/2f3b4323184329b26b46de2b6d1bd64ba1c26e0a9c3cfa0aaecec237b75e/grimp-3.13-cp311-cp311-win32.whl", hash = "sha256:52405ea8c8f20cf5d2d1866c80ee3f0243a38af82bd49d1464c5e254bf2e1f8f", size = 1759345, upload-time = "2025-10-29T13:05:10.435Z" }, + { url = "https://files.pythonhosted.org/packages/b6/ce/e86cf73e412a6bf531cbfa5c733f8ca48b28ebea23a037338be763f24849/grimp-3.13-cp311-cp311-win_amd64.whl", hash = "sha256:6a45d1d3beeefad69717b3718e53680fb3579fe67696b86349d6f39b75e850bf", size = 1859382, upload-time = "2025-10-29T13:05:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/1d/06/ff7e3d72839f46f0fccdc79e1afe332318986751e20f65d7211a5e51366c/grimp-3.13-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3e715c56ffdd055e5c84d27b4c02d83369b733e6a24579d42bbbc284bd0664a9", size = 
2070161, upload-time = "2025-10-29T13:03:59.755Z" }, + { url = "https://files.pythonhosted.org/packages/58/2f/a95bdf8996db9400fd7e288f32628b2177b8840fe5f6b7cd96247b5fa173/grimp-3.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f794dea35a4728b948ab8fec970ffbdf2589b34209f3ab902cf8a9148cf1eaad", size = 1984365, upload-time = "2025-10-29T13:03:51.805Z" }, + { url = "https://files.pythonhosted.org/packages/1f/45/cc3d7f3b7b4d93e0b9d747dc45ed73a96203ba083dc857f24159eb6966b4/grimp-3.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69571270f2c27e8a64b968195aa7ecc126797112a9bf1e804ff39ba9f42d6f6d", size = 2145486, upload-time = "2025-10-29T13:02:36.591Z" }, + { url = "https://files.pythonhosted.org/packages/16/92/a6e493b71cb5a9145ad414cc4790c3779853372b840a320f052b22879606/grimp-3.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f7b226398ae476762ef0afb5ef8f838d39c8e0e2f6d1a4378ce47059b221a4a", size = 2106747, upload-time = "2025-10-29T13:02:53.084Z" }, + { url = "https://files.pythonhosted.org/packages/db/8d/36a09f39fe14ad8843ef3ff81090ef23abbd02984c1fcc1cef30e5713d82/grimp-3.13-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5498aeac4df0131a1787fcbe9bb460b52fc9b781ec6bba607fd6a7d6d3ea6fce", size = 2257027, upload-time = "2025-10-29T13:03:29.44Z" }, + { url = "https://files.pythonhosted.org/packages/a1/7a/90f78787f80504caeef501f1bff47e8b9f6058d45995f1d4c921df17bfef/grimp-3.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4be702bb2b5c001a6baf709c452358470881e15e3e074cfc5308903603485dcb", size = 2441208, upload-time = "2025-10-29T13:03:05.733Z" }, + { url = "https://files.pythonhosted.org/packages/61/71/0fbd3a3e914512b9602fa24c8ebc85a8925b101f04f8a8c1d1e220e0a717/grimp-3.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fcf988f3e3d272a88f7be68f0c1d3719fee8624d902e9c0346b9015a0ea6a65", size = 2318758, upload-time = "2025-10-29T13:03:17.454Z" }, + { url = "https://files.pythonhosted.org/packages/34/e9/29c685e88b3b0688f0a2e30c0825e02076ecdf22bc0e37b1468562eaa09a/grimp-3.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ede36d104ff88c208140f978de3345f439345f35b8ef2b4390c59ef6984deba", size = 2180523, upload-time = "2025-10-29T13:03:42.3Z" }, + { url = "https://files.pythonhosted.org/packages/86/bc/7cc09574b287b8850a45051e73272f365259d9b6ca58d7b8773265c6fe35/grimp-3.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b35e44bb8dc80e0bd909a64387f722395453593a1884caca9dc0748efea33764", size = 2328855, upload-time = "2025-10-29T13:04:08.111Z" }, + { url = "https://files.pythonhosted.org/packages/34/86/3b0845900c8f984a57c6afe3409b20638065462d48b6afec0fd409fd6118/grimp-3.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:becb88e9405fc40896acd6e2b9bbf6f242a5ae2fd43a1ec0a32319ab6c10a227", size = 2367756, upload-time = "2025-10-29T13:04:22.736Z" }, + { url = "https://files.pythonhosted.org/packages/06/2d/4e70e8c06542db92c3fffaecb43ebfc4114a411505bff574d4da7d82c7db/grimp-3.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a66585b4af46c3fbadbef495483514bee037e8c3075ed179ba4f13e494eb7899", size = 2358595, upload-time = "2025-10-29T13:04:35.595Z" }, + { url = "https://files.pythonhosted.org/packages/dd/06/c511d39eb6c73069af277f4e74991f1f29a05d90cab61f5416b9fc43932f/grimp-3.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:29f68c6e2ff70d782ca0e989ec4ec44df73ba847937bcbb6191499224a2f84e2", size = 2381464, upload-time 
= "2025-10-29T13:04:48.265Z" }, + { url = "https://files.pythonhosted.org/packages/86/f5/42197d69e4c9e2e7eed091d06493da3824e07c37324155569aa895c3b5f7/grimp-3.13-cp312-cp312-win32.whl", hash = "sha256:cc996dcd1a44ae52d257b9a3e98838f8ecfdc42f7c62c8c82c2fcd3828155c98", size = 1758510, upload-time = "2025-10-29T13:05:11.74Z" }, + { url = "https://files.pythonhosted.org/packages/30/dd/59c5f19f51e25f3dbf1c9e88067a88165f649ba1b8e4174dbaf1c950f78b/grimp-3.13-cp312-cp312-win_amd64.whl", hash = "sha256:e2966435947e45b11568f04a65863dcf836343c11ae44aeefdaa7f07eb1a0576", size = 1859530, upload-time = "2025-10-29T13:05:02.638Z" }, + { url = "https://files.pythonhosted.org/packages/e5/81/82de1b5d82701214b1f8e32b2e71fde8e1edbb4f2cdca9beb22ee6c8796d/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a6a3c76525b018c85c0e3a632d94d72be02225f8ada56670f3f213cf0762be4", size = 2145955, upload-time = "2025-10-29T13:02:47.559Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ae/ada18cb73bdf97094af1c60070a5b85549482a57c509ee9a23fdceed4fc3/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:239e9b347af4da4cf69465bfa7b2901127f6057bc73416ba8187fb1eabafc6ea", size = 2107150, upload-time = "2025-10-29T13:02:59.891Z" }, + { url = "https://files.pythonhosted.org/packages/10/5e/6d8c65643ad5a1b6e00cc2cd8f56fc063923485f07c59a756fa61eefe7f2/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6db85ce2dc2f804a2edd1c1e9eaa46d282e1f0051752a83ca08ca8b87f87376", size = 2257515, upload-time = "2025-10-29T13:03:36.705Z" }, + { url = "https://files.pythonhosted.org/packages/b2/62/72cbfd7d0f2b95a53edd01d5f6b0d02bde38db739a727e35b76c13e0d0a8/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e000f3590bcc6ff7c781ebbc1ac4eb919f97180f13cc4002c868822167bd9aed", size = 2441262, upload-time = "2025-10-29T13:03:12.158Z" }, + { url = "https://files.pythonhosted.org/packages/18/00/b9209ab385567c3bddffb5d9eeecf9cb432b05c30ca8f35904b06e206a89/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2374c217c862c1af933a430192d6a7c6723ed1d90303f1abbc26f709bbb9263", size = 2318557, upload-time = "2025-10-29T13:03:23.925Z" }, + { url = "https://files.pythonhosted.org/packages/11/4d/a3d73c11d09da00a53ceafe2884a71c78f5a76186af6d633cadd6c85d850/grimp-3.13-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ed0ff17d559ff2e7fa1be8ae086bc4fedcace5d7b12017f60164db8d9a8d806", size = 2180811, upload-time = "2025-10-29T13:03:47.461Z" }, + { url = "https://files.pythonhosted.org/packages/c1/9a/1cdfaa7d7beefd8859b190dfeba11d5ec074e8702b2903e9f182d662ed63/grimp-3.13-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:43960234aabce018c8d796ec8b77c484a1c9cbb6a3bc036a0d307c8dade9874c", size = 2329205, upload-time = "2025-10-29T13:04:15.845Z" }, + { url = "https://files.pythonhosted.org/packages/86/73/b36f86ef98df96e7e8a6166dfa60c8db5d597f051e613a3112f39a870b4c/grimp-3.13-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:44420b638b3e303f32314bd4d309f15de1666629035acd1cdd3720c15917ac85", size = 2368745, upload-time = "2025-10-29T13:04:29.706Z" }, + { url = "https://files.pythonhosted.org/packages/02/2f/0ce37872fad5c4b82d727f6e435fd5bc76f701279bddc9666710318940cf/grimp-3.13-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:f6127fdb982cf135612504d34aa16b841f421e54751fcd54f80b9531decb2b3f", size = 
2358753, upload-time = "2025-10-29T13:04:42.632Z" }, + { url = "https://files.pythonhosted.org/packages/bb/23/935c888ac9ee71184fe5adf5ea86648746739be23c85932857ac19fc1d17/grimp-3.13-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:69893a9ef1edea25226ed17e8e8981e32900c59703972e0780c0e927ce624f75", size = 2383066, upload-time = "2025-10-29T13:04:55.073Z" }, ] [[package]] @@ -2550,9 +2583,9 @@ dependencies = [ { name = "grpcio" }, { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/1e/1011451679a983f2f5c6771a1682542ecb027776762ad031fd0d7129164b/grpc_google_iam_v1-0.14.3.tar.gz", hash = "sha256:879ac4ef33136c5491a6300e27575a9ec760f6cdf9a2518798c1b8977a5dc389", size = 23745 } +sdist = { url = "https://files.pythonhosted.org/packages/76/1e/1011451679a983f2f5c6771a1682542ecb027776762ad031fd0d7129164b/grpc_google_iam_v1-0.14.3.tar.gz", hash = "sha256:879ac4ef33136c5491a6300e27575a9ec760f6cdf9a2518798c1b8977a5dc389", size = 23745, upload-time = "2025-10-15T21:14:53.318Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/bd/330a1bbdb1afe0b96311249e699b6dc9cfc17916394fd4503ac5aca2514b/grpc_google_iam_v1-0.14.3-py3-none-any.whl", hash = "sha256:7a7f697e017a067206a3dfef44e4c634a34d3dee135fe7d7a4613fe3e59217e6", size = 32690 }, + { url = "https://files.pythonhosted.org/packages/4a/bd/330a1bbdb1afe0b96311249e699b6dc9cfc17916394fd4503ac5aca2514b/grpc_google_iam_v1-0.14.3-py3-none-any.whl", hash = "sha256:7a7f697e017a067206a3dfef44e4c634a34d3dee135fe7d7a4613fe3e59217e6", size = 32690, upload-time = "2025-10-15T21:14:51.72Z" }, ] [[package]] @@ -2562,28 +2595,28 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182 } +sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182, upload-time = "2025-10-21T16:23:12.106Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/00/8163a1beeb6971f66b4bbe6ac9457b97948beba8dd2fc8e1281dce7f79ec/grpcio-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a", size = 5843567 }, - { url = "https://files.pythonhosted.org/packages/10/c1/934202f5cf335e6d852530ce14ddb0fef21be612ba9ecbbcbd4d748ca32d/grpcio-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c", size = 11848017 }, - { url = "https://files.pythonhosted.org/packages/11/0b/8dec16b1863d74af6eb3543928600ec2195af49ca58b16334972f6775663/grpcio-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465", size = 6412027 }, - { url = "https://files.pythonhosted.org/packages/d7/64/7b9e6e7ab910bea9d46f2c090380bab274a0b91fb0a2fe9b0cd399fffa12/grpcio-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48", size = 7075913 }, - { url = 
"https://files.pythonhosted.org/packages/68/86/093c46e9546073cefa789bd76d44c5cb2abc824ca62af0c18be590ff13ba/grpcio-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da", size = 6615417 }, - { url = "https://files.pythonhosted.org/packages/f7/b6/5709a3a68500a9c03da6fb71740dcdd5ef245e39266461a03f31a57036d8/grpcio-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397", size = 7199683 }, - { url = "https://files.pythonhosted.org/packages/91/d3/4b1f2bf16ed52ce0b508161df3a2d186e4935379a159a834cb4a7d687429/grpcio-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749", size = 8163109 }, - { url = "https://files.pythonhosted.org/packages/5c/61/d9043f95f5f4cf085ac5dd6137b469d41befb04bd80280952ffa2a4c3f12/grpcio-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00", size = 7626676 }, - { url = "https://files.pythonhosted.org/packages/36/95/fd9a5152ca02d8881e4dd419cdd790e11805979f499a2e5b96488b85cf27/grpcio-1.76.0-cp311-cp311-win32.whl", hash = "sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054", size = 3997688 }, - { url = "https://files.pythonhosted.org/packages/60/9c/5c359c8d4c9176cfa3c61ecd4efe5affe1f38d9bae81e81ac7186b4c9cc8/grpcio-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d", size = 4709315 }, - { url = "https://files.pythonhosted.org/packages/bf/05/8e29121994b8d959ffa0afd28996d452f291b48cfc0875619de0bde2c50c/grpcio-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8", size = 5799718 }, - { url = "https://files.pythonhosted.org/packages/d9/75/11d0e66b3cdf998c996489581bdad8900db79ebd83513e45c19548f1cba4/grpcio-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280", size = 11825627 }, - { url = "https://files.pythonhosted.org/packages/28/50/2f0aa0498bc188048f5d9504dcc5c2c24f2eb1a9337cd0fa09a61a2e75f0/grpcio-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4", size = 6359167 }, - { url = "https://files.pythonhosted.org/packages/66/e5/bbf0bb97d29ede1d59d6588af40018cfc345b17ce979b7b45424628dc8bb/grpcio-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f8f757bebaaea112c00dba718fc0d3260052ce714e25804a03f93f5d1c6cc11", size = 7044267 }, - { url = "https://files.pythonhosted.org/packages/f5/86/f6ec2164f743d9609691115ae8ece098c76b894ebe4f7c94a655c6b03e98/grpcio-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:980a846182ce88c4f2f7e2c22c56aefd515daeb36149d1c897f83cf57999e0b6", size = 6573963 }, - { url = "https://files.pythonhosted.org/packages/60/bc/8d9d0d8505feccfdf38a766d262c71e73639c165b311c9457208b56d92ae/grpcio-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f92f88e6c033db65a5ae3d97905c8fea9c725b63e28d5a75cb73b49bda5024d8", size = 7164484 }, - { url = "https://files.pythonhosted.org/packages/67/e6/5d6c2fc10b95edf6df9b8f19cf10a34263b7fd48493936fffd5085521292/grpcio-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4baf3cbe2f0be3289eb68ac8ae771156971848bb8aaff60bad42005539431980", size = 
8127777 }, - { url = "https://files.pythonhosted.org/packages/3f/c8/dce8ff21c86abe025efe304d9e31fdb0deaaa3b502b6a78141080f206da0/grpcio-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:615ba64c208aaceb5ec83bfdce7728b80bfeb8be97562944836a7a0a9647d882", size = 7594014 }, - { url = "https://files.pythonhosted.org/packages/e0/42/ad28191ebf983a5d0ecef90bab66baa5a6b18f2bfdef9d0a63b1973d9f75/grpcio-1.76.0-cp312-cp312-win32.whl", hash = "sha256:45d59a649a82df5718fd9527ce775fd66d1af35e6d31abdcdc906a49c6822958", size = 3984750 }, - { url = "https://files.pythonhosted.org/packages/9e/00/7bd478cbb851c04a48baccaa49b75abaa8e4122f7d86da797500cccdd771/grpcio-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:c088e7a90b6017307f423efbb9d1ba97a22aa2170876223f9709e9d1de0b5347", size = 4704003 }, + { url = "https://files.pythonhosted.org/packages/a0/00/8163a1beeb6971f66b4bbe6ac9457b97948beba8dd2fc8e1281dce7f79ec/grpcio-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a", size = 5843567, upload-time = "2025-10-21T16:20:52.829Z" }, + { url = "https://files.pythonhosted.org/packages/10/c1/934202f5cf335e6d852530ce14ddb0fef21be612ba9ecbbcbd4d748ca32d/grpcio-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c", size = 11848017, upload-time = "2025-10-21T16:20:56.705Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/8dec16b1863d74af6eb3543928600ec2195af49ca58b16334972f6775663/grpcio-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465", size = 6412027, upload-time = "2025-10-21T16:20:59.3Z" }, + { url = "https://files.pythonhosted.org/packages/d7/64/7b9e6e7ab910bea9d46f2c090380bab274a0b91fb0a2fe9b0cd399fffa12/grpcio-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48", size = 7075913, upload-time = "2025-10-21T16:21:01.645Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/093c46e9546073cefa789bd76d44c5cb2abc824ca62af0c18be590ff13ba/grpcio-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da", size = 6615417, upload-time = "2025-10-21T16:21:03.844Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b6/5709a3a68500a9c03da6fb71740dcdd5ef245e39266461a03f31a57036d8/grpcio-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397", size = 7199683, upload-time = "2025-10-21T16:21:06.195Z" }, + { url = "https://files.pythonhosted.org/packages/91/d3/4b1f2bf16ed52ce0b508161df3a2d186e4935379a159a834cb4a7d687429/grpcio-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749", size = 8163109, upload-time = "2025-10-21T16:21:08.498Z" }, + { url = "https://files.pythonhosted.org/packages/5c/61/d9043f95f5f4cf085ac5dd6137b469d41befb04bd80280952ffa2a4c3f12/grpcio-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00", size = 7626676, upload-time = "2025-10-21T16:21:10.693Z" }, + { url = "https://files.pythonhosted.org/packages/36/95/fd9a5152ca02d8881e4dd419cdd790e11805979f499a2e5b96488b85cf27/grpcio-1.76.0-cp311-cp311-win32.whl", hash = 
"sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054", size = 3997688, upload-time = "2025-10-21T16:21:12.746Z" }, + { url = "https://files.pythonhosted.org/packages/60/9c/5c359c8d4c9176cfa3c61ecd4efe5affe1f38d9bae81e81ac7186b4c9cc8/grpcio-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d", size = 4709315, upload-time = "2025-10-21T16:21:15.26Z" }, + { url = "https://files.pythonhosted.org/packages/bf/05/8e29121994b8d959ffa0afd28996d452f291b48cfc0875619de0bde2c50c/grpcio-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8", size = 5799718, upload-time = "2025-10-21T16:21:17.939Z" }, + { url = "https://files.pythonhosted.org/packages/d9/75/11d0e66b3cdf998c996489581bdad8900db79ebd83513e45c19548f1cba4/grpcio-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280", size = 11825627, upload-time = "2025-10-21T16:21:20.466Z" }, + { url = "https://files.pythonhosted.org/packages/28/50/2f0aa0498bc188048f5d9504dcc5c2c24f2eb1a9337cd0fa09a61a2e75f0/grpcio-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4", size = 6359167, upload-time = "2025-10-21T16:21:23.122Z" }, + { url = "https://files.pythonhosted.org/packages/66/e5/bbf0bb97d29ede1d59d6588af40018cfc345b17ce979b7b45424628dc8bb/grpcio-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f8f757bebaaea112c00dba718fc0d3260052ce714e25804a03f93f5d1c6cc11", size = 7044267, upload-time = "2025-10-21T16:21:25.995Z" }, + { url = "https://files.pythonhosted.org/packages/f5/86/f6ec2164f743d9609691115ae8ece098c76b894ebe4f7c94a655c6b03e98/grpcio-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:980a846182ce88c4f2f7e2c22c56aefd515daeb36149d1c897f83cf57999e0b6", size = 6573963, upload-time = "2025-10-21T16:21:28.631Z" }, + { url = "https://files.pythonhosted.org/packages/60/bc/8d9d0d8505feccfdf38a766d262c71e73639c165b311c9457208b56d92ae/grpcio-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f92f88e6c033db65a5ae3d97905c8fea9c725b63e28d5a75cb73b49bda5024d8", size = 7164484, upload-time = "2025-10-21T16:21:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/67/e6/5d6c2fc10b95edf6df9b8f19cf10a34263b7fd48493936fffd5085521292/grpcio-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4baf3cbe2f0be3289eb68ac8ae771156971848bb8aaff60bad42005539431980", size = 8127777, upload-time = "2025-10-21T16:21:33.577Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c8/dce8ff21c86abe025efe304d9e31fdb0deaaa3b502b6a78141080f206da0/grpcio-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:615ba64c208aaceb5ec83bfdce7728b80bfeb8be97562944836a7a0a9647d882", size = 7594014, upload-time = "2025-10-21T16:21:41.882Z" }, + { url = "https://files.pythonhosted.org/packages/e0/42/ad28191ebf983a5d0ecef90bab66baa5a6b18f2bfdef9d0a63b1973d9f75/grpcio-1.76.0-cp312-cp312-win32.whl", hash = "sha256:45d59a649a82df5718fd9527ce775fd66d1af35e6d31abdcdc906a49c6822958", size = 3984750, upload-time = "2025-10-21T16:21:44.006Z" }, + { url = "https://files.pythonhosted.org/packages/9e/00/7bd478cbb851c04a48baccaa49b75abaa8e4122f7d86da797500cccdd771/grpcio-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:c088e7a90b6017307f423efbb9d1ba97a22aa2170876223f9709e9d1de0b5347", size = 4704003, 
upload-time = "2025-10-21T16:21:46.244Z" }, ] [[package]] @@ -2595,9 +2628,9 @@ dependencies = [ { name = "grpcio" }, { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/d7/013ef01c5a1c2fd0932c27c904934162f69f41ca0f28396d3ffe4d386123/grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485", size = 13063 } +sdist = { url = "https://files.pythonhosted.org/packages/7c/d7/013ef01c5a1c2fd0932c27c904934162f69f41ca0f28396d3ffe4d386123/grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485", size = 13063, upload-time = "2024-08-06T00:37:08.003Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/90/40/972271de05f9315c0d69f9f7ebbcadd83bc85322f538637d11bb8c67803d/grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8", size = 14448 }, + { url = "https://files.pythonhosted.org/packages/90/40/972271de05f9315c0d69f9f7ebbcadd83bc85322f538637d11bb8c67803d/grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8", size = 14448, upload-time = "2024-08-06T00:30:15.702Z" }, ] [[package]] @@ -2609,24 +2642,24 @@ dependencies = [ { name = "protobuf" }, { name = "setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/54/fa/b69bd8040eafc09b88bb0ec0fea59e8aacd1a801e688af087cead213b0d0/grpcio-tools-1.62.3.tar.gz", hash = "sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833", size = 4538520 } +sdist = { url = "https://files.pythonhosted.org/packages/54/fa/b69bd8040eafc09b88bb0ec0fea59e8aacd1a801e688af087cead213b0d0/grpcio-tools-1.62.3.tar.gz", hash = "sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833", size = 4538520, upload-time = "2024-08-06T00:37:11.035Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/23/52/2dfe0a46b63f5ebcd976570aa5fc62f793d5a8b169e211c6a5aede72b7ae/grpcio_tools-1.62.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:703f46e0012af83a36082b5f30341113474ed0d91e36640da713355cd0ea5d23", size = 5147623 }, - { url = "https://files.pythonhosted.org/packages/f0/2e/29fdc6c034e058482e054b4a3c2432f84ff2e2765c1342d4f0aa8a5c5b9a/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7cc83023acd8bc72cf74c2edbe85b52098501d5b74d8377bfa06f3e929803492", size = 2719538 }, - { url = "https://files.pythonhosted.org/packages/f9/60/abe5deba32d9ec2c76cdf1a2f34e404c50787074a2fee6169568986273f1/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ff7d58a45b75df67d25f8f144936a3e44aabd91afec833ee06826bd02b7fbe7", size = 3070964 }, - { url = "https://files.pythonhosted.org/packages/bc/ad/e2b066684c75f8d9a48508cde080a3a36618064b9cadac16d019ca511444/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f2483ea232bd72d98a6dc6d7aefd97e5bc80b15cd909b9e356d6f3e326b6e43", size = 2805003 }, - { url = "https://files.pythonhosted.org/packages/9c/3f/59bf7af786eae3f9d24ee05ce75318b87f541d0950190ecb5ffb776a1a58/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:962c84b4da0f3b14b3cdb10bc3837ebc5f136b67d919aea8d7bb3fd3df39528a", size = 3685154 }, - { url = "https://files.pythonhosted.org/packages/f1/79/4dd62478b91e27084c67b35a2316ce8a967bd8b6cb8d6ed6c86c3a0df7cb/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:8ad0473af5544f89fc5a1ece8676dd03bdf160fb3230f967e05d0f4bf89620e3", size = 3297942 }, - { url = "https://files.pythonhosted.org/packages/b8/cb/86449ecc58bea056b52c0b891f26977afc8c4464d88c738f9648da941a75/grpcio_tools-1.62.3-cp311-cp311-win32.whl", hash = "sha256:db3bc9fa39afc5e4e2767da4459df82b095ef0cab2f257707be06c44a1c2c3e5", size = 910231 }, - { url = "https://files.pythonhosted.org/packages/45/a4/9736215e3945c30ab6843280b0c6e1bff502910156ea2414cd77fbf1738c/grpcio_tools-1.62.3-cp311-cp311-win_amd64.whl", hash = "sha256:e0898d412a434e768a0c7e365acabe13ff1558b767e400936e26b5b6ed1ee51f", size = 1052496 }, - { url = "https://files.pythonhosted.org/packages/2a/a5/d6887eba415ce318ae5005e8dfac3fa74892400b54b6d37b79e8b4f14f5e/grpcio_tools-1.62.3-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d102b9b21c4e1e40af9a2ab3c6d41afba6bd29c0aa50ca013bf85c99cdc44ac5", size = 5147690 }, - { url = "https://files.pythonhosted.org/packages/8a/7c/3cde447a045e83ceb4b570af8afe67ffc86896a2fe7f59594dc8e5d0a645/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0a52cc9444df978438b8d2332c0ca99000521895229934a59f94f37ed896b133", size = 2720538 }, - { url = "https://files.pythonhosted.org/packages/88/07/f83f2750d44ac4f06c07c37395b9c1383ef5c994745f73c6bfaf767f0944/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141d028bf5762d4a97f981c501da873589df3f7e02f4c1260e1921e565b376fa", size = 3071571 }, - { url = "https://files.pythonhosted.org/packages/37/74/40175897deb61e54aca716bc2e8919155b48f33aafec8043dda9592d8768/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47a5c093ab256dec5714a7a345f8cc89315cb57c298b276fa244f37a0ba507f0", size = 2806207 }, - { url = "https://files.pythonhosted.org/packages/ec/ee/d8de915105a217cbcb9084d684abdc032030dcd887277f2ef167372287fe/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f6831fdec2b853c9daa3358535c55eed3694325889aa714070528cf8f92d7d6d", size = 3685815 }, - { url = "https://files.pythonhosted.org/packages/fd/d9/4360a6c12be3d7521b0b8c39e5d3801d622fbb81cc2721dbd3eee31e28c8/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e02d7c1a02e3814c94ba0cfe43d93e872c758bd8fd5c2797f894d0c49b4a1dfc", size = 3298378 }, - { url = "https://files.pythonhosted.org/packages/29/3b/7cdf4a9e5a3e0a35a528b48b111355cd14da601413a4f887aa99b6da468f/grpcio_tools-1.62.3-cp312-cp312-win32.whl", hash = "sha256:b881fd9505a84457e9f7e99362eeedd86497b659030cf57c6f0070df6d9c2b9b", size = 910416 }, - { url = "https://files.pythonhosted.org/packages/6c/66/dd3ec249e44c1cc15e902e783747819ed41ead1336fcba72bf841f72c6e9/grpcio_tools-1.62.3-cp312-cp312-win_amd64.whl", hash = "sha256:11c625eebefd1fd40a228fc8bae385e448c7e32a6ae134e43cf13bbc23f902b7", size = 1052856 }, + { url = "https://files.pythonhosted.org/packages/23/52/2dfe0a46b63f5ebcd976570aa5fc62f793d5a8b169e211c6a5aede72b7ae/grpcio_tools-1.62.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:703f46e0012af83a36082b5f30341113474ed0d91e36640da713355cd0ea5d23", size = 5147623, upload-time = "2024-08-06T00:30:54.894Z" }, + { url = "https://files.pythonhosted.org/packages/f0/2e/29fdc6c034e058482e054b4a3c2432f84ff2e2765c1342d4f0aa8a5c5b9a/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7cc83023acd8bc72cf74c2edbe85b52098501d5b74d8377bfa06f3e929803492", size = 2719538, upload-time = "2024-08-06T00:30:57.928Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/60/abe5deba32d9ec2c76cdf1a2f34e404c50787074a2fee6169568986273f1/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ff7d58a45b75df67d25f8f144936a3e44aabd91afec833ee06826bd02b7fbe7", size = 3070964, upload-time = "2024-08-06T00:31:00.267Z" }, + { url = "https://files.pythonhosted.org/packages/bc/ad/e2b066684c75f8d9a48508cde080a3a36618064b9cadac16d019ca511444/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f2483ea232bd72d98a6dc6d7aefd97e5bc80b15cd909b9e356d6f3e326b6e43", size = 2805003, upload-time = "2024-08-06T00:31:02.565Z" }, + { url = "https://files.pythonhosted.org/packages/9c/3f/59bf7af786eae3f9d24ee05ce75318b87f541d0950190ecb5ffb776a1a58/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:962c84b4da0f3b14b3cdb10bc3837ebc5f136b67d919aea8d7bb3fd3df39528a", size = 3685154, upload-time = "2024-08-06T00:31:05.339Z" }, + { url = "https://files.pythonhosted.org/packages/f1/79/4dd62478b91e27084c67b35a2316ce8a967bd8b6cb8d6ed6c86c3a0df7cb/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8ad0473af5544f89fc5a1ece8676dd03bdf160fb3230f967e05d0f4bf89620e3", size = 3297942, upload-time = "2024-08-06T00:31:08.456Z" }, + { url = "https://files.pythonhosted.org/packages/b8/cb/86449ecc58bea056b52c0b891f26977afc8c4464d88c738f9648da941a75/grpcio_tools-1.62.3-cp311-cp311-win32.whl", hash = "sha256:db3bc9fa39afc5e4e2767da4459df82b095ef0cab2f257707be06c44a1c2c3e5", size = 910231, upload-time = "2024-08-06T00:31:11.464Z" }, + { url = "https://files.pythonhosted.org/packages/45/a4/9736215e3945c30ab6843280b0c6e1bff502910156ea2414cd77fbf1738c/grpcio_tools-1.62.3-cp311-cp311-win_amd64.whl", hash = "sha256:e0898d412a434e768a0c7e365acabe13ff1558b767e400936e26b5b6ed1ee51f", size = 1052496, upload-time = "2024-08-06T00:31:13.665Z" }, + { url = "https://files.pythonhosted.org/packages/2a/a5/d6887eba415ce318ae5005e8dfac3fa74892400b54b6d37b79e8b4f14f5e/grpcio_tools-1.62.3-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d102b9b21c4e1e40af9a2ab3c6d41afba6bd29c0aa50ca013bf85c99cdc44ac5", size = 5147690, upload-time = "2024-08-06T00:31:16.436Z" }, + { url = "https://files.pythonhosted.org/packages/8a/7c/3cde447a045e83ceb4b570af8afe67ffc86896a2fe7f59594dc8e5d0a645/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0a52cc9444df978438b8d2332c0ca99000521895229934a59f94f37ed896b133", size = 2720538, upload-time = "2024-08-06T00:31:18.905Z" }, + { url = "https://files.pythonhosted.org/packages/88/07/f83f2750d44ac4f06c07c37395b9c1383ef5c994745f73c6bfaf767f0944/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141d028bf5762d4a97f981c501da873589df3f7e02f4c1260e1921e565b376fa", size = 3071571, upload-time = "2024-08-06T00:31:21.684Z" }, + { url = "https://files.pythonhosted.org/packages/37/74/40175897deb61e54aca716bc2e8919155b48f33aafec8043dda9592d8768/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47a5c093ab256dec5714a7a345f8cc89315cb57c298b276fa244f37a0ba507f0", size = 2806207, upload-time = "2024-08-06T00:31:24.208Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ee/d8de915105a217cbcb9084d684abdc032030dcd887277f2ef167372287fe/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f6831fdec2b853c9daa3358535c55eed3694325889aa714070528cf8f92d7d6d", size = 3685815, upload-time = "2024-08-06T00:31:26.917Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fd/d9/4360a6c12be3d7521b0b8c39e5d3801d622fbb81cc2721dbd3eee31e28c8/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e02d7c1a02e3814c94ba0cfe43d93e872c758bd8fd5c2797f894d0c49b4a1dfc", size = 3298378, upload-time = "2024-08-06T00:31:30.401Z" }, + { url = "https://files.pythonhosted.org/packages/29/3b/7cdf4a9e5a3e0a35a528b48b111355cd14da601413a4f887aa99b6da468f/grpcio_tools-1.62.3-cp312-cp312-win32.whl", hash = "sha256:b881fd9505a84457e9f7e99362eeedd86497b659030cf57c6f0070df6d9c2b9b", size = 910416, upload-time = "2024-08-06T00:31:33.118Z" }, + { url = "https://files.pythonhosted.org/packages/6c/66/dd3ec249e44c1cc15e902e783747819ed41ead1336fcba72bf841f72c6e9/grpcio_tools-1.62.3-cp312-cp312-win_amd64.whl", hash = "sha256:11c625eebefd1fd40a228fc8bae385e448c7e32a6ae134e43cf13bbc23f902b7", size = 1052856, upload-time = "2024-08-06T00:31:36.519Z" }, ] [[package]] @@ -2636,18 +2669,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/34/72/9614c465dc206155d93eff0ca20d42e1e35afc533971379482de953521a4/gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec", size = 375031 } +sdist = { url = "https://files.pythonhosted.org/packages/34/72/9614c465dc206155d93eff0ca20d42e1e35afc533971379482de953521a4/gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec", size = 375031, upload-time = "2024-08-10T20:25:27.378Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/7d/6dac2a6e1eba33ee43f318edbed4ff29151a49b5d37f080aad1e6469bca4/gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d", size = 85029 }, + { url = "https://files.pythonhosted.org/packages/cb/7d/6dac2a6e1eba33ee43f318edbed4ff29151a49b5d37f080aad1e6469bca4/gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d", size = 85029, upload-time = "2024-08-10T20:25:24.996Z" }, ] [[package]] name = "h11" version = "0.16.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 }, + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] [[package]] @@ -2658,67 +2691,67 @@ dependencies = [ { name = "hpack" }, { name = "hyperframe" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026 } +sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779 }, + { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" }, ] [[package]] name = "hf-xet" version = "1.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5e/6e/0f11bacf08a67f7fb5ee09740f2ca54163863b07b70d579356e9222ce5d8/hf_xet-1.2.0.tar.gz", hash = "sha256:a8c27070ca547293b6890c4bf389f713f80e8c478631432962bb7f4bc0bd7d7f", size = 506020 } +sdist = { url = "https://files.pythonhosted.org/packages/5e/6e/0f11bacf08a67f7fb5ee09740f2ca54163863b07b70d579356e9222ce5d8/hf_xet-1.2.0.tar.gz", hash = "sha256:a8c27070ca547293b6890c4bf389f713f80e8c478631432962bb7f4bc0bd7d7f", size = 506020, upload-time = "2025-10-24T19:04:32.129Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/2d/22338486473df5923a9ab7107d375dbef9173c338ebef5098ef593d2b560/hf_xet-1.2.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:46740d4ac024a7ca9b22bebf77460ff43332868b661186a8e46c227fdae01848", size = 2866099 }, - { url = "https://files.pythonhosted.org/packages/7f/8c/c5becfa53234299bc2210ba314eaaae36c2875e0045809b82e40a9544f0c/hf_xet-1.2.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:27df617a076420d8845bea087f59303da8be17ed7ec0cd7ee3b9b9f579dff0e4", size = 2722178 }, - { url = "https://files.pythonhosted.org/packages/9a/92/cf3ab0b652b082e66876d08da57fcc6fa2f0e6c70dfbbafbd470bb73eb47/hf_xet-1.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3651fd5bfe0281951b988c0facbe726aa5e347b103a675f49a3fa8144c7968fd", size = 3320214 }, - { url = "https://files.pythonhosted.org/packages/46/92/3f7ec4a1b6a65bf45b059b6d4a5d38988f63e193056de2f420137e3c3244/hf_xet-1.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d06fa97c8562fb3ee7a378dd9b51e343bc5bc8190254202c9771029152f5e08c", size = 3229054 }, - { url = "https://files.pythonhosted.org/packages/0b/dd/7ac658d54b9fb7999a0ccb07ad863b413cbaf5cf172f48ebcd9497ec7263/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4c1428c9ae73ec0939410ec73023c4f842927f39db09b063b9482dac5a3bb737", size = 3413812 }, - { url = "https://files.pythonhosted.org/packages/92/68/89ac4e5b12a9ff6286a12174c8538a5930e2ed662091dd2572bbe0a18c8a/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a55558084c16b09b5ed32ab9ed38421e2d87cf3f1f89815764d1177081b99865", size = 3508920 }, - { url = "https://files.pythonhosted.org/packages/cb/44/870d44b30e1dcfb6a65932e3e1506c103a8a5aea9103c337e7a53180322c/hf_xet-1.2.0-cp37-abi3-win_amd64.whl", hash = 
"sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69", size = 2905735 }, + { url = "https://files.pythonhosted.org/packages/96/2d/22338486473df5923a9ab7107d375dbef9173c338ebef5098ef593d2b560/hf_xet-1.2.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:46740d4ac024a7ca9b22bebf77460ff43332868b661186a8e46c227fdae01848", size = 2866099, upload-time = "2025-10-24T19:04:15.366Z" }, + { url = "https://files.pythonhosted.org/packages/7f/8c/c5becfa53234299bc2210ba314eaaae36c2875e0045809b82e40a9544f0c/hf_xet-1.2.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:27df617a076420d8845bea087f59303da8be17ed7ec0cd7ee3b9b9f579dff0e4", size = 2722178, upload-time = "2025-10-24T19:04:13.695Z" }, + { url = "https://files.pythonhosted.org/packages/9a/92/cf3ab0b652b082e66876d08da57fcc6fa2f0e6c70dfbbafbd470bb73eb47/hf_xet-1.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3651fd5bfe0281951b988c0facbe726aa5e347b103a675f49a3fa8144c7968fd", size = 3320214, upload-time = "2025-10-24T19:04:03.596Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/3f7ec4a1b6a65bf45b059b6d4a5d38988f63e193056de2f420137e3c3244/hf_xet-1.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d06fa97c8562fb3ee7a378dd9b51e343bc5bc8190254202c9771029152f5e08c", size = 3229054, upload-time = "2025-10-24T19:04:01.949Z" }, + { url = "https://files.pythonhosted.org/packages/0b/dd/7ac658d54b9fb7999a0ccb07ad863b413cbaf5cf172f48ebcd9497ec7263/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4c1428c9ae73ec0939410ec73023c4f842927f39db09b063b9482dac5a3bb737", size = 3413812, upload-time = "2025-10-24T19:04:24.585Z" }, + { url = "https://files.pythonhosted.org/packages/92/68/89ac4e5b12a9ff6286a12174c8538a5930e2ed662091dd2572bbe0a18c8a/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a55558084c16b09b5ed32ab9ed38421e2d87cf3f1f89815764d1177081b99865", size = 3508920, upload-time = "2025-10-24T19:04:26.927Z" }, + { url = "https://files.pythonhosted.org/packages/cb/44/870d44b30e1dcfb6a65932e3e1506c103a8a5aea9103c337e7a53180322c/hf_xet-1.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69", size = 2905735, upload-time = "2025-10-24T19:04:35.928Z" }, ] [[package]] name = "hiredis" version = "3.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/65/82/d2817ce0653628e0a0cb128533f6af0dd6318a49f3f3a6a7bd1f2f2154af/hiredis-3.3.0.tar.gz", hash = "sha256:105596aad9249634361815c574351f1bd50455dc23b537c2940066c4a9dea685", size = 89048 } +sdist = { url = "https://files.pythonhosted.org/packages/65/82/d2817ce0653628e0a0cb128533f6af0dd6318a49f3f3a6a7bd1f2f2154af/hiredis-3.3.0.tar.gz", hash = "sha256:105596aad9249634361815c574351f1bd50455dc23b537c2940066c4a9dea685", size = 89048, upload-time = "2025-10-14T16:33:34.263Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/0c/be3b1093f93a7c823ca16fbfbb83d3a1de671bbd2add8da1fe2bcfccb2b8/hiredis-3.3.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:63ee6c1ae6a2462a2439eb93c38ab0315cd5f4b6d769c6a34903058ba538b5d6", size = 81813 }, - { url = "https://files.pythonhosted.org/packages/95/2b/ed722d392ac59a7eee548d752506ef32c06ffdd0bce9cf91125a74b8edf9/hiredis-3.3.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:31eda3526e2065268a8f97fbe3d0e9a64ad26f1d89309e953c80885c511ea2ae", size = 46049 }, - { url = 
"https://files.pythonhosted.org/packages/e5/61/8ace8027d5b3f6b28e1dc55f4a504be038ba8aa8bf71882b703e8f874c91/hiredis-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a26bae1b61b7bcafe3d0d0c7d012fb66ab3c95f2121dbea336df67e344e39089", size = 41814 }, - { url = "https://files.pythonhosted.org/packages/23/0e/380ade1ffb21034976663a5128f0383533f35caccdba13ff0537dd5ace79/hiredis-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9546079f7fd5c50fbff9c791710049b32eebe7f9b94debec1e8b9f4c048cba2", size = 167572 }, - { url = "https://files.pythonhosted.org/packages/ca/60/b4a8d2177575b896730f73e6890644591aa56790a75c2b6d6f2302a1dae6/hiredis-3.3.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ae327fc13b1157b694d53f92d50920c0051e30b0c245f980a7036e299d039ab4", size = 179373 }, - { url = "https://files.pythonhosted.org/packages/31/53/a473a18d27cfe8afda7772ff9adfba1718fd31d5e9c224589dc17774fa0b/hiredis-3.3.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4016e50a8be5740a59c5af5252e5ad16c395021a999ad24c6604f0d9faf4d346", size = 177504 }, - { url = "https://files.pythonhosted.org/packages/7e/0f/f6ee4c26b149063dbf5b1b6894b4a7a1f00a50e3d0cfd30a22d4c3479db3/hiredis-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c17b473f273465a3d2168a57a5b43846165105ac217d5652a005e14068589ddc", size = 169449 }, - { url = "https://files.pythonhosted.org/packages/64/38/e3e113172289e1261ccd43e387a577dd268b0b9270721b5678735803416c/hiredis-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9ecd9b09b11bd0b8af87d29c3f5da628d2bdc2a6c23d2dd264d2da082bd4bf32", size = 164010 }, - { url = "https://files.pythonhosted.org/packages/8d/9a/ccf4999365691ea73d0dd2ee95ee6ef23ebc9a835a7417f81765bc49eade/hiredis-3.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:00fb04eac208cd575d14f246e74a468561081ce235937ab17d77cde73aefc66c", size = 174623 }, - { url = "https://files.pythonhosted.org/packages/ed/c7/ee55fa2ade078b7c4f17e8ddc9bc28881d0b71b794ebf9db4cfe4c8f0623/hiredis-3.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:60814a7d0b718adf3bfe2c32c6878b0e00d6ae290ad8e47f60d7bba3941234a6", size = 167650 }, - { url = "https://files.pythonhosted.org/packages/bf/06/f6cd90275dcb0ba03f69767805151eb60b602bc25830648bd607660e1f97/hiredis-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fcbd1a15e935aa323b5b2534b38419511b7909b4b8ee548e42b59090a1b37bb1", size = 165452 }, - { url = "https://files.pythonhosted.org/packages/c3/10/895177164a6c4409a07717b5ae058d84a908e1ab629f0401110b02aaadda/hiredis-3.3.0-cp311-cp311-win32.whl", hash = "sha256:73679607c5a19f4bcfc9cf6eb54480bcd26617b68708ac8b1079da9721be5449", size = 20394 }, - { url = "https://files.pythonhosted.org/packages/3c/c7/1e8416ae4d4134cb62092c61cabd76b3d720507ee08edd19836cdeea4c7a/hiredis-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:30a4df3d48f32538de50648d44146231dde5ad7f84f8f08818820f426840ae97", size = 22336 }, - { url = "https://files.pythonhosted.org/packages/48/1c/ed28ae5d704f5c7e85b946fa327f30d269e6272c847fef7e91ba5fc86193/hiredis-3.3.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5b8e1d6a2277ec5b82af5dce11534d3ed5dffeb131fd9b210bc1940643b39b5f", size = 82026 }, - { url = "https://files.pythonhosted.org/packages/f4/9b/79f30c5c40e248291023b7412bfdef4ad9a8a92d9e9285d65d600817dac7/hiredis-3.3.0-cp312-cp312-macosx_10_15_x86_64.whl", hash 
= "sha256:c4981de4d335f996822419e8a8b3b87367fcef67dc5fb74d3bff4df9f6f17783", size = 46217 }, - { url = "https://files.pythonhosted.org/packages/e7/c3/02b9ed430ad9087aadd8afcdf616717452d16271b701fa47edfe257b681e/hiredis-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1706480a683e328ae9ba5d704629dee2298e75016aa0207e7067b9c40cecc271", size = 41858 }, - { url = "https://files.pythonhosted.org/packages/f1/98/b2a42878b82130a535c7aa20bc937ba2d07d72e9af3ad1ad93e837c419b5/hiredis-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a95cef9989736ac313639f8f545b76b60b797e44e65834aabbb54e4fad8d6c8", size = 170195 }, - { url = "https://files.pythonhosted.org/packages/66/1d/9dcde7a75115d3601b016113d9b90300726fa8e48aacdd11bf01a453c145/hiredis-3.3.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca2802934557ccc28a954414c245ba7ad904718e9712cb67c05152cf6b9dd0a3", size = 181808 }, - { url = "https://files.pythonhosted.org/packages/56/a1/60f6bda9b20b4e73c85f7f5f046bc2c154a5194fc94eb6861e1fd97ced52/hiredis-3.3.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fe730716775f61e76d75810a38ee4c349d3af3896450f1525f5a4034cf8f2ed7", size = 180578 }, - { url = "https://files.pythonhosted.org/packages/d9/01/859d21de65085f323a701824e23ea3330a0ac05f8e184544d7aa5c26128d/hiredis-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:749faa69b1ce1f741f5eaf743435ac261a9262e2d2d66089192477e7708a9abc", size = 172508 }, - { url = "https://files.pythonhosted.org/packages/99/a8/28fd526e554c80853d0fbf57ef2a3235f00e4ed34ce0e622e05d27d0f788/hiredis-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:95c9427f2ac3f1dd016a3da4e1161fa9d82f221346c8f3fdd6f3f77d4e28946c", size = 166341 }, - { url = "https://files.pythonhosted.org/packages/f2/91/ded746b7d2914f557fbbf77be55e90d21f34ba758ae10db6591927c642c8/hiredis-3.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c863ee44fe7bff25e41f3a5105c936a63938b76299b802d758f40994ab340071", size = 176765 }, - { url = "https://files.pythonhosted.org/packages/d6/4c/04aa46ff386532cb5f08ee495c2bf07303e93c0acf2fa13850e031347372/hiredis-3.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2213c7eb8ad5267434891f3241c7776e3bafd92b5933fc57d53d4456247dc542", size = 170312 }, - { url = "https://files.pythonhosted.org/packages/90/6e/67f9d481c63f542a9cf4c9f0ea4e5717db0312fb6f37fb1f78f3a66de93c/hiredis-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a172bae3e2837d74530cd60b06b141005075db1b814d966755977c69bd882ce8", size = 167965 }, - { url = "https://files.pythonhosted.org/packages/7a/df/dde65144d59c3c0d85e43255798f1fa0c48d413e668cfd92b3d9f87924ef/hiredis-3.3.0-cp312-cp312-win32.whl", hash = "sha256:cb91363b9fd6d41c80df9795e12fffbaf5c399819e6ae8120f414dedce6de068", size = 20533 }, - { url = "https://files.pythonhosted.org/packages/f5/a9/55a4ac9c16fdf32e92e9e22c49f61affe5135e177ca19b014484e28950f7/hiredis-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:04ec150e95eea3de9ff8bac754978aa17b8bf30a86d4ab2689862020945396b0", size = 22379 }, + { url = "https://files.pythonhosted.org/packages/34/0c/be3b1093f93a7c823ca16fbfbb83d3a1de671bbd2add8da1fe2bcfccb2b8/hiredis-3.3.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:63ee6c1ae6a2462a2439eb93c38ab0315cd5f4b6d769c6a34903058ba538b5d6", size = 81813, upload-time = "2025-10-14T16:32:00.576Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/2b/ed722d392ac59a7eee548d752506ef32c06ffdd0bce9cf91125a74b8edf9/hiredis-3.3.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:31eda3526e2065268a8f97fbe3d0e9a64ad26f1d89309e953c80885c511ea2ae", size = 46049, upload-time = "2025-10-14T16:32:01.319Z" }, + { url = "https://files.pythonhosted.org/packages/e5/61/8ace8027d5b3f6b28e1dc55f4a504be038ba8aa8bf71882b703e8f874c91/hiredis-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a26bae1b61b7bcafe3d0d0c7d012fb66ab3c95f2121dbea336df67e344e39089", size = 41814, upload-time = "2025-10-14T16:32:02.076Z" }, + { url = "https://files.pythonhosted.org/packages/23/0e/380ade1ffb21034976663a5128f0383533f35caccdba13ff0537dd5ace79/hiredis-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9546079f7fd5c50fbff9c791710049b32eebe7f9b94debec1e8b9f4c048cba2", size = 167572, upload-time = "2025-10-14T16:32:03.125Z" }, + { url = "https://files.pythonhosted.org/packages/ca/60/b4a8d2177575b896730f73e6890644591aa56790a75c2b6d6f2302a1dae6/hiredis-3.3.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ae327fc13b1157b694d53f92d50920c0051e30b0c245f980a7036e299d039ab4", size = 179373, upload-time = "2025-10-14T16:32:04.04Z" }, + { url = "https://files.pythonhosted.org/packages/31/53/a473a18d27cfe8afda7772ff9adfba1718fd31d5e9c224589dc17774fa0b/hiredis-3.3.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4016e50a8be5740a59c5af5252e5ad16c395021a999ad24c6604f0d9faf4d346", size = 177504, upload-time = "2025-10-14T16:32:04.934Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0f/f6ee4c26b149063dbf5b1b6894b4a7a1f00a50e3d0cfd30a22d4c3479db3/hiredis-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c17b473f273465a3d2168a57a5b43846165105ac217d5652a005e14068589ddc", size = 169449, upload-time = "2025-10-14T16:32:05.808Z" }, + { url = "https://files.pythonhosted.org/packages/64/38/e3e113172289e1261ccd43e387a577dd268b0b9270721b5678735803416c/hiredis-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9ecd9b09b11bd0b8af87d29c3f5da628d2bdc2a6c23d2dd264d2da082bd4bf32", size = 164010, upload-time = "2025-10-14T16:32:06.695Z" }, + { url = "https://files.pythonhosted.org/packages/8d/9a/ccf4999365691ea73d0dd2ee95ee6ef23ebc9a835a7417f81765bc49eade/hiredis-3.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:00fb04eac208cd575d14f246e74a468561081ce235937ab17d77cde73aefc66c", size = 174623, upload-time = "2025-10-14T16:32:07.627Z" }, + { url = "https://files.pythonhosted.org/packages/ed/c7/ee55fa2ade078b7c4f17e8ddc9bc28881d0b71b794ebf9db4cfe4c8f0623/hiredis-3.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:60814a7d0b718adf3bfe2c32c6878b0e00d6ae290ad8e47f60d7bba3941234a6", size = 167650, upload-time = "2025-10-14T16:32:08.615Z" }, + { url = "https://files.pythonhosted.org/packages/bf/06/f6cd90275dcb0ba03f69767805151eb60b602bc25830648bd607660e1f97/hiredis-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fcbd1a15e935aa323b5b2534b38419511b7909b4b8ee548e42b59090a1b37bb1", size = 165452, upload-time = "2025-10-14T16:32:09.561Z" }, + { url = "https://files.pythonhosted.org/packages/c3/10/895177164a6c4409a07717b5ae058d84a908e1ab629f0401110b02aaadda/hiredis-3.3.0-cp311-cp311-win32.whl", hash = "sha256:73679607c5a19f4bcfc9cf6eb54480bcd26617b68708ac8b1079da9721be5449", size = 20394, upload-time = 
"2025-10-14T16:32:10.469Z" }, + { url = "https://files.pythonhosted.org/packages/3c/c7/1e8416ae4d4134cb62092c61cabd76b3d720507ee08edd19836cdeea4c7a/hiredis-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:30a4df3d48f32538de50648d44146231dde5ad7f84f8f08818820f426840ae97", size = 22336, upload-time = "2025-10-14T16:32:11.221Z" }, + { url = "https://files.pythonhosted.org/packages/48/1c/ed28ae5d704f5c7e85b946fa327f30d269e6272c847fef7e91ba5fc86193/hiredis-3.3.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5b8e1d6a2277ec5b82af5dce11534d3ed5dffeb131fd9b210bc1940643b39b5f", size = 82026, upload-time = "2025-10-14T16:32:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9b/79f30c5c40e248291023b7412bfdef4ad9a8a92d9e9285d65d600817dac7/hiredis-3.3.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:c4981de4d335f996822419e8a8b3b87367fcef67dc5fb74d3bff4df9f6f17783", size = 46217, upload-time = "2025-10-14T16:32:13.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c3/02b9ed430ad9087aadd8afcdf616717452d16271b701fa47edfe257b681e/hiredis-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1706480a683e328ae9ba5d704629dee2298e75016aa0207e7067b9c40cecc271", size = 41858, upload-time = "2025-10-14T16:32:13.98Z" }, + { url = "https://files.pythonhosted.org/packages/f1/98/b2a42878b82130a535c7aa20bc937ba2d07d72e9af3ad1ad93e837c419b5/hiredis-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a95cef9989736ac313639f8f545b76b60b797e44e65834aabbb54e4fad8d6c8", size = 170195, upload-time = "2025-10-14T16:32:14.728Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/9dcde7a75115d3601b016113d9b90300726fa8e48aacdd11bf01a453c145/hiredis-3.3.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca2802934557ccc28a954414c245ba7ad904718e9712cb67c05152cf6b9dd0a3", size = 181808, upload-time = "2025-10-14T16:32:15.622Z" }, + { url = "https://files.pythonhosted.org/packages/56/a1/60f6bda9b20b4e73c85f7f5f046bc2c154a5194fc94eb6861e1fd97ced52/hiredis-3.3.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fe730716775f61e76d75810a38ee4c349d3af3896450f1525f5a4034cf8f2ed7", size = 180578, upload-time = "2025-10-14T16:32:16.514Z" }, + { url = "https://files.pythonhosted.org/packages/d9/01/859d21de65085f323a701824e23ea3330a0ac05f8e184544d7aa5c26128d/hiredis-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:749faa69b1ce1f741f5eaf743435ac261a9262e2d2d66089192477e7708a9abc", size = 172508, upload-time = "2025-10-14T16:32:17.411Z" }, + { url = "https://files.pythonhosted.org/packages/99/a8/28fd526e554c80853d0fbf57ef2a3235f00e4ed34ce0e622e05d27d0f788/hiredis-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:95c9427f2ac3f1dd016a3da4e1161fa9d82f221346c8f3fdd6f3f77d4e28946c", size = 166341, upload-time = "2025-10-14T16:32:18.561Z" }, + { url = "https://files.pythonhosted.org/packages/f2/91/ded746b7d2914f557fbbf77be55e90d21f34ba758ae10db6591927c642c8/hiredis-3.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c863ee44fe7bff25e41f3a5105c936a63938b76299b802d758f40994ab340071", size = 176765, upload-time = "2025-10-14T16:32:19.491Z" }, + { url = "https://files.pythonhosted.org/packages/d6/4c/04aa46ff386532cb5f08ee495c2bf07303e93c0acf2fa13850e031347372/hiredis-3.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:2213c7eb8ad5267434891f3241c7776e3bafd92b5933fc57d53d4456247dc542", size = 170312, upload-time = "2025-10-14T16:32:20.404Z" }, + { url = "https://files.pythonhosted.org/packages/90/6e/67f9d481c63f542a9cf4c9f0ea4e5717db0312fb6f37fb1f78f3a66de93c/hiredis-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a172bae3e2837d74530cd60b06b141005075db1b814d966755977c69bd882ce8", size = 167965, upload-time = "2025-10-14T16:32:21.259Z" }, + { url = "https://files.pythonhosted.org/packages/7a/df/dde65144d59c3c0d85e43255798f1fa0c48d413e668cfd92b3d9f87924ef/hiredis-3.3.0-cp312-cp312-win32.whl", hash = "sha256:cb91363b9fd6d41c80df9795e12fffbaf5c399819e6ae8120f414dedce6de068", size = 20533, upload-time = "2025-10-14T16:32:22.192Z" }, + { url = "https://files.pythonhosted.org/packages/f5/a9/55a4ac9c16fdf32e92e9e22c49f61affe5135e177ca19b014484e28950f7/hiredis-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:04ec150e95eea3de9ff8bac754978aa17b8bf30a86d4ab2689862020945396b0", size = 22379, upload-time = "2025-10-14T16:32:22.916Z" }, ] [[package]] name = "hpack" version = "4.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276 } +sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357 }, + { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" }, ] [[package]] @@ -2729,9 +2762,9 @@ dependencies = [ { name = "six" }, { name = "webencodings" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ac/b6/b55c3f49042f1df3dcd422b7f224f939892ee94f22abcf503a9b7339eaf2/html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f", size = 272215 } +sdist = { url = "https://files.pythonhosted.org/packages/ac/b6/b55c3f49042f1df3dcd422b7f224f939892ee94f22abcf503a9b7339eaf2/html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f", size = 272215, upload-time = "2020-06-22T23:32:38.834Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d", size = 112173 }, + { url = "https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d", size = 112173, upload-time = "2020-06-22T23:32:36.781Z" }, ] [[package]] @@ -2742,9 +2775,9 @@ dependencies = [ { name = "certifi" }, { name = "h11" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 } +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 }, + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, ] [[package]] @@ -2754,31 +2787,31 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyparsing" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/77/6653db69c1f7ecfe5e3f9726fdadc981794656fcd7d98c4209fecfea9993/httplib2-0.31.0.tar.gz", hash = "sha256:ac7ab497c50975147d4f7b1ade44becc7df2f8954d42b38b3d69c515f531135c", size = 250759 } +sdist = { url = "https://files.pythonhosted.org/packages/52/77/6653db69c1f7ecfe5e3f9726fdadc981794656fcd7d98c4209fecfea9993/httplib2-0.31.0.tar.gz", hash = "sha256:ac7ab497c50975147d4f7b1ade44becc7df2f8954d42b38b3d69c515f531135c", size = 250759, upload-time = "2025-09-11T12:16:03.403Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/a2/0d269db0f6163be503775dc8b6a6fa15820cc9fdc866f6ba608d86b721f2/httplib2-0.31.0-py3-none-any.whl", hash = "sha256:b9cd78abea9b4e43a7714c6e0f8b6b8561a6fc1e95d5dbd367f5bf0ef35f5d24", size = 91148 }, + { url = "https://files.pythonhosted.org/packages/8c/a2/0d269db0f6163be503775dc8b6a6fa15820cc9fdc866f6ba608d86b721f2/httplib2-0.31.0-py3-none-any.whl", hash = "sha256:b9cd78abea9b4e43a7714c6e0f8b6b8561a6fc1e95d5dbd367f5bf0ef35f5d24", size = 91148, upload-time = "2025-09-11T12:16:01.803Z" }, ] [[package]] name = "httptools" version = "0.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961 } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521 }, - { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375 }, - { url = 
"https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621 }, - { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954 }, - { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175 }, - { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310 }, - { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875 }, - { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280 }, - { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004 }, - { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655 }, - { url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440 }, - { url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186 }, - { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192 }, - { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694 }, + { url = 
"https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" }, + { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" }, + { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, upload-time = "2025-10-10T03:54:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" }, + { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" }, + { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, + { url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 
511440, upload-time = "2025-10-10T03:54:42.452Z" }, + { url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" }, ] [[package]] @@ -2792,9 +2825,9 @@ dependencies = [ { name = "idna" }, { name = "sniffio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189 } +sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189, upload-time = "2024-08-27T12:54:01.334Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395 }, + { url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395, upload-time = "2024-08-27T12:53:59.653Z" }, ] [package.optional-dependencies] @@ -2809,9 +2842,9 @@ socks = [ name = "httpx-sse" version = "0.4.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943 } +sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960 }, + { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, ] [[package]] @@ -2828,9 +2861,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/98/63/4910c5fa9128fdadf6a9c5ac138e8b1b6cee4ca44bf7915bbfbce4e355ee/huggingface_hub-0.36.0.tar.gz", hash = "sha256:47b3f0e2539c39bf5cde015d63b72ec49baff67b6931c3d97f3f84532e2b8d25", size = 463358 } +sdist = { url = "https://files.pythonhosted.org/packages/98/63/4910c5fa9128fdadf6a9c5ac138e8b1b6cee4ca44bf7915bbfbce4e355ee/huggingface_hub-0.36.0.tar.gz", hash = "sha256:47b3f0e2539c39bf5cde015d63b72ec49baff67b6931c3d97f3f84532e2b8d25", size = 463358, upload-time = "2025-10-23T12:12:01.413Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/bd/1a875e0d592d447cbc02805fd3fe0f497714d6a2583f59d14fa9ebad96eb/huggingface_hub-0.36.0-py3-none-any.whl", hash = "sha256:7bcc9ad17d5b3f07b57c78e79d527102d08313caa278a641993acddcb894548d", size = 566094 }, + { url = "https://files.pythonhosted.org/packages/cb/bd/1a875e0d592d447cbc02805fd3fe0f497714d6a2583f59d14fa9ebad96eb/huggingface_hub-0.36.0-py3-none-any.whl", hash = "sha256:7bcc9ad17d5b3f07b57c78e79d527102d08313caa278a641993acddcb894548d", size = 566094, upload-time = "2025-10-23T12:11:59.557Z" }, ] [[package]] @@ -2840,18 +2873,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyreadline3", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702 } +sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702, upload-time = "2021-09-17T21:40:43.31Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794 }, + { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, ] [[package]] name = "hyperframe" version = "6.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566 } +sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007 }, + { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = 
"2025-01-22T21:41:47.295Z" }, ] [[package]] @@ -2861,18 +2894,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "sortedcontainers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4a/99/a3c6eb3fdd6bfa01433d674b0f12cd9102aa99630689427422d920aea9c6/hypothesis-6.148.2.tar.gz", hash = "sha256:07e65d34d687ddff3e92a3ac6b43966c193356896813aec79f0a611c5018f4b1", size = 469984 } +sdist = { url = "https://files.pythonhosted.org/packages/4a/99/a3c6eb3fdd6bfa01433d674b0f12cd9102aa99630689427422d920aea9c6/hypothesis-6.148.2.tar.gz", hash = "sha256:07e65d34d687ddff3e92a3ac6b43966c193356896813aec79f0a611c5018f4b1", size = 469984, upload-time = "2025-11-18T20:21:17.047Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/d2/c2673aca0127e204965e0e9b3b7a0e91e9b12993859ac8758abd22669b89/hypothesis-6.148.2-py3-none-any.whl", hash = "sha256:bf8ddc829009da73b321994b902b1964bcc3e5c3f0ed9a1c1e6a1631ab97c5fa", size = 536986 }, + { url = "https://files.pythonhosted.org/packages/b1/d2/c2673aca0127e204965e0e9b3b7a0e91e9b12993859ac8758abd22669b89/hypothesis-6.148.2-py3-none-any.whl", hash = "sha256:bf8ddc829009da73b321994b902b1964bcc3e5c3f0ed9a1c1e6a1631ab97c5fa", size = 536986, upload-time = "2025-11-18T20:21:15.212Z" }, ] [[package]] name = "idna" version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582 } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008 }, + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] [[package]] @@ -2885,9 +2918,9 @@ dependencies = [ { name = "rich" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/50/20/cc371a35123cd6afe4c8304cf199a53530a05f7437eda79ce84d9c6f6949/import_linter-2.7.tar.gz", hash = "sha256:7bea754fac9cde54182c81eeb48f649eea20b865219c39f7ac2abd23775d07d2", size = 219914 } +sdist = { url = "https://files.pythonhosted.org/packages/50/20/cc371a35123cd6afe4c8304cf199a53530a05f7437eda79ce84d9c6f6949/import_linter-2.7.tar.gz", hash = "sha256:7bea754fac9cde54182c81eeb48f649eea20b865219c39f7ac2abd23775d07d2", size = 219914, upload-time = "2025-11-19T11:44:28.193Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/b5/26a1d198f3de0676354a628f6e2a65334b744855d77e25eea739287eea9a/import_linter-2.7-py3-none-any.whl", hash = "sha256:be03bbd467b3f0b4535fb3ee12e07995d9837864b307df2e78888364e0ba012d", size = 46197 }, + { url = "https://files.pythonhosted.org/packages/a8/b5/26a1d198f3de0676354a628f6e2a65334b744855d77e25eea739287eea9a/import_linter-2.7-py3-none-any.whl", hash = 
"sha256:be03bbd467b3f0b4535fb3ee12e07995d9837864b307df2e78888364e0ba012d", size = 46197, upload-time = "2025-11-19T11:44:27.023Z" }, ] [[package]] @@ -2897,39 +2930,39 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/bd/fa8ce65b0a7d4b6d143ec23b0f5fd3f7ab80121078c465bc02baeaab22dc/importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5", size = 54320 } +sdist = { url = "https://files.pythonhosted.org/packages/c0/bd/fa8ce65b0a7d4b6d143ec23b0f5fd3f7ab80121078c465bc02baeaab22dc/importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5", size = 54320, upload-time = "2024-08-20T17:11:42.348Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/14/362d31bf1076b21e1bcdcb0dc61944822ff263937b804a79231df2774d28/importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1", size = 26269 }, + { url = "https://files.pythonhosted.org/packages/c0/14/362d31bf1076b21e1bcdcb0dc61944822ff263937b804a79231df2774d28/importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1", size = 26269, upload-time = "2024-08-20T17:11:41.102Z" }, ] [[package]] name = "importlib-resources" version = "6.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cf/8c/f834fbf984f691b4f7ff60f50b514cc3de5cc08abfc3295564dd89c5e2e7/importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c", size = 44693 } +sdist = { url = "https://files.pythonhosted.org/packages/cf/8c/f834fbf984f691b4f7ff60f50b514cc3de5cc08abfc3295564dd89c5e2e7/importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c", size = 44693, upload-time = "2025-01-03T18:51:56.698Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461 }, + { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461, upload-time = "2025-01-03T18:51:54.306Z" }, ] [[package]] name = "iniconfig" version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503 } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484 }, + { url = 
"https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] [[package]] name = "intersystems-irispython" -version = "5.3.0" +version = "5.3.1" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/56/16d93576b50408d97a5cbbd055d8da024d585e96a360e2adc95b41ae6284/intersystems_irispython-5.3.0-cp38.cp39.cp310.cp311.cp312.cp313-cp38.cp39.cp310.cp311.cp312.cp313-macosx_10_9_universal2.whl", hash = "sha256:59d3176a35867a55b1ab69a6b5c75438b460291bccb254c2d2f4173be08b6e55", size = 6594480, upload-time = "2025-10-09T20:47:27.629Z" }, - { url = "https://files.pythonhosted.org/packages/99/bc/19e144ee805ea6ee0df6342a711e722c84347c05a75b3bf040c5fbe19982/intersystems_irispython-5.3.0-cp38.cp39.cp310.cp311.cp312.cp313-cp38.cp39.cp310.cp311.cp312.cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56bccefd1997c25f9f9f6c4086214c18d4fdaac0a93319d4b21dd9a6c59c9e51", size = 14779928, upload-time = "2025-10-09T20:47:30.564Z" }, - { url = "https://files.pythonhosted.org/packages/e6/fb/59ba563a80b39e9450b4627b5696019aa831dce27dacc3831b8c1e669102/intersystems_irispython-5.3.0-cp38.cp39.cp310.cp311.cp312.cp313-cp38.cp39.cp310.cp311.cp312.cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e160adc0785c55bb64e4264b8e99075691a15b0afa5d8d529f1b4bac7e57b81", size = 14422035, upload-time = "2025-10-09T20:47:32.552Z" }, - { url = "https://files.pythonhosted.org/packages/c1/68/ade8ad43f0ed1e5fba60e1710fa5ddeb01285f031e465e8c006329072e63/intersystems_irispython-5.3.0-cp38.cp39.cp310.cp311.cp312.cp313-cp38.cp39.cp310.cp311.cp312.cp313-win32.whl", hash = "sha256:820f2c5729119e5173a5bf6d6ac2a41275c4f1ffba6af6c59ea313ecd8f499cc", size = 2824316, upload-time = "2025-10-09T20:47:28.998Z" }, - { url = "https://files.pythonhosted.org/packages/f4/03/cd45cb94e42c01dc525efebf3c562543a18ee55b67fde4022665ca672351/intersystems_irispython-5.3.0-cp38.cp39.cp310.cp311.cp312.cp313-cp38.cp39.cp310.cp311.cp312.cp313-win_amd64.whl", hash = "sha256:fc07ec24bc50b6f01573221cd7d86f2937549effe31c24af8db118e0131e340c", size = 3463297, upload-time = "2025-10-09T20:47:34.636Z" }, + { url = "https://files.pythonhosted.org/packages/33/5b/8eac672a6ef26bef6ef79a7c9557096167b50c4d3577d558ae6999c195fe/intersystems_irispython-5.3.1-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-macosx_10_9_universal2.whl", hash = "sha256:634c9b4ec620837d830ff49543aeb2797a1ce8d8570a0e868398b85330dfcc4d", size = 6736686, upload-time = "2025-12-19T16:24:57.734Z" }, + { url = "https://files.pythonhosted.org/packages/ba/17/bab3e525ffb6711355f7feea18c1b7dced9c2484cecbcdd83f74550398c0/intersystems_irispython-5.3.1-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cf912f30f85e2a42f2c2ea77fbeb98a24154d5ea7428a50382786a684ec4f583", size = 16005259, upload-time = "2025-12-19T16:25:05.578Z" }, + { url = "https://files.pythonhosted.org/packages/39/59/9bb79d9e32e3e55fc9aed8071a797b4497924cbc6457cea9255bb09320b7/intersystems_irispython-5.3.1-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:be5659a6bb57593910f2a2417eddb9f5dc2f93a337ead6ddca778f557b8a359a", size = 15638040, upload-time = "2025-12-19T16:24:54.429Z" }, + { url = "https://files.pythonhosted.org/packages/cf/47/654ccf9c5cca4f5491f070888544165c9e2a6a485e320ea703e4e38d2358/intersystems_irispython-5.3.1-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-win32.whl", hash = "sha256:583e4f17088c1e0530f32efda1c0ccb02993cbc22035bc8b4c71d8693b04ee7e", size = 2879644, upload-time = "2025-12-19T16:24:59.945Z" }, + { url = "https://files.pythonhosted.org/packages/68/95/19cc13d09f1b4120bd41b1434509052e1d02afd27f2679266d7ad9cc1750/intersystems_irispython-5.3.1-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-win_amd64.whl", hash = "sha256:1d5d40450a0cdeec2a1f48d12d946a8a8ffc7c128576fcae7d58e66e3a127eae", size = 3522092, upload-time = "2025-12-19T16:25:01.834Z" }, ] [[package]] @@ -2939,31 +2972,31 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "sortedcontainers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/50/fb/396d568039d21344639db96d940d40eb62befe704ef849b27949ded5c3bb/intervaltree-3.1.0.tar.gz", hash = "sha256:902b1b88936918f9b2a19e0e5eb7ccb430ae45cde4f39ea4b36932920d33952d", size = 32861 } +sdist = { url = "https://files.pythonhosted.org/packages/50/fb/396d568039d21344639db96d940d40eb62befe704ef849b27949ded5c3bb/intervaltree-3.1.0.tar.gz", hash = "sha256:902b1b88936918f9b2a19e0e5eb7ccb430ae45cde4f39ea4b36932920d33952d", size = 32861, upload-time = "2020-08-03T08:01:11.392Z" } [[package]] name = "isodate" version = "0.7.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705 } +sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320 }, + { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" }, ] [[package]] name = "itsdangerous" version = "2.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410 } +sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234 }, + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, ] [[package]] name = "jieba" version = "0.42.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c6/cb/18eeb235f833b726522d7ebed54f2278ce28ba9438e3135ab0278d9792a2/jieba-0.42.1.tar.gz", hash = "sha256:055ca12f62674fafed09427f176506079bc135638a14e23e25be909131928db2", size = 19214172 } +sdist = { url = "https://files.pythonhosted.org/packages/c6/cb/18eeb235f833b726522d7ebed54f2278ce28ba9438e3135ab0278d9792a2/jieba-0.42.1.tar.gz", hash = "sha256:055ca12f62674fafed09427f176506079bc135638a14e23e25be909131928db2", size = 19214172, upload-time = "2020-01-20T14:27:23.5Z" } [[package]] name = "jinja2" @@ -2972,70 +3005,78 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] [[package]] name = "jiter" version = "0.12.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294 } +sdist = { url = "https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294, upload-time = "2025-11-09T20:49:23.302Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/f9/eaca4633486b527ebe7e681c431f529b63fe2709e7c5242fc0f43f77ce63/jiter-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d8f8a7e317190b2c2d60eb2e8aa835270b008139562d70fe732e1c0020ec53c9", size = 316435 }, - { url = "https://files.pythonhosted.org/packages/10/c1/40c9f7c22f5e6ff715f28113ebaba27ab85f9af2660ad6e1dd6425d14c19/jiter-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:2218228a077e784c6c8f1a8e5d6b8cb1dea62ce25811c356364848554b2056cd", size = 320548 }, - { url = "https://files.pythonhosted.org/packages/6b/1b/efbb68fe87e7711b00d2cfd1f26bb4bfc25a10539aefeaa7727329ffb9cb/jiter-0.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9354ccaa2982bf2188fd5f57f79f800ef622ec67beb8329903abf6b10da7d423", size = 351915 }, - { url = "https://files.pythonhosted.org/packages/15/2d/c06e659888c128ad1e838123d0638f0efad90cc30860cb5f74dd3f2fc0b3/jiter-0.12.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f2607185ea89b4af9a604d4c7ec40e45d3ad03ee66998b031134bc510232bb7", size = 368966 }, - { url = "https://files.pythonhosted.org/packages/6b/20/058db4ae5fb07cf6a4ab2e9b9294416f606d8e467fb74c2184b2a1eeacba/jiter-0.12.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a585a5e42d25f2e71db5f10b171f5e5ea641d3aa44f7df745aa965606111cc2", size = 482047 }, - { url = "https://files.pythonhosted.org/packages/49/bb/dc2b1c122275e1de2eb12905015d61e8316b2f888bdaac34221c301495d6/jiter-0.12.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd9e21d34edff5a663c631f850edcb786719c960ce887a5661e9c828a53a95d9", size = 380835 }, - { url = "https://files.pythonhosted.org/packages/23/7d/38f9cd337575349de16da575ee57ddb2d5a64d425c9367f5ef9e4612e32e/jiter-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a612534770470686cd5431478dc5a1b660eceb410abade6b1b74e320ca98de6", size = 364587 }, - { url = "https://files.pythonhosted.org/packages/f0/a3/b13e8e61e70f0bb06085099c4e2462647f53cc2ca97614f7fedcaa2bb9f3/jiter-0.12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3985aea37d40a908f887b34d05111e0aae822943796ebf8338877fee2ab67725", size = 390492 }, - { url = "https://files.pythonhosted.org/packages/07/71/e0d11422ed027e21422f7bc1883c61deba2d9752b720538430c1deadfbca/jiter-0.12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b1207af186495f48f72529f8d86671903c8c10127cac6381b11dddc4aaa52df6", size = 522046 }, - { url = "https://files.pythonhosted.org/packages/9f/59/b968a9aa7102a8375dbbdfbd2aeebe563c7e5dddf0f47c9ef1588a97e224/jiter-0.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef2fb241de583934c9915a33120ecc06d94aa3381a134570f59eed784e87001e", size = 513392 }, - { url = "https://files.pythonhosted.org/packages/ca/e4/7df62002499080dbd61b505c5cb351aa09e9959d176cac2aa8da6f93b13b/jiter-0.12.0-cp311-cp311-win32.whl", hash = "sha256:453b6035672fecce8007465896a25b28a6b59cfe8fbc974b2563a92f5a92a67c", size = 206096 }, - { url = "https://files.pythonhosted.org/packages/bb/60/1032b30ae0572196b0de0e87dce3b6c26a1eff71aad5fe43dee3082d32e0/jiter-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:ca264b9603973c2ad9435c71a8ec8b49f8f715ab5ba421c85a51cde9887e421f", size = 204899 }, - { url = "https://files.pythonhosted.org/packages/49/d5/c145e526fccdb834063fb45c071df78b0cc426bbaf6de38b0781f45d956f/jiter-0.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:cb00ef392e7d684f2754598c02c409f376ddcef857aae796d559e6cacc2d78a5", size = 188070 }, - { url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449 }, - { url = 
"https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855 }, - { url = "https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171 }, - { url = "https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590 }, - { url = "https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462 }, - { url = "https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983 }, - { url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328 }, - { url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740 }, - { url = "https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875 }, - { url = "https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457 }, - { url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546 }, - { url = "https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196 }, - { url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100 }, + { url = "https://files.pythonhosted.org/packages/32/f9/eaca4633486b527ebe7e681c431f529b63fe2709e7c5242fc0f43f77ce63/jiter-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:d8f8a7e317190b2c2d60eb2e8aa835270b008139562d70fe732e1c0020ec53c9", size = 316435, upload-time = "2025-11-09T20:47:02.087Z" }, + { url = "https://files.pythonhosted.org/packages/10/c1/40c9f7c22f5e6ff715f28113ebaba27ab85f9af2660ad6e1dd6425d14c19/jiter-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2218228a077e784c6c8f1a8e5d6b8cb1dea62ce25811c356364848554b2056cd", size = 320548, upload-time = "2025-11-09T20:47:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/6b/1b/efbb68fe87e7711b00d2cfd1f26bb4bfc25a10539aefeaa7727329ffb9cb/jiter-0.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9354ccaa2982bf2188fd5f57f79f800ef622ec67beb8329903abf6b10da7d423", size = 351915, upload-time = "2025-11-09T20:47:05.171Z" }, + { url = "https://files.pythonhosted.org/packages/15/2d/c06e659888c128ad1e838123d0638f0efad90cc30860cb5f74dd3f2fc0b3/jiter-0.12.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f2607185ea89b4af9a604d4c7ec40e45d3ad03ee66998b031134bc510232bb7", size = 368966, upload-time = "2025-11-09T20:47:06.508Z" }, + { url = "https://files.pythonhosted.org/packages/6b/20/058db4ae5fb07cf6a4ab2e9b9294416f606d8e467fb74c2184b2a1eeacba/jiter-0.12.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a585a5e42d25f2e71db5f10b171f5e5ea641d3aa44f7df745aa965606111cc2", size = 482047, upload-time = "2025-11-09T20:47:08.382Z" }, + { url = "https://files.pythonhosted.org/packages/49/bb/dc2b1c122275e1de2eb12905015d61e8316b2f888bdaac34221c301495d6/jiter-0.12.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd9e21d34edff5a663c631f850edcb786719c960ce887a5661e9c828a53a95d9", size = 380835, upload-time = "2025-11-09T20:47:09.81Z" }, + { url = "https://files.pythonhosted.org/packages/23/7d/38f9cd337575349de16da575ee57ddb2d5a64d425c9367f5ef9e4612e32e/jiter-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a612534770470686cd5431478dc5a1b660eceb410abade6b1b74e320ca98de6", size = 364587, upload-time = "2025-11-09T20:47:11.529Z" }, + { url = "https://files.pythonhosted.org/packages/f0/a3/b13e8e61e70f0bb06085099c4e2462647f53cc2ca97614f7fedcaa2bb9f3/jiter-0.12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3985aea37d40a908f887b34d05111e0aae822943796ebf8338877fee2ab67725", size = 390492, upload-time = "2025-11-09T20:47:12.993Z" }, + { url = "https://files.pythonhosted.org/packages/07/71/e0d11422ed027e21422f7bc1883c61deba2d9752b720538430c1deadfbca/jiter-0.12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b1207af186495f48f72529f8d86671903c8c10127cac6381b11dddc4aaa52df6", size = 522046, upload-time = "2025-11-09T20:47:14.6Z" }, + { url = "https://files.pythonhosted.org/packages/9f/59/b968a9aa7102a8375dbbdfbd2aeebe563c7e5dddf0f47c9ef1588a97e224/jiter-0.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef2fb241de583934c9915a33120ecc06d94aa3381a134570f59eed784e87001e", size = 513392, upload-time = "2025-11-09T20:47:16.011Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e4/7df62002499080dbd61b505c5cb351aa09e9959d176cac2aa8da6f93b13b/jiter-0.12.0-cp311-cp311-win32.whl", hash = "sha256:453b6035672fecce8007465896a25b28a6b59cfe8fbc974b2563a92f5a92a67c", size = 206096, upload-time = "2025-11-09T20:47:17.344Z" }, + { url = "https://files.pythonhosted.org/packages/bb/60/1032b30ae0572196b0de0e87dce3b6c26a1eff71aad5fe43dee3082d32e0/jiter-0.12.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:ca264b9603973c2ad9435c71a8ec8b49f8f715ab5ba421c85a51cde9887e421f", size = 204899, upload-time = "2025-11-09T20:47:19.365Z" }, + { url = "https://files.pythonhosted.org/packages/49/d5/c145e526fccdb834063fb45c071df78b0cc426bbaf6de38b0781f45d956f/jiter-0.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:cb00ef392e7d684f2754598c02c409f376ddcef857aae796d559e6cacc2d78a5", size = 188070, upload-time = "2025-11-09T20:47:20.75Z" }, + { url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449, upload-time = "2025-11-09T20:47:22.999Z" }, + { url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855, upload-time = "2025-11-09T20:47:24.779Z" }, + { url = "https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171, upload-time = "2025-11-09T20:47:26.469Z" }, + { url = "https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590, upload-time = "2025-11-09T20:47:27.918Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462, upload-time = "2025-11-09T20:47:29.654Z" }, + { url = "https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983, upload-time = "2025-11-09T20:47:31.026Z" }, + { url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328, upload-time = "2025-11-09T20:47:33.286Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740, upload-time = "2025-11-09T20:47:34.703Z" }, + { url = "https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875, upload-time = "2025-11-09T20:47:36.058Z" }, + { url = "https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457, upload-time = "2025-11-09T20:47:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546, upload-time = "2025-11-09T20:47:40.47Z" }, + { url = "https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196, upload-time = "2025-11-09T20:47:41.794Z" }, + { url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100, upload-time = "2025-11-09T20:47:43.007Z" }, + { url = "https://files.pythonhosted.org/packages/fe/54/5339ef1ecaa881c6948669956567a64d2670941925f245c434f494ffb0e5/jiter-0.12.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:4739a4657179ebf08f85914ce50332495811004cc1747852e8b2041ed2aab9b8", size = 311144, upload-time = "2025-11-09T20:49:10.503Z" }, + { url = "https://files.pythonhosted.org/packages/27/74/3446c652bffbd5e81ab354e388b1b5fc1d20daac34ee0ed11ff096b1b01a/jiter-0.12.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:41da8def934bf7bec16cb24bd33c0ca62126d2d45d81d17b864bd5ad721393c3", size = 305877, upload-time = "2025-11-09T20:49:12.269Z" }, + { url = "https://files.pythonhosted.org/packages/a1/f4/ed76ef9043450f57aac2d4fbeb27175aa0eb9c38f833be6ef6379b3b9a86/jiter-0.12.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c44ee814f499c082e69872d426b624987dbc5943ab06e9bbaa4f81989fdb79e", size = 340419, upload-time = "2025-11-09T20:49:13.803Z" }, + { url = "https://files.pythonhosted.org/packages/21/01/857d4608f5edb0664aa791a3d45702e1a5bcfff9934da74035e7b9803846/jiter-0.12.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd2097de91cf03eaa27b3cbdb969addf83f0179c6afc41bbc4513705e013c65d", size = 347212, upload-time = "2025-11-09T20:49:15.643Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974, upload-time = "2025-11-09T20:49:17.187Z" }, + { url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233, upload-time = "2025-11-09T20:49:18.734Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 338537, upload-time = "2025-11-09T20:49:20.317Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110, upload-time = "2025-11-09T20:49:21.817Z" }, ] [[package]] name = "jmespath" version = "0.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3c/56/3f325b1eef9791759784aa5046a8f6a1aff8f7c898a2e34506771d3b99d8/jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", size = 21607 } +sdist = { url = "https://files.pythonhosted.org/packages/3c/56/3f325b1eef9791759784aa5046a8f6a1aff8f7c898a2e34506771d3b99d8/jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", size = 21607, upload-time = "2020-05-12T22:03:47.267Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/cb/5f001272b6faeb23c1c9e0acc04d48eaaf5c862c17709d20e3469c6e0139/jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f", size = 24489 }, + { url = "https://files.pythonhosted.org/packages/07/cb/5f001272b6faeb23c1c9e0acc04d48eaaf5c862c17709d20e3469c6e0139/jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f", size = 24489, upload-time = "2020-05-12T22:03:45.643Z" }, ] [[package]] name = "joblib" version = "1.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/5d/447af5ea094b9e4c4054f82e223ada074c552335b9b4b2d14bd9b35a67c4/joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55", size = 331077 } +sdist = { url = "https://files.pythonhosted.org/packages/e8/5d/447af5ea094b9e4c4054f82e223ada074c552335b9b4b2d14bd9b35a67c4/joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55", size = 331077, upload-time = "2025-08-27T12:15:46.575Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/e8/685f47e0d754320684db4425a0967f7d3fa70126bffd76110b7009a0090f/joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241", size = 308396 }, + { url = "https://files.pythonhosted.org/packages/1e/e8/685f47e0d754320684db4425a0967f7d3fa70126bffd76110b7009a0090f/joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241", size = 308396, upload-time = "2025-08-27T12:15:45.188Z" }, ] [[package]] name = "json-repair" -version = "0.54.1" +version = "0.54.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/00/46/d3a4d9a3dad39bb4a2ad16b8adb9fe2e8611b20b71197fe33daa6768e85d/json_repair-0.54.1.tar.gz", hash = "sha256:d010bc31f1fc66e7c36dc33bff5f8902674498ae5cb8e801ad455a53b455ad1d", size = 38555 } +sdist = { url = "https://files.pythonhosted.org/packages/b5/86/48b12ac02032f121ac7e5f11a32143edca6c1e3d19ffc54d6fb9ca0aafd0/json_repair-0.54.3.tar.gz", hash = "sha256:e50feec9725e52ac91f12184609754684ac1656119dfbd31de09bdaf9a1d8bf6", size = 38626, upload-time = "2025-12-15T09:41:58.594Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/db/96/c9aad7ee949cc1bf15df91f347fbc2d3bd10b30b80c7df689ce6fe9332b5/json_repair-0.54.1-py3-none-any.whl", hash = "sha256:016160c5db5d5fe443164927bb58d2dfbba5f43ad85719fa9bc51c713a443ab1", size = 29311 }, + { url = "https://files.pythonhosted.org/packages/e9/08/abe317237add63c3e62f18a981bccf92112b431835b43d844aedaf61f4a0/json_repair-0.54.3-py3-none-any.whl", hash = "sha256:4cdc132ee27d4780576f71bf27a113877046224a808bfc17392e079cb344fb81", size = 29357, upload-time = "2025-12-15T09:41:57.436Z" }, ] [[package]] @@ -3048,9 +3089,9 @@ dependencies = [ { name = "referencing" }, { name = "rpds-py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342 } +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040 }, + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, ] [[package]] @@ -3060,18 +3101,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "referencing" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855 } +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437 }, + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, ] [[package]] name = "kaitaistruct" version = "0.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/27/b8/ca7319556912f68832daa4b81425314857ec08dfccd8dbc8c0f65c992108/kaitaistruct-0.11.tar.gz", hash = "sha256:053ee764288e78b8e53acf748e9733268acbd579b8d82a427b1805453625d74b", size = 11519 } +sdist = { url = "https://files.pythonhosted.org/packages/27/b8/ca7319556912f68832daa4b81425314857ec08dfccd8dbc8c0f65c992108/kaitaistruct-0.11.tar.gz", hash = 
"sha256:053ee764288e78b8e53acf748e9733268acbd579b8d82a427b1805453625d74b", size = 11519, upload-time = "2025-09-08T15:46:25.037Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/4a/cf14bf3b1f5ffb13c69cf5f0ea78031247790558ee88984a8bdd22fae60d/kaitaistruct-0.11-py2.py3-none-any.whl", hash = "sha256:5c6ce79177b4e193a577ecd359e26516d1d6d000a0bffd6e1010f2a46a62a561", size = 11372 }, + { url = "https://files.pythonhosted.org/packages/4a/4a/cf14bf3b1f5ffb13c69cf5f0ea78031247790558ee88984a8bdd22fae60d/kaitaistruct-0.11-py2.py3-none-any.whl", hash = "sha256:5c6ce79177b4e193a577ecd359e26516d1d6d000a0bffd6e1010f2a46a62a561", size = 11372, upload-time = "2025-09-08T15:46:23.635Z" }, ] [[package]] @@ -3084,9 +3125,9 @@ dependencies = [ { name = "tzdata" }, { name = "vine" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0f/d3/5ff936d8319ac86b9c409f1501b07c426e6ad41966fedace9ef1b966e23f/kombu-5.5.4.tar.gz", hash = "sha256:886600168275ebeada93b888e831352fe578168342f0d1d5833d88ba0d847363", size = 461992 } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d3/5ff936d8319ac86b9c409f1501b07c426e6ad41966fedace9ef1b966e23f/kombu-5.5.4.tar.gz", hash = "sha256:886600168275ebeada93b888e831352fe578168342f0d1d5833d88ba0d847363", size = 461992, upload-time = "2025-06-01T10:19:22.281Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/70/a07dcf4f62598c8ad579df241af55ced65bed76e42e45d3c368a6d82dbc1/kombu-5.5.4-py3-none-any.whl", hash = "sha256:a12ed0557c238897d8e518f1d1fdf84bd1516c5e305af2dacd85c2015115feb8", size = 210034 }, + { url = "https://files.pythonhosted.org/packages/ef/70/a07dcf4f62598c8ad579df241af55ced65bed76e42e45d3c368a6d82dbc1/kombu-5.5.4-py3-none-any.whl", hash = "sha256:a12ed0557c238897d8e518f1d1fdf84bd1516c5e305af2dacd85c2015115feb8", size = 210034, upload-time = "2025-06-01T10:19:20.436Z" }, ] [[package]] @@ -3106,9 +3147,9 @@ dependencies = [ { name = "urllib3" }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/52/19ebe8004c243fdfa78268a96727c71e08f00ff6fe69a301d0b7fcbce3c2/kubernetes-33.1.0.tar.gz", hash = "sha256:f64d829843a54c251061a8e7a14523b521f2dc5c896cf6d65ccf348648a88993", size = 1036779 } +sdist = { url = "https://files.pythonhosted.org/packages/ae/52/19ebe8004c243fdfa78268a96727c71e08f00ff6fe69a301d0b7fcbce3c2/kubernetes-33.1.0.tar.gz", hash = "sha256:f64d829843a54c251061a8e7a14523b521f2dc5c896cf6d65ccf348648a88993", size = 1036779, upload-time = "2025-06-09T21:57:58.521Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/43/d9bebfc3db7dea6ec80df5cb2aad8d274dd18ec2edd6c4f21f32c237cbbb/kubernetes-33.1.0-py2.py3-none-any.whl", hash = "sha256:544de42b24b64287f7e0aa9513c93cb503f7f40eea39b20f66810011a86eabc5", size = 1941335 }, + { url = "https://files.pythonhosted.org/packages/89/43/d9bebfc3db7dea6ec80df5cb2aad8d274dd18ec2edd6c4f21f32c237cbbb/kubernetes-33.1.0-py2.py3-none-any.whl", hash = "sha256:544de42b24b64287f7e0aa9513c93cb503f7f40eea39b20f66810011a86eabc5", size = 1941335, upload-time = "2025-06-09T21:57:56.327Z" }, ] [[package]] @@ -3118,7 +3159,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0e/72/a3add0e4eec4eb9e2569554f7c70f4a3c27712f40e3284d483e88094cc0e/langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0", size = 981474 } +sdist = { url = 
"https://files.pythonhosted.org/packages/0e/72/a3add0e4eec4eb9e2569554f7c70f4a3c27712f40e3284d483e88094cc0e/langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0", size = 981474, upload-time = "2021-05-07T07:54:13.562Z" } [[package]] name = "langfuse" @@ -3133,9 +3174,9 @@ dependencies = [ { name = "pydantic" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/e9/22c9c05d877ab85da6d9008aaa7360f2a9ad58787a8e36e00b1b5be9a990/langfuse-2.51.5.tar.gz", hash = "sha256:55bc37b5c5d3ae133c1a95db09117cfb3117add110ba02ebbf2ce45ac4395c5b", size = 117574 } +sdist = { url = "https://files.pythonhosted.org/packages/3c/e9/22c9c05d877ab85da6d9008aaa7360f2a9ad58787a8e36e00b1b5be9a990/langfuse-2.51.5.tar.gz", hash = "sha256:55bc37b5c5d3ae133c1a95db09117cfb3117add110ba02ebbf2ce45ac4395c5b", size = 117574, upload-time = "2024-10-09T00:59:15.016Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/f7/242a13ca094c78464b7d4df77dfe7d4c44ed77b15fed3d2e3486afa5d2e1/langfuse-2.51.5-py3-none-any.whl", hash = "sha256:b95401ca710ef94b521afa6541933b6f93d7cfd4a97523c8fc75bca4d6d219fb", size = 214281 }, + { url = "https://files.pythonhosted.org/packages/03/f7/242a13ca094c78464b7d4df77dfe7d4c44ed77b15fed3d2e3486afa5d2e1/langfuse-2.51.5-py3-none-any.whl", hash = "sha256:b95401ca710ef94b521afa6541933b6f93d7cfd4a97523c8fc75bca4d6d219fb", size = 214281, upload-time = "2024-10-09T00:59:12.596Z" }, ] [[package]] @@ -3149,9 +3190,9 @@ dependencies = [ { name = "requests" }, { name = "requests-toolbelt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6c/56/201dd94d492ae47c1bf9b50cacc1985113dc2288d8f15857e1f4a6818376/langsmith-0.1.147.tar.gz", hash = "sha256:2e933220318a4e73034657103b3b1a3a6109cc5db3566a7e8e03be8d6d7def7a", size = 300453 } +sdist = { url = "https://files.pythonhosted.org/packages/6c/56/201dd94d492ae47c1bf9b50cacc1985113dc2288d8f15857e1f4a6818376/langsmith-0.1.147.tar.gz", hash = "sha256:2e933220318a4e73034657103b3b1a3a6109cc5db3566a7e8e03be8d6d7def7a", size = 300453, upload-time = "2024-11-27T17:32:41.297Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/f0/63b06b99b730b9954f8709f6f7d9b8d076fa0a973e472efe278089bde42b/langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15", size = 311812 }, + { url = "https://files.pythonhosted.org/packages/de/f0/63b06b99b730b9954f8709f6f7d9b8d076fa0a973e472efe278089bde42b/langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15", size = 311812, upload-time = "2024-11-27T17:32:39.569Z" }, ] [[package]] @@ -3172,108 +3213,108 @@ dependencies = [ { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8c/65/71fe4851709fa4a612e41b80001a9ad803fea979d21b90970093fd65eded/litellm-1.77.1.tar.gz", hash = "sha256:76bab5203115efb9588244e5bafbfc07a800a239be75d8dc6b1b9d17394c6418", size = 10275745 } +sdist = { url = "https://files.pythonhosted.org/packages/8c/65/71fe4851709fa4a612e41b80001a9ad803fea979d21b90970093fd65eded/litellm-1.77.1.tar.gz", hash = "sha256:76bab5203115efb9588244e5bafbfc07a800a239be75d8dc6b1b9d17394c6418", size = 10275745, upload-time = "2025-09-13T21:05:21.377Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/dc/ff4f119cd4d783742c9648a03e0ba5c2b52fc385b2ae9f0d32acf3a78241/litellm-1.77.1-py3-none-any.whl", hash = 
"sha256:407761dc3c35fbcd41462d3fe65dd3ed70aac705f37cde318006c18940f695a0", size = 9067070 }, + { url = "https://files.pythonhosted.org/packages/bb/dc/ff4f119cd4d783742c9648a03e0ba5c2b52fc385b2ae9f0d32acf3a78241/litellm-1.77.1-py3-none-any.whl", hash = "sha256:407761dc3c35fbcd41462d3fe65dd3ed70aac705f37cde318006c18940f695a0", size = 9067070, upload-time = "2025-09-13T21:05:18.078Z" }, ] [[package]] name = "llvmlite" version = "0.45.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/99/8d/5baf1cef7f9c084fb35a8afbde88074f0d6a727bc63ef764fe0e7543ba40/llvmlite-0.45.1.tar.gz", hash = "sha256:09430bb9d0bb58fc45a45a57c7eae912850bedc095cd0810a57de109c69e1c32", size = 185600 } +sdist = { url = "https://files.pythonhosted.org/packages/99/8d/5baf1cef7f9c084fb35a8afbde88074f0d6a727bc63ef764fe0e7543ba40/llvmlite-0.45.1.tar.gz", hash = "sha256:09430bb9d0bb58fc45a45a57c7eae912850bedc095cd0810a57de109c69e1c32", size = 185600, upload-time = "2025-10-01T17:59:52.046Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/ad/9bdc87b2eb34642c1cfe6bcb4f5db64c21f91f26b010f263e7467e7536a3/llvmlite-0.45.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:60f92868d5d3af30b4239b50e1717cb4e4e54f6ac1c361a27903b318d0f07f42", size = 43043526 }, - { url = "https://files.pythonhosted.org/packages/a5/ea/c25c6382f452a943b4082da5e8c1665ce29a62884e2ec80608533e8e82d5/llvmlite-0.45.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98baab513e19beb210f1ef39066288784839a44cd504e24fff5d17f1b3cf0860", size = 37253118 }, - { url = "https://files.pythonhosted.org/packages/fe/af/85fc237de98b181dbbe8647324331238d6c52a3554327ccdc83ced28efba/llvmlite-0.45.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3adc2355694d6a6fbcc024d59bb756677e7de506037c878022d7b877e7613a36", size = 56288209 }, - { url = "https://files.pythonhosted.org/packages/0a/df/3daf95302ff49beff4230065e3178cd40e71294968e8d55baf4a9e560814/llvmlite-0.45.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f3377a6db40f563058c9515dedcc8a3e562d8693a106a28f2ddccf2c8fcf6ca", size = 55140958 }, - { url = "https://files.pythonhosted.org/packages/a4/56/4c0d503fe03bac820ecdeb14590cf9a248e120f483bcd5c009f2534f23f0/llvmlite-0.45.1-cp311-cp311-win_amd64.whl", hash = "sha256:f9c272682d91e0d57f2a76c6d9ebdfccc603a01828cdbe3d15273bdca0c3363a", size = 38132232 }, - { url = "https://files.pythonhosted.org/packages/e2/7c/82cbd5c656e8991bcc110c69d05913be2229302a92acb96109e166ae31fb/llvmlite-0.45.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:28e763aba92fe9c72296911e040231d486447c01d4f90027c8e893d89d49b20e", size = 43043524 }, - { url = "https://files.pythonhosted.org/packages/9d/bc/5314005bb2c7ee9f33102c6456c18cc81745d7055155d1218f1624463774/llvmlite-0.45.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1a53f4b74ee9fd30cb3d27d904dadece67a7575198bd80e687ee76474620735f", size = 37253123 }, - { url = "https://files.pythonhosted.org/packages/96/76/0f7154952f037cb320b83e1c952ec4a19d5d689cf7d27cb8a26887d7bbc1/llvmlite-0.45.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b3796b1b1e1c14dcae34285d2f4ea488402fbd2c400ccf7137603ca3800864f", size = 56288211 }, - { url = "https://files.pythonhosted.org/packages/00/b1/0b581942be2683ceb6862d558979e87387e14ad65a1e4db0e7dd671fa315/llvmlite-0.45.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:779e2f2ceefef0f4368548685f0b4adde34e5f4b457e90391f570a10b348d433", size = 55140958 }, - { url = "https://files.pythonhosted.org/packages/33/94/9ba4ebcf4d541a325fd8098ddc073b663af75cc8b065b6059848f7d4dce7/llvmlite-0.45.1-cp312-cp312-win_amd64.whl", hash = "sha256:9e6c9949baf25d9aa9cd7cf0f6d011b9ca660dd17f5ba2b23bdbdb77cc86b116", size = 38132231 }, + { url = "https://files.pythonhosted.org/packages/04/ad/9bdc87b2eb34642c1cfe6bcb4f5db64c21f91f26b010f263e7467e7536a3/llvmlite-0.45.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:60f92868d5d3af30b4239b50e1717cb4e4e54f6ac1c361a27903b318d0f07f42", size = 43043526, upload-time = "2025-10-01T18:03:15.051Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ea/c25c6382f452a943b4082da5e8c1665ce29a62884e2ec80608533e8e82d5/llvmlite-0.45.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98baab513e19beb210f1ef39066288784839a44cd504e24fff5d17f1b3cf0860", size = 37253118, upload-time = "2025-10-01T18:04:06.783Z" }, + { url = "https://files.pythonhosted.org/packages/fe/af/85fc237de98b181dbbe8647324331238d6c52a3554327ccdc83ced28efba/llvmlite-0.45.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3adc2355694d6a6fbcc024d59bb756677e7de506037c878022d7b877e7613a36", size = 56288209, upload-time = "2025-10-01T18:01:00.168Z" }, + { url = "https://files.pythonhosted.org/packages/0a/df/3daf95302ff49beff4230065e3178cd40e71294968e8d55baf4a9e560814/llvmlite-0.45.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f3377a6db40f563058c9515dedcc8a3e562d8693a106a28f2ddccf2c8fcf6ca", size = 55140958, upload-time = "2025-10-01T18:02:11.199Z" }, + { url = "https://files.pythonhosted.org/packages/a4/56/4c0d503fe03bac820ecdeb14590cf9a248e120f483bcd5c009f2534f23f0/llvmlite-0.45.1-cp311-cp311-win_amd64.whl", hash = "sha256:f9c272682d91e0d57f2a76c6d9ebdfccc603a01828cdbe3d15273bdca0c3363a", size = 38132232, upload-time = "2025-10-01T18:04:52.181Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7c/82cbd5c656e8991bcc110c69d05913be2229302a92acb96109e166ae31fb/llvmlite-0.45.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:28e763aba92fe9c72296911e040231d486447c01d4f90027c8e893d89d49b20e", size = 43043524, upload-time = "2025-10-01T18:03:30.666Z" }, + { url = "https://files.pythonhosted.org/packages/9d/bc/5314005bb2c7ee9f33102c6456c18cc81745d7055155d1218f1624463774/llvmlite-0.45.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1a53f4b74ee9fd30cb3d27d904dadece67a7575198bd80e687ee76474620735f", size = 37253123, upload-time = "2025-10-01T18:04:18.177Z" }, + { url = "https://files.pythonhosted.org/packages/96/76/0f7154952f037cb320b83e1c952ec4a19d5d689cf7d27cb8a26887d7bbc1/llvmlite-0.45.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b3796b1b1e1c14dcae34285d2f4ea488402fbd2c400ccf7137603ca3800864f", size = 56288211, upload-time = "2025-10-01T18:01:24.079Z" }, + { url = "https://files.pythonhosted.org/packages/00/b1/0b581942be2683ceb6862d558979e87387e14ad65a1e4db0e7dd671fa315/llvmlite-0.45.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:779e2f2ceefef0f4368548685f0b4adde34e5f4b457e90391f570a10b348d433", size = 55140958, upload-time = "2025-10-01T18:02:30.482Z" }, + { url = "https://files.pythonhosted.org/packages/33/94/9ba4ebcf4d541a325fd8098ddc073b663af75cc8b065b6059848f7d4dce7/llvmlite-0.45.1-cp312-cp312-win_amd64.whl", hash = "sha256:9e6c9949baf25d9aa9cd7cf0f6d011b9ca660dd17f5ba2b23bdbdb77cc86b116", size = 38132231, upload-time = 
"2025-10-01T18:05:03.664Z" }, ] [[package]] name = "lxml" version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426 } +sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365 }, - { url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793 }, - { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362 }, - { url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152 }, - { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539 }, - { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853 }, - { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133 }, - { url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944 }, - { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535 }, - { url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343 }, - { url = 
"https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419 }, - { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008 }, - { url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906 }, - { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357 }, - { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583 }, - { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591 }, - { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887 }, - { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818 }, - { url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807 }, - { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179 }, - { url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044 }, - { url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685 }, - { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127 }, 
- { url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958 }, - { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541 }, - { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426 }, - { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917 }, - { url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795 }, - { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759 }, - { url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666 }, - { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989 }, - { url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456 }, - { url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793 }, - { url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836 }, - { url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829 }, - { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277 }, - { url = 
"https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433 }, - { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119 }, - { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314 }, - { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768 }, + { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" }, + { url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" }, + { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" }, + { url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" }, + { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" }, + { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" }, + { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" }, + { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = "2025-09-22T04:01:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" }, + { url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" }, + { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" }, + { url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" }, + { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = 
"2025-09-22T04:01:41.506Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" }, + { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" }, + { url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" }, + { url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" }, + { url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" }, + { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" }, + { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" }, + { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" }, + { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" }, ] [[package]] name = 
"lxml-stubs" version = "0.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/99/da/1a3a3e5d159b249fc2970d73437496b908de8e4716a089c69591b4ffa6fd/lxml-stubs-0.5.1.tar.gz", hash = "sha256:e0ec2aa1ce92d91278b719091ce4515c12adc1d564359dfaf81efa7d4feab79d", size = 14778 } +sdist = { url = "https://files.pythonhosted.org/packages/99/da/1a3a3e5d159b249fc2970d73437496b908de8e4716a089c69591b4ffa6fd/lxml-stubs-0.5.1.tar.gz", hash = "sha256:e0ec2aa1ce92d91278b719091ce4515c12adc1d564359dfaf81efa7d4feab79d", size = 14778, upload-time = "2024-01-10T09:37:46.521Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1f/c9/e0f8e4e6e8a69e5959b06499582dca6349db6769cc7fdfb8a02a7c75a9ae/lxml_stubs-0.5.1-py3-none-any.whl", hash = "sha256:1f689e5dbc4b9247cb09ae820c7d34daeb1fdbd1db06123814b856dae7787272", size = 13584 }, + { url = "https://files.pythonhosted.org/packages/1f/c9/e0f8e4e6e8a69e5959b06499582dca6349db6769cc7fdfb8a02a7c75a9ae/lxml_stubs-0.5.1-py3-none-any.whl", hash = "sha256:1f689e5dbc4b9247cb09ae820c7d34daeb1fdbd1db06123814b856dae7787272", size = 13584, upload-time = "2024-01-10T09:37:44.931Z" }, ] [[package]] name = "lz4" version = "4.4.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/57/51/f1b86d93029f418033dddf9b9f79c8d2641e7454080478ee2aab5123173e/lz4-4.4.5.tar.gz", hash = "sha256:5f0b9e53c1e82e88c10d7c180069363980136b9d7a8306c4dca4f760d60c39f0", size = 172886 } +sdist = { url = "https://files.pythonhosted.org/packages/57/51/f1b86d93029f418033dddf9b9f79c8d2641e7454080478ee2aab5123173e/lz4-4.4.5.tar.gz", hash = "sha256:5f0b9e53c1e82e88c10d7c180069363980136b9d7a8306c4dca4f760d60c39f0", size = 172886, upload-time = "2025-11-03T13:02:36.061Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/5b/6edcd23319d9e28b1bedf32768c3d1fd56eed8223960a2c47dacd2cec2af/lz4-4.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d6da84a26b3aa5da13a62e4b89ab36a396e9327de8cd48b436a3467077f8ccd4", size = 207391 }, - { url = "https://files.pythonhosted.org/packages/34/36/5f9b772e85b3d5769367a79973b8030afad0d6b724444083bad09becd66f/lz4-4.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61d0ee03e6c616f4a8b69987d03d514e8896c8b1b7cc7598ad029e5c6aedfd43", size = 207146 }, - { url = "https://files.pythonhosted.org/packages/04/f4/f66da5647c0d72592081a37c8775feacc3d14d2625bbdaabd6307c274565/lz4-4.4.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:33dd86cea8375d8e5dd001e41f321d0a4b1eb7985f39be1b6a4f466cd480b8a7", size = 1292623 }, - { url = "https://files.pythonhosted.org/packages/85/fc/5df0f17467cdda0cad464a9197a447027879197761b55faad7ca29c29a04/lz4-4.4.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:609a69c68e7cfcfa9d894dc06be13f2e00761485b62df4e2472f1b66f7b405fb", size = 1279982 }, - { url = "https://files.pythonhosted.org/packages/25/3b/b55cb577aa148ed4e383e9700c36f70b651cd434e1c07568f0a86c9d5fbb/lz4-4.4.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:75419bb1a559af00250b8f1360d508444e80ed4b26d9d40ec5b09fe7875cb989", size = 1368674 }, - { url = "https://files.pythonhosted.org/packages/fb/31/e97e8c74c59ea479598e5c55cbe0b1334f03ee74ca97726e872944ed42df/lz4-4.4.5-cp311-cp311-win32.whl", hash = "sha256:12233624f1bc2cebc414f9efb3113a03e89acce3ab6f72035577bc61b270d24d", size = 88168 }, - { url = 
"https://files.pythonhosted.org/packages/18/47/715865a6c7071f417bef9b57c8644f29cb7a55b77742bd5d93a609274e7e/lz4-4.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:8a842ead8ca7c0ee2f396ca5d878c4c40439a527ebad2b996b0444f0074ed004", size = 99491 }, - { url = "https://files.pythonhosted.org/packages/14/e7/ac120c2ca8caec5c945e6356ada2aa5cfabd83a01e3170f264a5c42c8231/lz4-4.4.5-cp311-cp311-win_arm64.whl", hash = "sha256:83bc23ef65b6ae44f3287c38cbf82c269e2e96a26e560aa551735883388dcc4b", size = 91271 }, - { url = "https://files.pythonhosted.org/packages/1b/ac/016e4f6de37d806f7cc8f13add0a46c9a7cfc41a5ddc2bc831d7954cf1ce/lz4-4.4.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:df5aa4cead2044bab83e0ebae56e0944cc7fcc1505c7787e9e1057d6d549897e", size = 207163 }, - { url = "https://files.pythonhosted.org/packages/8d/df/0fadac6e5bd31b6f34a1a8dbd4db6a7606e70715387c27368586455b7fc9/lz4-4.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d0bf51e7745484d2092b3a51ae6eb58c3bd3ce0300cf2b2c14f76c536d5697a", size = 207150 }, - { url = "https://files.pythonhosted.org/packages/b7/17/34e36cc49bb16ca73fb57fbd4c5eaa61760c6b64bce91fcb4e0f4a97f852/lz4-4.4.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7b62f94b523c251cf32aa4ab555f14d39bd1a9df385b72443fd76d7c7fb051f5", size = 1292045 }, - { url = "https://files.pythonhosted.org/packages/90/1c/b1d8e3741e9fc89ed3b5f7ef5f22586c07ed6bb04e8343c2e98f0fa7ff04/lz4-4.4.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2c3ea562c3af274264444819ae9b14dbbf1ab070aff214a05e97db6896c7597e", size = 1279546 }, - { url = "https://files.pythonhosted.org/packages/55/d9/e3867222474f6c1b76e89f3bd914595af69f55bf2c1866e984c548afdc15/lz4-4.4.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24092635f47538b392c4eaeff14c7270d2c8e806bf4be2a6446a378591c5e69e", size = 1368249 }, - { url = "https://files.pythonhosted.org/packages/b2/e7/d667d337367686311c38b580d1ca3d5a23a6617e129f26becd4f5dc458df/lz4-4.4.5-cp312-cp312-win32.whl", hash = "sha256:214e37cfe270948ea7eb777229e211c601a3e0875541c1035ab408fbceaddf50", size = 88189 }, - { url = "https://files.pythonhosted.org/packages/a5/0b/a54cd7406995ab097fceb907c7eb13a6ddd49e0b231e448f1a81a50af65c/lz4-4.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:713a777de88a73425cf08eb11f742cd2c98628e79a8673d6a52e3c5f0c116f33", size = 99497 }, - { url = "https://files.pythonhosted.org/packages/6a/7e/dc28a952e4bfa32ca16fa2eb026e7a6ce5d1411fcd5986cd08c74ec187b9/lz4-4.4.5-cp312-cp312-win_arm64.whl", hash = "sha256:a88cbb729cc333334ccfb52f070463c21560fca63afcf636a9f160a55fac3301", size = 91279 }, + { url = "https://files.pythonhosted.org/packages/93/5b/6edcd23319d9e28b1bedf32768c3d1fd56eed8223960a2c47dacd2cec2af/lz4-4.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d6da84a26b3aa5da13a62e4b89ab36a396e9327de8cd48b436a3467077f8ccd4", size = 207391, upload-time = "2025-11-03T13:01:36.644Z" }, + { url = "https://files.pythonhosted.org/packages/34/36/5f9b772e85b3d5769367a79973b8030afad0d6b724444083bad09becd66f/lz4-4.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61d0ee03e6c616f4a8b69987d03d514e8896c8b1b7cc7598ad029e5c6aedfd43", size = 207146, upload-time = "2025-11-03T13:01:37.928Z" }, + { url = "https://files.pythonhosted.org/packages/04/f4/f66da5647c0d72592081a37c8775feacc3d14d2625bbdaabd6307c274565/lz4-4.4.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:33dd86cea8375d8e5dd001e41f321d0a4b1eb7985f39be1b6a4f466cd480b8a7", size = 1292623, upload-time = "2025-11-03T13:01:39.341Z" }, + { url = "https://files.pythonhosted.org/packages/85/fc/5df0f17467cdda0cad464a9197a447027879197761b55faad7ca29c29a04/lz4-4.4.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:609a69c68e7cfcfa9d894dc06be13f2e00761485b62df4e2472f1b66f7b405fb", size = 1279982, upload-time = "2025-11-03T13:01:40.816Z" }, + { url = "https://files.pythonhosted.org/packages/25/3b/b55cb577aa148ed4e383e9700c36f70b651cd434e1c07568f0a86c9d5fbb/lz4-4.4.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:75419bb1a559af00250b8f1360d508444e80ed4b26d9d40ec5b09fe7875cb989", size = 1368674, upload-time = "2025-11-03T13:01:42.118Z" }, + { url = "https://files.pythonhosted.org/packages/fb/31/e97e8c74c59ea479598e5c55cbe0b1334f03ee74ca97726e872944ed42df/lz4-4.4.5-cp311-cp311-win32.whl", hash = "sha256:12233624f1bc2cebc414f9efb3113a03e89acce3ab6f72035577bc61b270d24d", size = 88168, upload-time = "2025-11-03T13:01:43.282Z" }, + { url = "https://files.pythonhosted.org/packages/18/47/715865a6c7071f417bef9b57c8644f29cb7a55b77742bd5d93a609274e7e/lz4-4.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:8a842ead8ca7c0ee2f396ca5d878c4c40439a527ebad2b996b0444f0074ed004", size = 99491, upload-time = "2025-11-03T13:01:44.167Z" }, + { url = "https://files.pythonhosted.org/packages/14/e7/ac120c2ca8caec5c945e6356ada2aa5cfabd83a01e3170f264a5c42c8231/lz4-4.4.5-cp311-cp311-win_arm64.whl", hash = "sha256:83bc23ef65b6ae44f3287c38cbf82c269e2e96a26e560aa551735883388dcc4b", size = 91271, upload-time = "2025-11-03T13:01:45.016Z" }, + { url = "https://files.pythonhosted.org/packages/1b/ac/016e4f6de37d806f7cc8f13add0a46c9a7cfc41a5ddc2bc831d7954cf1ce/lz4-4.4.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:df5aa4cead2044bab83e0ebae56e0944cc7fcc1505c7787e9e1057d6d549897e", size = 207163, upload-time = "2025-11-03T13:01:45.895Z" }, + { url = "https://files.pythonhosted.org/packages/8d/df/0fadac6e5bd31b6f34a1a8dbd4db6a7606e70715387c27368586455b7fc9/lz4-4.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d0bf51e7745484d2092b3a51ae6eb58c3bd3ce0300cf2b2c14f76c536d5697a", size = 207150, upload-time = "2025-11-03T13:01:47.205Z" }, + { url = "https://files.pythonhosted.org/packages/b7/17/34e36cc49bb16ca73fb57fbd4c5eaa61760c6b64bce91fcb4e0f4a97f852/lz4-4.4.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7b62f94b523c251cf32aa4ab555f14d39bd1a9df385b72443fd76d7c7fb051f5", size = 1292045, upload-time = "2025-11-03T13:01:48.667Z" }, + { url = "https://files.pythonhosted.org/packages/90/1c/b1d8e3741e9fc89ed3b5f7ef5f22586c07ed6bb04e8343c2e98f0fa7ff04/lz4-4.4.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2c3ea562c3af274264444819ae9b14dbbf1ab070aff214a05e97db6896c7597e", size = 1279546, upload-time = "2025-11-03T13:01:50.159Z" }, + { url = "https://files.pythonhosted.org/packages/55/d9/e3867222474f6c1b76e89f3bd914595af69f55bf2c1866e984c548afdc15/lz4-4.4.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24092635f47538b392c4eaeff14c7270d2c8e806bf4be2a6446a378591c5e69e", size = 1368249, upload-time = "2025-11-03T13:01:51.273Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e7/d667d337367686311c38b580d1ca3d5a23a6617e129f26becd4f5dc458df/lz4-4.4.5-cp312-cp312-win32.whl", hash = 
"sha256:214e37cfe270948ea7eb777229e211c601a3e0875541c1035ab408fbceaddf50", size = 88189, upload-time = "2025-11-03T13:01:52.605Z" }, + { url = "https://files.pythonhosted.org/packages/a5/0b/a54cd7406995ab097fceb907c7eb13a6ddd49e0b231e448f1a81a50af65c/lz4-4.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:713a777de88a73425cf08eb11f742cd2c98628e79a8673d6a52e3c5f0c116f33", size = 99497, upload-time = "2025-11-03T13:01:53.477Z" }, + { url = "https://files.pythonhosted.org/packages/6a/7e/dc28a952e4bfa32ca16fa2eb026e7a6ce5d1411fcd5986cd08c74ec187b9/lz4-4.4.5-cp312-cp312-win_arm64.whl", hash = "sha256:a88cbb729cc333334ccfb52f070463c21560fca63afcf636a9f160a55fac3301", size = 91279, upload-time = "2025-11-03T13:01:54.419Z" }, ] [[package]] @@ -3283,18 +3324,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474 } +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509 }, + { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, ] [[package]] name = "markdown" version = "3.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/28/c5441a6642681d92de56063fa7984df56f783d3f1eba518dc3e7a253b606/Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8", size = 349398 } +sdist = { url = "https://files.pythonhosted.org/packages/11/28/c5441a6642681d92de56063fa7984df56f783d3f1eba518dc3e7a253b606/Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8", size = 349398, upload-time = "2024-01-10T15:19:38.261Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/f4/f0031854de10a0bc7821ef9fca0b92ca0d7aa6fbfbf504c5473ba825e49c/Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd", size = 103870 }, + { url = "https://files.pythonhosted.org/packages/42/f4/f0031854de10a0bc7821ef9fca0b92ca0d7aa6fbfbf504c5473ba825e49c/Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd", size = 103870, upload-time = "2024-01-10T15:19:36.071Z" }, ] [[package]] @@ -3304,39 +3345,39 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070 } +sdist = { url = 
"https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321 }, + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, ] [[package]] name = "markupsafe" version = "3.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313 } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631 }, - { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058 }, - { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287 }, - { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940 }, - { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887 }, - { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692 }, - { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471 }, - { url = 
"https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923 }, - { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572 }, - { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077 }, - { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876 }, - { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615 }, - { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020 }, - { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332 }, - { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947 }, - { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962 }, - { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760 }, - { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529 }, - { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015 }, - { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = 
"sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540 }, - { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105 }, - { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906 }, + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, ] [[package]] @@ -3346,18 +3387,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ab/5e/5e53d26b42ab75491cda89b871dab9e97c840bf12c63ec58a1919710cd06/marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6", size = 221825 } +sdist = { url = "https://files.pythonhosted.org/packages/ab/5e/5e53d26b42ab75491cda89b871dab9e97c840bf12c63ec58a1919710cd06/marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6", size = 221825, upload-time = "2025-02-03T15:32:25.093Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/75/51952c7b2d3873b44a0028b1bd26a25078c18f92f256608e8d1dc61b39fd/marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c", size = 50878 }, + { url = "https://files.pythonhosted.org/packages/34/75/51952c7b2d3873b44a0028b1bd26a25078c18f92f256608e8d1dc61b39fd/marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c", size = 50878, upload-time = "2025-02-03T15:32:22.295Z" }, ] [[package]] name = "mdurl" version = "0.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] [[package]] @@ -3368,10 +3409,10 @@ dependencies = [ { name = "tqdm" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/b2/acc5024c8e8b6a0b034670b8e8af306ebd633ede777dcbf557eac4785937/milvus_lite-2.5.1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:6b014453200ba977be37ba660cb2d021030375fa6a35bc53c2e1d92980a0c512", size = 27934713 }, - { url = "https://files.pythonhosted.org/packages/9b/2e/746f5bb1d6facd1e73eb4af6dd5efda11125b0f29d7908a097485ca6cad9/milvus_lite-2.5.1-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:a2e031088bf308afe5f8567850412d618cfb05a65238ed1a6117f60decccc95a", size = 24421451 }, - { url = "https://files.pythonhosted.org/packages/2e/cf/3d1fee5c16c7661cf53977067a34820f7269ed8ba99fe9cf35efc1700866/milvus_lite-2.5.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:a13277e9bacc6933dea172e42231f7e6135bd3bdb073dd2688ee180418abd8d9", size = 45337093 }, - { url = "https://files.pythonhosted.org/packages/d3/82/41d9b80f09b82e066894d9b508af07b7b0fa325ce0322980674de49106a0/milvus_lite-2.5.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:25ce13f4b8d46876dd2b7ac8563d7d8306da7ff3999bb0d14b116b30f71d706c", size = 55263911 }, + { url = "https://files.pythonhosted.org/packages/a9/b2/acc5024c8e8b6a0b034670b8e8af306ebd633ede777dcbf557eac4785937/milvus_lite-2.5.1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:6b014453200ba977be37ba660cb2d021030375fa6a35bc53c2e1d92980a0c512", size = 27934713, upload-time = "2025-06-30T04:23:37.028Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2e/746f5bb1d6facd1e73eb4af6dd5efda11125b0f29d7908a097485ca6cad9/milvus_lite-2.5.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a2e031088bf308afe5f8567850412d618cfb05a65238ed1a6117f60decccc95a", size = 24421451, upload-time = "2025-06-30T04:23:51.747Z" }, + { url = "https://files.pythonhosted.org/packages/2e/cf/3d1fee5c16c7661cf53977067a34820f7269ed8ba99fe9cf35efc1700866/milvus_lite-2.5.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:a13277e9bacc6933dea172e42231f7e6135bd3bdb073dd2688ee180418abd8d9", size = 45337093, upload-time = "2025-06-30T04:24:06.706Z" }, + { url = "https://files.pythonhosted.org/packages/d3/82/41d9b80f09b82e066894d9b508af07b7b0fa325ce0322980674de49106a0/milvus_lite-2.5.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:25ce13f4b8d46876dd2b7ac8563d7d8306da7ff3999bb0d14b116b30f71d706c", size = 55263911, upload-time = "2025-06-30T04:24:19.434Z" }, ] [[package]] @@ -3399,49 +3440,49 @@ dependencies = [ { name = "typing-extensions" }, { name = "uvicorn" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8d/8e/2a2d0cd5b1b985c5278202805f48aae6f2adc3ddc0fce3385ec50e07e258/mlflow_skinny-3.6.0.tar.gz", hash = "sha256:cc04706b5b6faace9faf95302a6e04119485e1bfe98ddc9b85b81984e80944b6", size = 1963286 } +sdist = { url = "https://files.pythonhosted.org/packages/8d/8e/2a2d0cd5b1b985c5278202805f48aae6f2adc3ddc0fce3385ec50e07e258/mlflow_skinny-3.6.0.tar.gz", hash = "sha256:cc04706b5b6faace9faf95302a6e04119485e1bfe98ddc9b85b81984e80944b6", size = 1963286, upload-time = "2025-11-07T18:33:52.596Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/78/e8fdc3e1708bdfd1eba64f41ce96b461cae1b505aa08b69352ac99b4caa4/mlflow_skinny-3.6.0-py3-none-any.whl", hash = "sha256:c83b34fce592acb2cc6bddcb507587a6d9ef3f590d9e7a8658c85e0980596d78", size = 2364629 }, + { url = "https://files.pythonhosted.org/packages/0e/78/e8fdc3e1708bdfd1eba64f41ce96b461cae1b505aa08b69352ac99b4caa4/mlflow_skinny-3.6.0-py3-none-any.whl", hash = "sha256:c83b34fce592acb2cc6bddcb507587a6d9ef3f590d9e7a8658c85e0980596d78", size = 2364629, upload-time = "2025-11-07T18:33:50.744Z" }, ] [[package]] name = "mmh3" version = "5.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/af/f28c2c2f51f31abb4725f9a64bc7863d5f491f6539bd26aee2a1d21a649e/mmh3-5.2.0.tar.gz", hash = "sha256:1efc8fec8478e9243a78bb993422cf79f8ff85cb4cf6b79647480a31e0d950a8", size = 33582 } +sdist = { url = 
"https://files.pythonhosted.org/packages/a7/af/f28c2c2f51f31abb4725f9a64bc7863d5f491f6539bd26aee2a1d21a649e/mmh3-5.2.0.tar.gz", hash = "sha256:1efc8fec8478e9243a78bb993422cf79f8ff85cb4cf6b79647480a31e0d950a8", size = 33582, upload-time = "2025-07-29T07:43:48.49Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/87/399567b3796e134352e11a8b973cd470c06b2ecfad5468fe580833be442b/mmh3-5.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7901c893e704ee3c65f92d39b951f8f34ccf8e8566768c58103fb10e55afb8c1", size = 56107 }, - { url = "https://files.pythonhosted.org/packages/c3/09/830af30adf8678955b247d97d3d9543dd2fd95684f3cd41c0cd9d291da9f/mmh3-5.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5f5536b1cbfa72318ab3bfc8a8188b949260baed186b75f0abc75b95d8c051", size = 40635 }, - { url = "https://files.pythonhosted.org/packages/07/14/eaba79eef55b40d653321765ac5e8f6c9ac38780b8a7c2a2f8df8ee0fb72/mmh3-5.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cedac4f4054b8f7859e5aed41aaa31ad03fce6851901a7fdc2af0275ac533c10", size = 40078 }, - { url = "https://files.pythonhosted.org/packages/bb/26/83a0f852e763f81b2265d446b13ed6d49ee49e1fc0c47b9655977e6f3d81/mmh3-5.2.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eb756caf8975882630ce4e9fbbeb9d3401242a72528230422c9ab3a0d278e60c", size = 97262 }, - { url = "https://files.pythonhosted.org/packages/00/7d/b7133b10d12239aeaebf6878d7eaf0bf7d3738c44b4aba3c564588f6d802/mmh3-5.2.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:097e13c8b8a66c5753c6968b7640faefe85d8e38992703c1f666eda6ef4c3762", size = 103118 }, - { url = "https://files.pythonhosted.org/packages/7b/3e/62f0b5dce2e22fd5b7d092aba285abd7959ea2b17148641e029f2eab1ffa/mmh3-5.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7c0c7845566b9686480e6a7e9044db4afb60038d5fabd19227443f0104eeee4", size = 106072 }, - { url = "https://files.pythonhosted.org/packages/66/84/ea88bb816edfe65052c757a1c3408d65c4201ddbd769d4a287b0f1a628b2/mmh3-5.2.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:61ac226af521a572700f863d6ecddc6ece97220ce7174e311948ff8c8919a363", size = 112925 }, - { url = "https://files.pythonhosted.org/packages/2e/13/c9b1c022807db575fe4db806f442d5b5784547e2e82cff36133e58ea31c7/mmh3-5.2.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:582f9dbeefe15c32a5fa528b79b088b599a1dfe290a4436351c6090f90ddebb8", size = 120583 }, - { url = "https://files.pythonhosted.org/packages/8a/5f/0e2dfe1a38f6a78788b7eb2b23432cee24623aeabbc907fed07fc17d6935/mmh3-5.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2ebfc46b39168ab1cd44670a32ea5489bcbc74a25795c61b6d888c5c2cf654ed", size = 99127 }, - { url = "https://files.pythonhosted.org/packages/77/27/aefb7d663b67e6a0c4d61a513c83e39ba2237e8e4557fa7122a742a23de5/mmh3-5.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1556e31e4bd0ac0c17eaf220be17a09c171d7396919c3794274cb3415a9d3646", size = 98544 }, - { url = "https://files.pythonhosted.org/packages/ab/97/a21cc9b1a7c6e92205a1b5fa030cdf62277d177570c06a239eca7bd6dd32/mmh3-5.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:81df0dae22cd0da87f1c978602750f33d17fb3d21fb0f326c89dc89834fea79b", size = 106262 }, - { url = 
"https://files.pythonhosted.org/packages/43/18/db19ae82ea63c8922a880e1498a75342311f8aa0c581c4dd07711473b5f7/mmh3-5.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:eba01ec3bd4a49b9ac5ca2bc6a73ff5f3af53374b8556fcc2966dd2af9eb7779", size = 109824 }, - { url = "https://files.pythonhosted.org/packages/9f/f5/41dcf0d1969125fc6f61d8618b107c79130b5af50b18a4651210ea52ab40/mmh3-5.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9a011469b47b752e7d20de296bb34591cdfcbe76c99c2e863ceaa2aa61113d2", size = 97255 }, - { url = "https://files.pythonhosted.org/packages/32/b3/cce9eaa0efac1f0e735bb178ef9d1d2887b4927fe0ec16609d5acd492dda/mmh3-5.2.0-cp311-cp311-win32.whl", hash = "sha256:bc44fc2b886243d7c0d8daeb37864e16f232e5b56aaec27cc781d848264cfd28", size = 40779 }, - { url = "https://files.pythonhosted.org/packages/7c/e9/3fa0290122e6d5a7041b50ae500b8a9f4932478a51e48f209a3879fe0b9b/mmh3-5.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ebf241072cf2777a492d0e09252f8cc2b3edd07dfdb9404b9757bffeb4f2cee", size = 41549 }, - { url = "https://files.pythonhosted.org/packages/3a/54/c277475b4102588e6f06b2e9095ee758dfe31a149312cdbf62d39a9f5c30/mmh3-5.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:b5f317a727bba0e633a12e71228bc6a4acb4f471a98b1c003163b917311ea9a9", size = 39336 }, - { url = "https://files.pythonhosted.org/packages/bf/6a/d5aa7edb5c08e0bd24286c7d08341a0446f9a2fbbb97d96a8a6dd81935ee/mmh3-5.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:384eda9361a7bf83a85e09447e1feafe081034af9dd428893701b959230d84be", size = 56141 }, - { url = "https://files.pythonhosted.org/packages/08/49/131d0fae6447bc4a7299ebdb1a6fb9d08c9f8dcf97d75ea93e8152ddf7ab/mmh3-5.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2c9da0d568569cc87315cb063486d761e38458b8ad513fedd3dc9263e1b81bcd", size = 40681 }, - { url = "https://files.pythonhosted.org/packages/8f/6f/9221445a6bcc962b7f5ff3ba18ad55bba624bacdc7aa3fc0a518db7da8ec/mmh3-5.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86d1be5d63232e6eb93c50881aea55ff06eb86d8e08f9b5417c8c9b10db9db96", size = 40062 }, - { url = "https://files.pythonhosted.org/packages/1e/d4/6bb2d0fef81401e0bb4c297d1eb568b767de4ce6fc00890bc14d7b51ecc4/mmh3-5.2.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bf7bee43e17e81671c447e9c83499f53d99bf440bc6d9dc26a841e21acfbe094", size = 97333 }, - { url = "https://files.pythonhosted.org/packages/44/e0/ccf0daff8134efbb4fbc10a945ab53302e358c4b016ada9bf97a6bdd50c1/mmh3-5.2.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7aa18cdb58983ee660c9c400b46272e14fa253c675ed963d3812487f8ca42037", size = 103310 }, - { url = "https://files.pythonhosted.org/packages/02/63/1965cb08a46533faca0e420e06aff8bbaf9690a6f0ac6ae6e5b2e4544687/mmh3-5.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9d032488fcec32d22be6542d1a836f00247f40f320844dbb361393b5b22773", size = 106178 }, - { url = "https://files.pythonhosted.org/packages/c2/41/c883ad8e2c234013f27f92061200afc11554ea55edd1bcf5e1accd803a85/mmh3-5.2.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1861fb6b1d0453ed7293200139c0a9011eeb1376632e048e3766945b13313c5", size = 113035 }, - { url = "https://files.pythonhosted.org/packages/df/b5/1ccade8b1fa625d634a18bab7bf08a87457e09d5ec8cf83ca07cbea9d400/mmh3-5.2.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:99bb6a4d809aa4e528ddfe2c85dd5239b78b9dd14be62cca0329db78505e7b50", size = 120784 }, - { url = "https://files.pythonhosted.org/packages/77/1c/919d9171fcbdcdab242e06394464ccf546f7d0f3b31e0d1e3a630398782e/mmh3-5.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1f8d8b627799f4e2fcc7c034fed8f5f24dc7724ff52f69838a3d6d15f1ad4765", size = 99137 }, - { url = "https://files.pythonhosted.org/packages/66/8a/1eebef5bd6633d36281d9fc83cf2e9ba1ba0e1a77dff92aacab83001cee4/mmh3-5.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b5995088dd7023d2d9f310a0c67de5a2b2e06a570ecfd00f9ff4ab94a67cde43", size = 98664 }, - { url = "https://files.pythonhosted.org/packages/13/41/a5d981563e2ee682b21fb65e29cc0f517a6734a02b581359edd67f9d0360/mmh3-5.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1a5f4d2e59d6bba8ef01b013c472741835ad961e7c28f50c82b27c57748744a4", size = 106459 }, - { url = "https://files.pythonhosted.org/packages/24/31/342494cd6ab792d81e083680875a2c50fa0c5df475ebf0b67784f13e4647/mmh3-5.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fd6e6c3d90660d085f7e73710eab6f5545d4854b81b0135a3526e797009dbda3", size = 110038 }, - { url = "https://files.pythonhosted.org/packages/28/44/efda282170a46bb4f19c3e2b90536513b1d821c414c28469a227ca5a1789/mmh3-5.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c4a2f3d83879e3de2eb8cbf562e71563a8ed15ee9b9c2e77ca5d9f73072ac15c", size = 97545 }, - { url = "https://files.pythonhosted.org/packages/68/8f/534ae319c6e05d714f437e7206f78c17e66daca88164dff70286b0e8ea0c/mmh3-5.2.0-cp312-cp312-win32.whl", hash = "sha256:2421b9d665a0b1ad724ec7332fb5a98d075f50bc51a6ff854f3a1882bd650d49", size = 40805 }, - { url = "https://files.pythonhosted.org/packages/b8/f6/f6abdcfefcedab3c964868048cfe472764ed358c2bf6819a70dd4ed4ed3a/mmh3-5.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d80005b7634a3a2220f81fbeb94775ebd12794623bb2e1451701ea732b4aa3", size = 41597 }, - { url = "https://files.pythonhosted.org/packages/15/fd/f7420e8cbce45c259c770cac5718badf907b302d3a99ec587ba5ce030237/mmh3-5.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:3d6bfd9662a20c054bc216f861fa330c2dac7c81e7fb8307b5e32ab5b9b4d2e0", size = 39350 }, + { url = "https://files.pythonhosted.org/packages/f7/87/399567b3796e134352e11a8b973cd470c06b2ecfad5468fe580833be442b/mmh3-5.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7901c893e704ee3c65f92d39b951f8f34ccf8e8566768c58103fb10e55afb8c1", size = 56107, upload-time = "2025-07-29T07:41:57.07Z" }, + { url = "https://files.pythonhosted.org/packages/c3/09/830af30adf8678955b247d97d3d9543dd2fd95684f3cd41c0cd9d291da9f/mmh3-5.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5f5536b1cbfa72318ab3bfc8a8188b949260baed186b75f0abc75b95d8c051", size = 40635, upload-time = "2025-07-29T07:41:57.903Z" }, + { url = "https://files.pythonhosted.org/packages/07/14/eaba79eef55b40d653321765ac5e8f6c9ac38780b8a7c2a2f8df8ee0fb72/mmh3-5.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cedac4f4054b8f7859e5aed41aaa31ad03fce6851901a7fdc2af0275ac533c10", size = 40078, upload-time = "2025-07-29T07:41:58.772Z" }, + { url = "https://files.pythonhosted.org/packages/bb/26/83a0f852e763f81b2265d446b13ed6d49ee49e1fc0c47b9655977e6f3d81/mmh3-5.2.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eb756caf8975882630ce4e9fbbeb9d3401242a72528230422c9ab3a0d278e60c", size = 97262, upload-time = "2025-07-29T07:41:59.678Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/7d/b7133b10d12239aeaebf6878d7eaf0bf7d3738c44b4aba3c564588f6d802/mmh3-5.2.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:097e13c8b8a66c5753c6968b7640faefe85d8e38992703c1f666eda6ef4c3762", size = 103118, upload-time = "2025-07-29T07:42:01.197Z" }, + { url = "https://files.pythonhosted.org/packages/7b/3e/62f0b5dce2e22fd5b7d092aba285abd7959ea2b17148641e029f2eab1ffa/mmh3-5.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7c0c7845566b9686480e6a7e9044db4afb60038d5fabd19227443f0104eeee4", size = 106072, upload-time = "2025-07-29T07:42:02.601Z" }, + { url = "https://files.pythonhosted.org/packages/66/84/ea88bb816edfe65052c757a1c3408d65c4201ddbd769d4a287b0f1a628b2/mmh3-5.2.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:61ac226af521a572700f863d6ecddc6ece97220ce7174e311948ff8c8919a363", size = 112925, upload-time = "2025-07-29T07:42:03.632Z" }, + { url = "https://files.pythonhosted.org/packages/2e/13/c9b1c022807db575fe4db806f442d5b5784547e2e82cff36133e58ea31c7/mmh3-5.2.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:582f9dbeefe15c32a5fa528b79b088b599a1dfe290a4436351c6090f90ddebb8", size = 120583, upload-time = "2025-07-29T07:42:04.991Z" }, + { url = "https://files.pythonhosted.org/packages/8a/5f/0e2dfe1a38f6a78788b7eb2b23432cee24623aeabbc907fed07fc17d6935/mmh3-5.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2ebfc46b39168ab1cd44670a32ea5489bcbc74a25795c61b6d888c5c2cf654ed", size = 99127, upload-time = "2025-07-29T07:42:05.929Z" }, + { url = "https://files.pythonhosted.org/packages/77/27/aefb7d663b67e6a0c4d61a513c83e39ba2237e8e4557fa7122a742a23de5/mmh3-5.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1556e31e4bd0ac0c17eaf220be17a09c171d7396919c3794274cb3415a9d3646", size = 98544, upload-time = "2025-07-29T07:42:06.87Z" }, + { url = "https://files.pythonhosted.org/packages/ab/97/a21cc9b1a7c6e92205a1b5fa030cdf62277d177570c06a239eca7bd6dd32/mmh3-5.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:81df0dae22cd0da87f1c978602750f33d17fb3d21fb0f326c89dc89834fea79b", size = 106262, upload-time = "2025-07-29T07:42:07.804Z" }, + { url = "https://files.pythonhosted.org/packages/43/18/db19ae82ea63c8922a880e1498a75342311f8aa0c581c4dd07711473b5f7/mmh3-5.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:eba01ec3bd4a49b9ac5ca2bc6a73ff5f3af53374b8556fcc2966dd2af9eb7779", size = 109824, upload-time = "2025-07-29T07:42:08.735Z" }, + { url = "https://files.pythonhosted.org/packages/9f/f5/41dcf0d1969125fc6f61d8618b107c79130b5af50b18a4651210ea52ab40/mmh3-5.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9a011469b47b752e7d20de296bb34591cdfcbe76c99c2e863ceaa2aa61113d2", size = 97255, upload-time = "2025-07-29T07:42:09.706Z" }, + { url = "https://files.pythonhosted.org/packages/32/b3/cce9eaa0efac1f0e735bb178ef9d1d2887b4927fe0ec16609d5acd492dda/mmh3-5.2.0-cp311-cp311-win32.whl", hash = "sha256:bc44fc2b886243d7c0d8daeb37864e16f232e5b56aaec27cc781d848264cfd28", size = 40779, upload-time = "2025-07-29T07:42:10.546Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e9/3fa0290122e6d5a7041b50ae500b8a9f4932478a51e48f209a3879fe0b9b/mmh3-5.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ebf241072cf2777a492d0e09252f8cc2b3edd07dfdb9404b9757bffeb4f2cee", size = 41549, upload-time = "2025-07-29T07:42:11.399Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/54/c277475b4102588e6f06b2e9095ee758dfe31a149312cdbf62d39a9f5c30/mmh3-5.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:b5f317a727bba0e633a12e71228bc6a4acb4f471a98b1c003163b917311ea9a9", size = 39336, upload-time = "2025-07-29T07:42:12.209Z" }, + { url = "https://files.pythonhosted.org/packages/bf/6a/d5aa7edb5c08e0bd24286c7d08341a0446f9a2fbbb97d96a8a6dd81935ee/mmh3-5.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:384eda9361a7bf83a85e09447e1feafe081034af9dd428893701b959230d84be", size = 56141, upload-time = "2025-07-29T07:42:13.456Z" }, + { url = "https://files.pythonhosted.org/packages/08/49/131d0fae6447bc4a7299ebdb1a6fb9d08c9f8dcf97d75ea93e8152ddf7ab/mmh3-5.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2c9da0d568569cc87315cb063486d761e38458b8ad513fedd3dc9263e1b81bcd", size = 40681, upload-time = "2025-07-29T07:42:14.306Z" }, + { url = "https://files.pythonhosted.org/packages/8f/6f/9221445a6bcc962b7f5ff3ba18ad55bba624bacdc7aa3fc0a518db7da8ec/mmh3-5.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86d1be5d63232e6eb93c50881aea55ff06eb86d8e08f9b5417c8c9b10db9db96", size = 40062, upload-time = "2025-07-29T07:42:15.08Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d4/6bb2d0fef81401e0bb4c297d1eb568b767de4ce6fc00890bc14d7b51ecc4/mmh3-5.2.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bf7bee43e17e81671c447e9c83499f53d99bf440bc6d9dc26a841e21acfbe094", size = 97333, upload-time = "2025-07-29T07:42:16.436Z" }, + { url = "https://files.pythonhosted.org/packages/44/e0/ccf0daff8134efbb4fbc10a945ab53302e358c4b016ada9bf97a6bdd50c1/mmh3-5.2.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7aa18cdb58983ee660c9c400b46272e14fa253c675ed963d3812487f8ca42037", size = 103310, upload-time = "2025-07-29T07:42:17.796Z" }, + { url = "https://files.pythonhosted.org/packages/02/63/1965cb08a46533faca0e420e06aff8bbaf9690a6f0ac6ae6e5b2e4544687/mmh3-5.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9d032488fcec32d22be6542d1a836f00247f40f320844dbb361393b5b22773", size = 106178, upload-time = "2025-07-29T07:42:19.281Z" }, + { url = "https://files.pythonhosted.org/packages/c2/41/c883ad8e2c234013f27f92061200afc11554ea55edd1bcf5e1accd803a85/mmh3-5.2.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1861fb6b1d0453ed7293200139c0a9011eeb1376632e048e3766945b13313c5", size = 113035, upload-time = "2025-07-29T07:42:20.356Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/1ccade8b1fa625d634a18bab7bf08a87457e09d5ec8cf83ca07cbea9d400/mmh3-5.2.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:99bb6a4d809aa4e528ddfe2c85dd5239b78b9dd14be62cca0329db78505e7b50", size = 120784, upload-time = "2025-07-29T07:42:21.377Z" }, + { url = "https://files.pythonhosted.org/packages/77/1c/919d9171fcbdcdab242e06394464ccf546f7d0f3b31e0d1e3a630398782e/mmh3-5.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1f8d8b627799f4e2fcc7c034fed8f5f24dc7724ff52f69838a3d6d15f1ad4765", size = 99137, upload-time = "2025-07-29T07:42:22.344Z" }, + { url = "https://files.pythonhosted.org/packages/66/8a/1eebef5bd6633d36281d9fc83cf2e9ba1ba0e1a77dff92aacab83001cee4/mmh3-5.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b5995088dd7023d2d9f310a0c67de5a2b2e06a570ecfd00f9ff4ab94a67cde43", size = 98664, upload-time = 
"2025-07-29T07:42:23.269Z" }, + { url = "https://files.pythonhosted.org/packages/13/41/a5d981563e2ee682b21fb65e29cc0f517a6734a02b581359edd67f9d0360/mmh3-5.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1a5f4d2e59d6bba8ef01b013c472741835ad961e7c28f50c82b27c57748744a4", size = 106459, upload-time = "2025-07-29T07:42:24.238Z" }, + { url = "https://files.pythonhosted.org/packages/24/31/342494cd6ab792d81e083680875a2c50fa0c5df475ebf0b67784f13e4647/mmh3-5.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fd6e6c3d90660d085f7e73710eab6f5545d4854b81b0135a3526e797009dbda3", size = 110038, upload-time = "2025-07-29T07:42:25.629Z" }, + { url = "https://files.pythonhosted.org/packages/28/44/efda282170a46bb4f19c3e2b90536513b1d821c414c28469a227ca5a1789/mmh3-5.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c4a2f3d83879e3de2eb8cbf562e71563a8ed15ee9b9c2e77ca5d9f73072ac15c", size = 97545, upload-time = "2025-07-29T07:42:27.04Z" }, + { url = "https://files.pythonhosted.org/packages/68/8f/534ae319c6e05d714f437e7206f78c17e66daca88164dff70286b0e8ea0c/mmh3-5.2.0-cp312-cp312-win32.whl", hash = "sha256:2421b9d665a0b1ad724ec7332fb5a98d075f50bc51a6ff854f3a1882bd650d49", size = 40805, upload-time = "2025-07-29T07:42:28.032Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f6/f6abdcfefcedab3c964868048cfe472764ed358c2bf6819a70dd4ed4ed3a/mmh3-5.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d80005b7634a3a2220f81fbeb94775ebd12794623bb2e1451701ea732b4aa3", size = 41597, upload-time = "2025-07-29T07:42:28.894Z" }, + { url = "https://files.pythonhosted.org/packages/15/fd/f7420e8cbce45c259c770cac5718badf907b302d3a99ec587ba5ce030237/mmh3-5.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:3d6bfd9662a20c054bc216f861fa330c2dac7c81e7fb8307b5e32ab5b9b4d2e0", size = 39350, upload-time = "2025-07-29T07:42:29.794Z" }, ] [[package]] @@ -3453,18 +3494,18 @@ dependencies = [ { name = "pymysql" }, { name = "sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/01/03/2ef4de1c8d970288f018b6b63439563336c51f26f57706dc51e4c395fdbe/mo_vector-0.1.13.tar.gz", hash = "sha256:8526c37e99157a0c9866bf3868600e877980464eccb212f8ea71971c0630eb69", size = 16926 } +sdist = { url = "https://files.pythonhosted.org/packages/01/03/2ef4de1c8d970288f018b6b63439563336c51f26f57706dc51e4c395fdbe/mo_vector-0.1.13.tar.gz", hash = "sha256:8526c37e99157a0c9866bf3868600e877980464eccb212f8ea71971c0630eb69", size = 16926, upload-time = "2025-06-18T09:27:27.906Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/e7/514f5cf5909f96adf09b78146a9e5c92f82abcc212bc3f88456bf2640c23/mo_vector-0.1.13-py3-none-any.whl", hash = "sha256:f7d619acc3e92ed59631e6b3a12508240e22cf428c87daf022c0d87fbd5da459", size = 20091 }, + { url = "https://files.pythonhosted.org/packages/0d/e7/514f5cf5909f96adf09b78146a9e5c92f82abcc212bc3f88456bf2640c23/mo_vector-0.1.13-py3-none-any.whl", hash = "sha256:f7d619acc3e92ed59631e6b3a12508240e22cf428c87daf022c0d87fbd5da459", size = 20091, upload-time = "2025-06-18T09:27:26.899Z" }, ] [[package]] name = "mpmath" version = "1.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 } +sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = 
"sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106, upload-time = "2023-03-07T16:47:11.061Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 }, + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, ] [[package]] @@ -3476,9 +3517,9 @@ dependencies = [ { name = "pyjwt", extra = ["crypto"] }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961 } +sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961, upload-time = "2025-09-22T23:05:48.989Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987 }, + { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987, upload-time = "2025-09-22T23:05:47.294Z" }, ] [[package]] @@ -3488,54 +3529,54 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "msal" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315 } +sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315, upload-time = "2025-03-14T23:51:03.902Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583 }, + { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, ] [[package]] name = "multidict" version = "6.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834 } +sdist = { url = 
"https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604 }, - { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715 }, - { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332 }, - { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212 }, - { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671 }, - { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491 }, - { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322 }, - { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694 }, - { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715 }, - { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189 }, - { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845 }, - { url = 
"https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374 }, - { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345 }, - { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940 }, - { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229 }, - { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308 }, - { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037 }, - { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023 }, - { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877 }, - { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467 }, - { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834 }, - { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545 }, - { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305 }, - { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363 }, - { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375 }, - { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346 }, - { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107 }, - { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592 }, - { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024 }, - { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484 }, - { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579 }, - { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654 }, - { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511 }, - { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895 }, - { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073 }, - { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226 }, - { url = 
"https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317 }, + { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, + { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time = "2025-10-06T14:48:58.042Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" }, + { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" }, + { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = "2025-10-06T14:49:10.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, + { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = "2025-10-06T14:49:16.871Z" }, + { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, + { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, + { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, + { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] [[package]] @@ -3547,21 +3588,21 @@ dependencies = [ { name = "pathspec" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570 } +sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570, upload-time = "2025-07-31T07:54:19.204Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/46/cf/eadc80c4e0a70db1c08921dcc220357ba8ab2faecb4392e3cebeb10edbfa/mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58", size = 10921009 }, - { url = "https://files.pythonhosted.org/packages/5d/c1/c869d8c067829ad30d9bdae051046561552516cfb3a14f7f0347b7d973ee/mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5", size = 10047482 }, - { url = "https://files.pythonhosted.org/packages/98/b9/803672bab3fe03cee2e14786ca056efda4bb511ea02dadcedde6176d06d0/mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd", size = 11832883 }, - { url = "https://files.pythonhosted.org/packages/88/fb/fcdac695beca66800918c18697b48833a9a6701de288452b6715a98cfee1/mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b", size = 12566215 }, - { url = "https://files.pythonhosted.org/packages/7f/37/a932da3d3dace99ee8eb2043b6ab03b6768c36eb29a02f98f46c18c0da0e/mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5", size = 12751956 }, - { url = "https://files.pythonhosted.org/packages/8c/cf/6438a429e0f2f5cab8bc83e53dbebfa666476f40ee322e13cac5e64b79e7/mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b", size = 9507307 }, - { url = "https://files.pythonhosted.org/packages/17/a2/7034d0d61af8098ec47902108553122baa0f438df8a713be860f7407c9e6/mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", size = 11086295 }, - { url = "https://files.pythonhosted.org/packages/14/1f/19e7e44b594d4b12f6ba8064dbe136505cec813549ca3e5191e40b1d3cc2/mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", size = 10112355 }, - { url = "https://files.pythonhosted.org/packages/5b/69/baa33927e29e6b4c55d798a9d44db5d394072eef2bdc18c3e2048c9ed1e9/mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", size = 11875285 }, - { url = "https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895 }, - { url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025 }, - { url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664 }, - { url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411 }, + { url = "https://files.pythonhosted.org/packages/46/cf/eadc80c4e0a70db1c08921dcc220357ba8ab2faecb4392e3cebeb10edbfa/mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58", size = 10921009, upload-time = "2025-07-31T07:53:23.037Z" }, + { url = "https://files.pythonhosted.org/packages/5d/c1/c869d8c067829ad30d9bdae051046561552516cfb3a14f7f0347b7d973ee/mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5", size = 10047482, upload-time = "2025-07-31T07:53:26.151Z" }, + { url = "https://files.pythonhosted.org/packages/98/b9/803672bab3fe03cee2e14786ca056efda4bb511ea02dadcedde6176d06d0/mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd", size = 11832883, upload-time = "2025-07-31T07:53:47.948Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/fb/fcdac695beca66800918c18697b48833a9a6701de288452b6715a98cfee1/mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b", size = 12566215, upload-time = "2025-07-31T07:54:04.031Z" }, + { url = "https://files.pythonhosted.org/packages/7f/37/a932da3d3dace99ee8eb2043b6ab03b6768c36eb29a02f98f46c18c0da0e/mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5", size = 12751956, upload-time = "2025-07-31T07:53:36.263Z" }, + { url = "https://files.pythonhosted.org/packages/8c/cf/6438a429e0f2f5cab8bc83e53dbebfa666476f40ee322e13cac5e64b79e7/mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b", size = 9507307, upload-time = "2025-07-31T07:53:59.734Z" }, + { url = "https://files.pythonhosted.org/packages/17/a2/7034d0d61af8098ec47902108553122baa0f438df8a713be860f7407c9e6/mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", size = 11086295, upload-time = "2025-07-31T07:53:28.124Z" }, + { url = "https://files.pythonhosted.org/packages/14/1f/19e7e44b594d4b12f6ba8064dbe136505cec813549ca3e5191e40b1d3cc2/mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", size = 10112355, upload-time = "2025-07-31T07:53:21.121Z" }, + { url = "https://files.pythonhosted.org/packages/5b/69/baa33927e29e6b4c55d798a9d44db5d394072eef2bdc18c3e2048c9ed1e9/mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", size = 11875285, upload-time = "2025-07-31T07:53:55.293Z" }, + { url = "https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895, upload-time = "2025-07-31T07:53:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025, upload-time = "2025-07-31T07:54:17.125Z" }, + { url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664, upload-time = "2025-07-31T07:54:12.842Z" }, + { url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" }, ] [[package]] @@ -3571,46 +3612,46 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/af/f1/00aea4f91501728e7af7e899ce3a75d48d6df97daa720db11e46730fa123/mypy_boto3_bedrock_runtime-1.41.2.tar.gz", hash = 
"sha256:ba2c11f2f18116fd69e70923389ce68378fa1620f70e600efb354395a1a9e0e5", size = 28890 } +sdist = { url = "https://files.pythonhosted.org/packages/af/f1/00aea4f91501728e7af7e899ce3a75d48d6df97daa720db11e46730fa123/mypy_boto3_bedrock_runtime-1.41.2.tar.gz", hash = "sha256:ba2c11f2f18116fd69e70923389ce68378fa1620f70e600efb354395a1a9e0e5", size = 28890, upload-time = "2025-11-21T20:35:30.074Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/cc/96a2af58c632701edb5be1dda95434464da43df40ae868a1ab1ddf033839/mypy_boto3_bedrock_runtime-1.41.2-py3-none-any.whl", hash = "sha256:a720ff1e98cf10723c37a61a46cff220b190c55b8fb57d4397e6cf286262cf02", size = 34967 }, + { url = "https://files.pythonhosted.org/packages/a7/cc/96a2af58c632701edb5be1dda95434464da43df40ae868a1ab1ddf033839/mypy_boto3_bedrock_runtime-1.41.2-py3-none-any.whl", hash = "sha256:a720ff1e98cf10723c37a61a46cff220b190c55b8fb57d4397e6cf286262cf02", size = 34967, upload-time = "2025-11-21T20:35:27.655Z" }, ] [[package]] name = "mypy-extensions" version = "1.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963 }, + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, ] [[package]] name = "mysql-connector-python" version = "9.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/39/33/b332b001bc8c5ee09255a0d4b09a254da674450edd6a3e5228b245ca82a0/mysql_connector_python-9.5.0.tar.gz", hash = "sha256:92fb924285a86d8c146ebd63d94f9eaefa548da7813bc46271508fdc6cc1d596", size = 12251077 } +sdist = { url = "https://files.pythonhosted.org/packages/39/33/b332b001bc8c5ee09255a0d4b09a254da674450edd6a3e5228b245ca82a0/mysql_connector_python-9.5.0.tar.gz", hash = "sha256:92fb924285a86d8c146ebd63d94f9eaefa548da7813bc46271508fdc6cc1d596", size = 12251077, upload-time = "2025-10-22T09:05:45.423Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/03/77347d58b0027ce93a41858477e08422e498c6ebc24348b1f725ed7a67ae/mysql_connector_python-9.5.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:653e70cd10cf2d18dd828fae58dff5f0f7a5cf7e48e244f2093314dddf84a4b9", size = 17578984 }, - { url = "https://files.pythonhosted.org/packages/a5/bb/0f45c7ee55ebc56d6731a593d85c0e7f25f83af90a094efebfd5be9fe010/mysql_connector_python-9.5.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:5add93f60b3922be71ea31b89bc8a452b876adbb49262561bd559860dae96b3f", size = 18445067 }, - { url = 
"https://files.pythonhosted.org/packages/1c/ec/054de99d4aa50d851a37edca9039280f7194cc1bfd30aab38f5bd6977ebe/mysql_connector_python-9.5.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:20950a5e44896c03e3dc93ceb3a5e9b48c9acae18665ca6e13249b3fe5b96811", size = 33668029 }, - { url = "https://files.pythonhosted.org/packages/90/a2/e6095dc3a7ad5c959fe4a65681db63af131f572e57cdffcc7816bc84e3ad/mysql_connector_python-9.5.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:7fdd3205b9242c284019310fa84437f3357b13f598e3f9b5d80d337d4a6406b8", size = 34101687 }, - { url = "https://files.pythonhosted.org/packages/9c/88/bc13c33fca11acaf808bd1809d8602d78f5bb84f7b1e7b1a288c383a14fd/mysql_connector_python-9.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:c021d8b0830958b28712c70c53b206b4cf4766948dae201ea7ca588a186605e0", size = 16511749 }, - { url = "https://files.pythonhosted.org/packages/02/89/167ebee82f4b01ba7339c241c3cc2518886a2be9f871770a1efa81b940a0/mysql_connector_python-9.5.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:a72c2ef9d50b84f3c567c31b3bf30901af740686baa2a4abead5f202e0b7ea61", size = 17581904 }, - { url = "https://files.pythonhosted.org/packages/67/46/630ca969ce10b30fdc605d65dab4a6157556d8cc3b77c724f56c2d83cb79/mysql_connector_python-9.5.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:bd9ba5a946cfd3b3b2688a75135357e862834b0321ed936fd968049be290872b", size = 18448195 }, - { url = "https://files.pythonhosted.org/packages/f6/87/4c421f41ad169d8c9065ad5c46673c7af889a523e4899c1ac1d6bfd37262/mysql_connector_python-9.5.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:5ef7accbdf8b5f6ec60d2a1550654b7e27e63bf6f7b04020d5fb4191fb02bc4d", size = 33668638 }, - { url = "https://files.pythonhosted.org/packages/a6/01/67cf210d50bfefbb9224b9a5c465857c1767388dade1004c903c8e22a991/mysql_connector_python-9.5.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:a6e0a4a0274d15e3d4c892ab93f58f46431222117dba20608178dfb2cc4d5fd8", size = 34102899 }, - { url = "https://files.pythonhosted.org/packages/cd/ef/3d1a67d503fff38cc30e11d111cf28f0976987fb175f47b10d44494e1080/mysql_connector_python-9.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:b6c69cb37600b7e22f476150034e2afbd53342a175e20aea887f8158fc5e3ff6", size = 16512684 }, - { url = "https://files.pythonhosted.org/packages/95/e1/45373c06781340c7b74fe9b88b85278ac05321889a307eaa5be079a997d4/mysql_connector_python-9.5.0-py2.py3-none-any.whl", hash = "sha256:ace137b88eb6fdafa1e5b2e03ac76ce1b8b1844b3a4af1192a02ae7c1a45bdee", size = 479047 }, + { url = "https://files.pythonhosted.org/packages/05/03/77347d58b0027ce93a41858477e08422e498c6ebc24348b1f725ed7a67ae/mysql_connector_python-9.5.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:653e70cd10cf2d18dd828fae58dff5f0f7a5cf7e48e244f2093314dddf84a4b9", size = 17578984, upload-time = "2025-10-22T09:01:41.213Z" }, + { url = "https://files.pythonhosted.org/packages/a5/bb/0f45c7ee55ebc56d6731a593d85c0e7f25f83af90a094efebfd5be9fe010/mysql_connector_python-9.5.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:5add93f60b3922be71ea31b89bc8a452b876adbb49262561bd559860dae96b3f", size = 18445067, upload-time = "2025-10-22T09:01:43.215Z" }, + { url = "https://files.pythonhosted.org/packages/1c/ec/054de99d4aa50d851a37edca9039280f7194cc1bfd30aab38f5bd6977ebe/mysql_connector_python-9.5.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:20950a5e44896c03e3dc93ceb3a5e9b48c9acae18665ca6e13249b3fe5b96811", size = 33668029, upload-time = "2025-10-22T09:01:45.74Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/a2/e6095dc3a7ad5c959fe4a65681db63af131f572e57cdffcc7816bc84e3ad/mysql_connector_python-9.5.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:7fdd3205b9242c284019310fa84437f3357b13f598e3f9b5d80d337d4a6406b8", size = 34101687, upload-time = "2025-10-22T09:01:48.462Z" }, + { url = "https://files.pythonhosted.org/packages/9c/88/bc13c33fca11acaf808bd1809d8602d78f5bb84f7b1e7b1a288c383a14fd/mysql_connector_python-9.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:c021d8b0830958b28712c70c53b206b4cf4766948dae201ea7ca588a186605e0", size = 16511749, upload-time = "2025-10-22T09:01:51.032Z" }, + { url = "https://files.pythonhosted.org/packages/02/89/167ebee82f4b01ba7339c241c3cc2518886a2be9f871770a1efa81b940a0/mysql_connector_python-9.5.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:a72c2ef9d50b84f3c567c31b3bf30901af740686baa2a4abead5f202e0b7ea61", size = 17581904, upload-time = "2025-10-22T09:01:53.21Z" }, + { url = "https://files.pythonhosted.org/packages/67/46/630ca969ce10b30fdc605d65dab4a6157556d8cc3b77c724f56c2d83cb79/mysql_connector_python-9.5.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:bd9ba5a946cfd3b3b2688a75135357e862834b0321ed936fd968049be290872b", size = 18448195, upload-time = "2025-10-22T09:01:55.378Z" }, + { url = "https://files.pythonhosted.org/packages/f6/87/4c421f41ad169d8c9065ad5c46673c7af889a523e4899c1ac1d6bfd37262/mysql_connector_python-9.5.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:5ef7accbdf8b5f6ec60d2a1550654b7e27e63bf6f7b04020d5fb4191fb02bc4d", size = 33668638, upload-time = "2025-10-22T09:01:57.896Z" }, + { url = "https://files.pythonhosted.org/packages/a6/01/67cf210d50bfefbb9224b9a5c465857c1767388dade1004c903c8e22a991/mysql_connector_python-9.5.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:a6e0a4a0274d15e3d4c892ab93f58f46431222117dba20608178dfb2cc4d5fd8", size = 34102899, upload-time = "2025-10-22T09:02:00.291Z" }, + { url = "https://files.pythonhosted.org/packages/cd/ef/3d1a67d503fff38cc30e11d111cf28f0976987fb175f47b10d44494e1080/mysql_connector_python-9.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:b6c69cb37600b7e22f476150034e2afbd53342a175e20aea887f8158fc5e3ff6", size = 16512684, upload-time = "2025-10-22T09:02:02.411Z" }, + { url = "https://files.pythonhosted.org/packages/95/e1/45373c06781340c7b74fe9b88b85278ac05321889a307eaa5be079a997d4/mysql_connector_python-9.5.0-py2.py3-none-any.whl", hash = "sha256:ace137b88eb6fdafa1e5b2e03ac76ce1b8b1844b3a4af1192a02ae7c1a45bdee", size = 479047, upload-time = "2025-10-22T09:02:27.809Z" }, ] [[package]] name = "networkx" version = "3.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/fc/7b6fd4d22c8c4dc5704430140d8b3f520531d4fe7328b8f8d03f5a7950e8/networkx-3.6.tar.gz", hash = "sha256:285276002ad1f7f7da0f7b42f004bcba70d381e936559166363707fdad3d72ad", size = 2511464 } +sdist = { url = "https://files.pythonhosted.org/packages/e8/fc/7b6fd4d22c8c4dc5704430140d8b3f520531d4fe7328b8f8d03f5a7950e8/networkx-3.6.tar.gz", hash = "sha256:285276002ad1f7f7da0f7b42f004bcba70d381e936559166363707fdad3d72ad", size = 2511464, upload-time = "2025-11-24T03:03:47.158Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/c7/d64168da60332c17d24c0d2f08bdf3987e8d1ae9d84b5bbd0eec2eb26a55/networkx-3.6-py3-none-any.whl", hash = "sha256:cdb395b105806062473d3be36458d8f1459a4e4b98e236a66c3a48996e07684f", size = 2063713 }, + { url = 
"https://files.pythonhosted.org/packages/07/c7/d64168da60332c17d24c0d2f08bdf3987e8d1ae9d84b5bbd0eec2eb26a55/networkx-3.6-py3-none-any.whl", hash = "sha256:cdb395b105806062473d3be36458d8f1459a4e4b98e236a66c3a48996e07684f", size = 2063713, upload-time = "2025-11-24T03:03:45.21Z" }, ] [[package]] @@ -3623,25 +3664,25 @@ dependencies = [ { name = "regex" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f9/76/3a5e4312c19a028770f86fd7c058cf9f4ec4321c6cf7526bab998a5b683c/nltk-3.9.2.tar.gz", hash = "sha256:0f409e9b069ca4177c1903c3e843eef90c7e92992fa4931ae607da6de49e1419", size = 2887629 } +sdist = { url = "https://files.pythonhosted.org/packages/f9/76/3a5e4312c19a028770f86fd7c058cf9f4ec4321c6cf7526bab998a5b683c/nltk-3.9.2.tar.gz", hash = "sha256:0f409e9b069ca4177c1903c3e843eef90c7e92992fa4931ae607da6de49e1419", size = 2887629, upload-time = "2025-10-01T07:19:23.764Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/90/81ac364ef94209c100e12579629dc92bf7a709a84af32f8c551b02c07e94/nltk-3.9.2-py3-none-any.whl", hash = "sha256:1e209d2b3009110635ed9709a67a1a3e33a10f799490fa71cf4bec218c11c88a", size = 1513404 }, + { url = "https://files.pythonhosted.org/packages/60/90/81ac364ef94209c100e12579629dc92bf7a709a84af32f8c551b02c07e94/nltk-3.9.2-py3-none-any.whl", hash = "sha256:1e209d2b3009110635ed9709a67a1a3e33a10f799490fa71cf4bec218c11c88a", size = 1513404, upload-time = "2025-10-01T07:19:21.648Z" }, ] [[package]] name = "nodejs-wheel-binaries" version = "24.11.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/89/da307731fdbb05a5f640b26de5b8ac0dc463fef059162accfc89e32f73bc/nodejs_wheel_binaries-24.11.1.tar.gz", hash = "sha256:413dfffeadfb91edb4d8256545dea797c237bba9b3faefea973cde92d96bb922", size = 8059 } +sdist = { url = "https://files.pythonhosted.org/packages/e4/89/da307731fdbb05a5f640b26de5b8ac0dc463fef059162accfc89e32f73bc/nodejs_wheel_binaries-24.11.1.tar.gz", hash = "sha256:413dfffeadfb91edb4d8256545dea797c237bba9b3faefea973cde92d96bb922", size = 8059, upload-time = "2025-11-18T18:21:58.207Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/5f/be5a4112e678143d4c15264d918f9a2dc086905c6426eb44515cf391a958/nodejs_wheel_binaries-24.11.1-py2.py3-none-macosx_13_0_arm64.whl", hash = "sha256:0e14874c3579def458245cdbc3239e37610702b0aa0975c1dc55e2cb80e42102", size = 55114309 }, - { url = "https://files.pythonhosted.org/packages/fa/1c/2e9d6af2ea32b65928c42b3e5baa7a306870711d93c3536cb25fc090a80d/nodejs_wheel_binaries-24.11.1-py2.py3-none-macosx_13_0_x86_64.whl", hash = "sha256:c2741525c9874b69b3e5a6d6c9179a6fe484ea0c3d5e7b7c01121c8e5d78b7e2", size = 55285957 }, - { url = "https://files.pythonhosted.org/packages/d0/79/35696d7ba41b1bd35ef8682f13d46ba38c826c59e58b86b267458eb53d87/nodejs_wheel_binaries-24.11.1-py2.py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:5ef598101b0fb1c2bf643abb76dfbf6f76f1686198ed17ae46009049ee83c546", size = 59645875 }, - { url = "https://files.pythonhosted.org/packages/b4/98/2a9694adee0af72bc602a046b0632a0c89e26586090c558b1c9199b187cc/nodejs_wheel_binaries-24.11.1-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:cde41d5e4705266688a8d8071debf4f8a6fcea264c61292782672ee75a6905f9", size = 60140941 }, - { url = "https://files.pythonhosted.org/packages/d0/d6/573e5e2cba9d934f5f89d0beab00c3315e2e6604eb4df0fcd1d80c5a07a8/nodejs_wheel_binaries-24.11.1-py2.py3-none-musllinux_1_2_aarch64.whl", hash = 
"sha256:78bc5bb889313b565df8969bb7423849a9c7fc218bf735ff0ce176b56b3e96f0", size = 61644243 }, - { url = "https://files.pythonhosted.org/packages/c7/e6/643234d5e94067df8ce8d7bba10f3804106668f7a1050aeb10fdd226ead4/nodejs_wheel_binaries-24.11.1-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c79a7e43869ccecab1cae8183778249cceb14ca2de67b5650b223385682c6239", size = 62225657 }, - { url = "https://files.pythonhosted.org/packages/4d/1c/2fb05127102a80225cab7a75c0e9edf88a0a1b79f912e1e36c7c1aaa8f4e/nodejs_wheel_binaries-24.11.1-py2.py3-none-win_amd64.whl", hash = "sha256:10197b1c9c04d79403501766f76508b0dac101ab34371ef8a46fcf51773497d0", size = 41322308 }, - { url = "https://files.pythonhosted.org/packages/ad/b7/bc0cdbc2cc3a66fcac82c79912e135a0110b37b790a14c477f18e18d90cd/nodejs_wheel_binaries-24.11.1-py2.py3-none-win_arm64.whl", hash = "sha256:376b9ea1c4bc1207878975dfeb604f7aa5668c260c6154dcd2af9d42f7734116", size = 39026497 }, + { url = "https://files.pythonhosted.org/packages/e4/5f/be5a4112e678143d4c15264d918f9a2dc086905c6426eb44515cf391a958/nodejs_wheel_binaries-24.11.1-py2.py3-none-macosx_13_0_arm64.whl", hash = "sha256:0e14874c3579def458245cdbc3239e37610702b0aa0975c1dc55e2cb80e42102", size = 55114309, upload-time = "2025-11-18T18:21:21.697Z" }, + { url = "https://files.pythonhosted.org/packages/fa/1c/2e9d6af2ea32b65928c42b3e5baa7a306870711d93c3536cb25fc090a80d/nodejs_wheel_binaries-24.11.1-py2.py3-none-macosx_13_0_x86_64.whl", hash = "sha256:c2741525c9874b69b3e5a6d6c9179a6fe484ea0c3d5e7b7c01121c8e5d78b7e2", size = 55285957, upload-time = "2025-11-18T18:21:27.177Z" }, + { url = "https://files.pythonhosted.org/packages/d0/79/35696d7ba41b1bd35ef8682f13d46ba38c826c59e58b86b267458eb53d87/nodejs_wheel_binaries-24.11.1-py2.py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:5ef598101b0fb1c2bf643abb76dfbf6f76f1686198ed17ae46009049ee83c546", size = 59645875, upload-time = "2025-11-18T18:21:33.004Z" }, + { url = "https://files.pythonhosted.org/packages/b4/98/2a9694adee0af72bc602a046b0632a0c89e26586090c558b1c9199b187cc/nodejs_wheel_binaries-24.11.1-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:cde41d5e4705266688a8d8071debf4f8a6fcea264c61292782672ee75a6905f9", size = 60140941, upload-time = "2025-11-18T18:21:37.228Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d6/573e5e2cba9d934f5f89d0beab00c3315e2e6604eb4df0fcd1d80c5a07a8/nodejs_wheel_binaries-24.11.1-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:78bc5bb889313b565df8969bb7423849a9c7fc218bf735ff0ce176b56b3e96f0", size = 61644243, upload-time = "2025-11-18T18:21:43.325Z" }, + { url = "https://files.pythonhosted.org/packages/c7/e6/643234d5e94067df8ce8d7bba10f3804106668f7a1050aeb10fdd226ead4/nodejs_wheel_binaries-24.11.1-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c79a7e43869ccecab1cae8183778249cceb14ca2de67b5650b223385682c6239", size = 62225657, upload-time = "2025-11-18T18:21:47.708Z" }, + { url = "https://files.pythonhosted.org/packages/4d/1c/2fb05127102a80225cab7a75c0e9edf88a0a1b79f912e1e36c7c1aaa8f4e/nodejs_wheel_binaries-24.11.1-py2.py3-none-win_amd64.whl", hash = "sha256:10197b1c9c04d79403501766f76508b0dac101ab34371ef8a46fcf51773497d0", size = 41322308, upload-time = "2025-11-18T18:21:51.347Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b7/bc0cdbc2cc3a66fcac82c79912e135a0110b37b790a14c477f18e18d90cd/nodejs_wheel_binaries-24.11.1-py2.py3-none-win_arm64.whl", hash = "sha256:376b9ea1c4bc1207878975dfeb604f7aa5668c260c6154dcd2af9d42f7734116", size = 39026497, upload-time = 
"2025-11-18T18:21:54.634Z" }, ] [[package]] @@ -3652,18 +3693,18 @@ dependencies = [ { name = "llvmlite" }, { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/20/33dbdbfe60e5fd8e3dbfde299d106279a33d9f8308346022316781368591/numba-0.62.1.tar.gz", hash = "sha256:7b774242aa890e34c21200a1fc62e5b5757d5286267e71103257f4e2af0d5161", size = 2749817 } +sdist = { url = "https://files.pythonhosted.org/packages/a3/20/33dbdbfe60e5fd8e3dbfde299d106279a33d9f8308346022316781368591/numba-0.62.1.tar.gz", hash = "sha256:7b774242aa890e34c21200a1fc62e5b5757d5286267e71103257f4e2af0d5161", size = 2749817, upload-time = "2025-09-29T10:46:31.551Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/dd/5f/8b3491dd849474f55e33c16ef55678ace1455c490555337899c35826836c/numba-0.62.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:f43e24b057714e480fe44bc6031de499e7cf8150c63eb461192caa6cc8530bc8", size = 2684279 }, - { url = "https://files.pythonhosted.org/packages/bf/18/71969149bfeb65a629e652b752b80167fe8a6a6f6e084f1f2060801f7f31/numba-0.62.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:57cbddc53b9ee02830b828a8428757f5c218831ccc96490a314ef569d8342b7b", size = 2687330 }, - { url = "https://files.pythonhosted.org/packages/0e/7d/403be3fecae33088027bc8a95dc80a2fda1e3beff3e0e5fc4374ada3afbe/numba-0.62.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:604059730c637c7885386521bb1b0ddcbc91fd56131a6dcc54163d6f1804c872", size = 3739727 }, - { url = "https://files.pythonhosted.org/packages/e0/c3/3d910d08b659a6d4c62ab3cd8cd93c4d8b7709f55afa0d79a87413027ff6/numba-0.62.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6c540880170bee817011757dc9049dba5a29db0c09b4d2349295991fe3ee55f", size = 3445490 }, - { url = "https://files.pythonhosted.org/packages/5b/82/9d425c2f20d9f0a37f7cb955945a553a00fa06a2b025856c3550227c5543/numba-0.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:03de6d691d6b6e2b76660ba0f38f37b81ece8b2cc524a62f2a0cfae2bfb6f9da", size = 2745550 }, - { url = "https://files.pythonhosted.org/packages/5e/fa/30fa6873e9f821c0ae755915a3ca444e6ff8d6a7b6860b669a3d33377ac7/numba-0.62.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:1b743b32f8fa5fff22e19c2e906db2f0a340782caf024477b97801b918cf0494", size = 2685346 }, - { url = "https://files.pythonhosted.org/packages/a9/d5/504ce8dc46e0dba2790c77e6b878ee65b60fe3e7d6d0006483ef6fde5a97/numba-0.62.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90fa21b0142bcf08ad8e32a97d25d0b84b1e921bc9423f8dda07d3652860eef6", size = 2688139 }, - { url = "https://files.pythonhosted.org/packages/50/5f/6a802741176c93f2ebe97ad90751894c7b0c922b52ba99a4395e79492205/numba-0.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6ef84d0ac19f1bf80431347b6f4ce3c39b7ec13f48f233a48c01e2ec06ecbc59", size = 3796453 }, - { url = "https://files.pythonhosted.org/packages/7e/df/efd21527d25150c4544eccc9d0b7260a5dec4b7e98b5a581990e05a133c0/numba-0.62.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9315cc5e441300e0ca07c828a627d92a6802bcbf27c5487f31ae73783c58da53", size = 3496451 }, - { url = "https://files.pythonhosted.org/packages/80/44/79bfdab12a02796bf4f1841630355c82b5a69933b1d50eb15c7fa37dabe8/numba-0.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:44e3aa6228039992f058f5ebfcfd372c83798e9464297bdad8cc79febcf7891e", size = 2745552 }, + { url = 
"https://files.pythonhosted.org/packages/dd/5f/8b3491dd849474f55e33c16ef55678ace1455c490555337899c35826836c/numba-0.62.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:f43e24b057714e480fe44bc6031de499e7cf8150c63eb461192caa6cc8530bc8", size = 2684279, upload-time = "2025-09-29T10:43:37.213Z" }, + { url = "https://files.pythonhosted.org/packages/bf/18/71969149bfeb65a629e652b752b80167fe8a6a6f6e084f1f2060801f7f31/numba-0.62.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:57cbddc53b9ee02830b828a8428757f5c218831ccc96490a314ef569d8342b7b", size = 2687330, upload-time = "2025-09-29T10:43:59.601Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7d/403be3fecae33088027bc8a95dc80a2fda1e3beff3e0e5fc4374ada3afbe/numba-0.62.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:604059730c637c7885386521bb1b0ddcbc91fd56131a6dcc54163d6f1804c872", size = 3739727, upload-time = "2025-09-29T10:42:45.922Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c3/3d910d08b659a6d4c62ab3cd8cd93c4d8b7709f55afa0d79a87413027ff6/numba-0.62.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6c540880170bee817011757dc9049dba5a29db0c09b4d2349295991fe3ee55f", size = 3445490, upload-time = "2025-09-29T10:43:12.692Z" }, + { url = "https://files.pythonhosted.org/packages/5b/82/9d425c2f20d9f0a37f7cb955945a553a00fa06a2b025856c3550227c5543/numba-0.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:03de6d691d6b6e2b76660ba0f38f37b81ece8b2cc524a62f2a0cfae2bfb6f9da", size = 2745550, upload-time = "2025-09-29T10:44:20.571Z" }, + { url = "https://files.pythonhosted.org/packages/5e/fa/30fa6873e9f821c0ae755915a3ca444e6ff8d6a7b6860b669a3d33377ac7/numba-0.62.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:1b743b32f8fa5fff22e19c2e906db2f0a340782caf024477b97801b918cf0494", size = 2685346, upload-time = "2025-09-29T10:43:43.677Z" }, + { url = "https://files.pythonhosted.org/packages/a9/d5/504ce8dc46e0dba2790c77e6b878ee65b60fe3e7d6d0006483ef6fde5a97/numba-0.62.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90fa21b0142bcf08ad8e32a97d25d0b84b1e921bc9423f8dda07d3652860eef6", size = 2688139, upload-time = "2025-09-29T10:44:04.894Z" }, + { url = "https://files.pythonhosted.org/packages/50/5f/6a802741176c93f2ebe97ad90751894c7b0c922b52ba99a4395e79492205/numba-0.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6ef84d0ac19f1bf80431347b6f4ce3c39b7ec13f48f233a48c01e2ec06ecbc59", size = 3796453, upload-time = "2025-09-29T10:42:52.771Z" }, + { url = "https://files.pythonhosted.org/packages/7e/df/efd21527d25150c4544eccc9d0b7260a5dec4b7e98b5a581990e05a133c0/numba-0.62.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9315cc5e441300e0ca07c828a627d92a6802bcbf27c5487f31ae73783c58da53", size = 3496451, upload-time = "2025-09-29T10:43:19.279Z" }, + { url = "https://files.pythonhosted.org/packages/80/44/79bfdab12a02796bf4f1841630355c82b5a69933b1d50eb15c7fa37dabe8/numba-0.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:44e3aa6228039992f058f5ebfcfd372c83798e9464297bdad8cc79febcf7891e", size = 2745552, upload-time = "2025-09-29T10:44:26.399Z" }, ] [[package]] @@ -3673,48 +3714,48 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cb/2f/fdba158c9dbe5caca9c3eca3eaffffb251f2fb8674bf8e2d0aed5f38d319/numexpr-2.14.1.tar.gz", hash = "sha256:4be00b1086c7b7a5c32e31558122b7b80243fe098579b170967da83f3152b48b", size = 119400 } +sdist 
= { url = "https://files.pythonhosted.org/packages/cb/2f/fdba158c9dbe5caca9c3eca3eaffffb251f2fb8674bf8e2d0aed5f38d319/numexpr-2.14.1.tar.gz", hash = "sha256:4be00b1086c7b7a5c32e31558122b7b80243fe098579b170967da83f3152b48b", size = 119400, upload-time = "2025-10-13T16:17:27.351Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/a3/67999bdd1ed1f938d38f3fedd4969632f2f197b090e50505f7cc1fa82510/numexpr-2.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d03fcb4644a12f70a14d74006f72662824da5b6128bf1bcd10cc3ed80e64c34", size = 163195 }, - { url = "https://files.pythonhosted.org/packages/25/95/d64f680ea1fc56d165457287e0851d6708800f9fcea346fc1b9957942ee6/numexpr-2.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2773ee1133f77009a1fc2f34fe236f3d9823779f5f75450e183137d49f00499f", size = 152088 }, - { url = "https://files.pythonhosted.org/packages/0e/7f/3bae417cb13ae08afd86d08bb0301c32440fe0cae4e6262b530e0819aeda/numexpr-2.14.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ebe4980f9494b9f94d10d2e526edc29e72516698d3bf95670ba79415492212a4", size = 451126 }, - { url = "https://files.pythonhosted.org/packages/4c/1a/edbe839109518364ac0bd9e918cf874c755bb2c128040e920f198c494263/numexpr-2.14.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a381e5e919a745c9503bcefffc1c7f98c972c04ec58fc8e999ed1a929e01ba6", size = 442012 }, - { url = "https://files.pythonhosted.org/packages/66/b1/be4ce99bff769a5003baddac103f34681997b31d4640d5a75c0e8ed59c78/numexpr-2.14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d08856cfc1b440eb1caaa60515235369654321995dd68eb9377577392020f6cb", size = 1415975 }, - { url = "https://files.pythonhosted.org/packages/e7/33/b33b8fdc032a05d9ebb44a51bfcd4b92c178a2572cd3e6c1b03d8a4b45b2/numexpr-2.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03130afa04edf83a7b590d207444f05a00363c9b9ea5d81c0f53b1ea13fad55a", size = 1464683 }, - { url = "https://files.pythonhosted.org/packages/d0/b2/ddcf0ac6cf0a1d605e5aecd4281507fd79a9628a67896795ab2e975de5df/numexpr-2.14.1-cp311-cp311-win32.whl", hash = "sha256:db78fa0c9fcbaded3ae7453faf060bd7a18b0dc10299d7fcd02d9362be1213ed", size = 166838 }, - { url = "https://files.pythonhosted.org/packages/64/72/4ca9bd97b2eb6dce9f5e70a3b6acec1a93e1fb9b079cb4cba2cdfbbf295d/numexpr-2.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:e9b2f957798c67a2428be96b04bce85439bed05efe78eb78e4c2ca43737578e7", size = 160069 }, - { url = "https://files.pythonhosted.org/packages/9d/20/c473fc04a371f5e2f8c5749e04505c13e7a8ede27c09e9f099b2ad6f43d6/numexpr-2.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ebae0ab18c799b0e6b8c5a8d11e1fa3848eb4011271d99848b297468a39430", size = 162790 }, - { url = "https://files.pythonhosted.org/packages/45/93/b6760dd1904c2a498e5f43d1bb436f59383c3ddea3815f1461dfaa259373/numexpr-2.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47041f2f7b9e69498fb311af672ba914a60e6e6d804011caacb17d66f639e659", size = 152196 }, - { url = "https://files.pythonhosted.org/packages/72/94/cc921e35593b820521e464cbbeaf8212bbdb07f16dc79fe283168df38195/numexpr-2.14.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d686dfb2c1382d9e6e0ee0b7647f943c1886dba3adbf606c625479f35f1956c1", size = 452468 }, - { url = "https://files.pythonhosted.org/packages/d9/43/560e9ba23c02c904b5934496486d061bcb14cd3ebba2e3cf0e2dccb6c22b/numexpr-2.14.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:eee6d4fbbbc368e6cdd0772734d6249128d957b3b8ad47a100789009f4de7083", size = 443631 }, - { url = "https://files.pythonhosted.org/packages/7b/6c/78f83b6219f61c2c22d71ab6e6c2d4e5d7381334c6c29b77204e59edb039/numexpr-2.14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3a2839efa25f3c8d4133252ea7342d8f81226c7c4dda81f97a57e090b9d87a48", size = 1417670 }, - { url = "https://files.pythonhosted.org/packages/0e/bb/1ccc9dcaf46281568ce769888bf16294c40e98a5158e4b16c241de31d0d3/numexpr-2.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9f9137f1351b310436662b5dc6f4082a245efa8950c3b0d9008028df92fefb9b", size = 1466212 }, - { url = "https://files.pythonhosted.org/packages/31/9f/203d82b9e39dadd91d64bca55b3c8ca432e981b822468dcef41a4418626b/numexpr-2.14.1-cp312-cp312-win32.whl", hash = "sha256:36f8d5c1bd1355df93b43d766790f9046cccfc1e32b7c6163f75bcde682cda07", size = 166996 }, - { url = "https://files.pythonhosted.org/packages/1f/67/ffe750b5452eb66de788c34e7d21ec6d886abb4d7c43ad1dc88ceb3d998f/numexpr-2.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:fdd886f4b7dbaf167633ee396478f0d0aa58ea2f9e7ccc3c6431019623e8d68f", size = 160187 }, + { url = "https://files.pythonhosted.org/packages/b2/a3/67999bdd1ed1f938d38f3fedd4969632f2f197b090e50505f7cc1fa82510/numexpr-2.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d03fcb4644a12f70a14d74006f72662824da5b6128bf1bcd10cc3ed80e64c34", size = 163195, upload-time = "2025-10-13T16:16:31.212Z" }, + { url = "https://files.pythonhosted.org/packages/25/95/d64f680ea1fc56d165457287e0851d6708800f9fcea346fc1b9957942ee6/numexpr-2.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2773ee1133f77009a1fc2f34fe236f3d9823779f5f75450e183137d49f00499f", size = 152088, upload-time = "2025-10-13T16:16:33.186Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7f/3bae417cb13ae08afd86d08bb0301c32440fe0cae4e6262b530e0819aeda/numexpr-2.14.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ebe4980f9494b9f94d10d2e526edc29e72516698d3bf95670ba79415492212a4", size = 451126, upload-time = "2025-10-13T16:13:22.248Z" }, + { url = "https://files.pythonhosted.org/packages/4c/1a/edbe839109518364ac0bd9e918cf874c755bb2c128040e920f198c494263/numexpr-2.14.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a381e5e919a745c9503bcefffc1c7f98c972c04ec58fc8e999ed1a929e01ba6", size = 442012, upload-time = "2025-10-13T16:14:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/b1/be4ce99bff769a5003baddac103f34681997b31d4640d5a75c0e8ed59c78/numexpr-2.14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d08856cfc1b440eb1caaa60515235369654321995dd68eb9377577392020f6cb", size = 1415975, upload-time = "2025-10-13T16:13:26.088Z" }, + { url = "https://files.pythonhosted.org/packages/e7/33/b33b8fdc032a05d9ebb44a51bfcd4b92c178a2572cd3e6c1b03d8a4b45b2/numexpr-2.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03130afa04edf83a7b590d207444f05a00363c9b9ea5d81c0f53b1ea13fad55a", size = 1464683, upload-time = "2025-10-13T16:14:58.87Z" }, + { url = "https://files.pythonhosted.org/packages/d0/b2/ddcf0ac6cf0a1d605e5aecd4281507fd79a9628a67896795ab2e975de5df/numexpr-2.14.1-cp311-cp311-win32.whl", hash = "sha256:db78fa0c9fcbaded3ae7453faf060bd7a18b0dc10299d7fcd02d9362be1213ed", size = 166838, upload-time = "2025-10-13T16:17:06.765Z" }, + { url = "https://files.pythonhosted.org/packages/64/72/4ca9bd97b2eb6dce9f5e70a3b6acec1a93e1fb9b079cb4cba2cdfbbf295d/numexpr-2.14.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:e9b2f957798c67a2428be96b04bce85439bed05efe78eb78e4c2ca43737578e7", size = 160069, upload-time = "2025-10-13T16:17:08.752Z" }, + { url = "https://files.pythonhosted.org/packages/9d/20/c473fc04a371f5e2f8c5749e04505c13e7a8ede27c09e9f099b2ad6f43d6/numexpr-2.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ebae0ab18c799b0e6b8c5a8d11e1fa3848eb4011271d99848b297468a39430", size = 162790, upload-time = "2025-10-13T16:16:34.903Z" }, + { url = "https://files.pythonhosted.org/packages/45/93/b6760dd1904c2a498e5f43d1bb436f59383c3ddea3815f1461dfaa259373/numexpr-2.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47041f2f7b9e69498fb311af672ba914a60e6e6d804011caacb17d66f639e659", size = 152196, upload-time = "2025-10-13T16:16:36.593Z" }, + { url = "https://files.pythonhosted.org/packages/72/94/cc921e35593b820521e464cbbeaf8212bbdb07f16dc79fe283168df38195/numexpr-2.14.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d686dfb2c1382d9e6e0ee0b7647f943c1886dba3adbf606c625479f35f1956c1", size = 452468, upload-time = "2025-10-13T16:13:29.531Z" }, + { url = "https://files.pythonhosted.org/packages/d9/43/560e9ba23c02c904b5934496486d061bcb14cd3ebba2e3cf0e2dccb6c22b/numexpr-2.14.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee6d4fbbbc368e6cdd0772734d6249128d957b3b8ad47a100789009f4de7083", size = 443631, upload-time = "2025-10-13T16:15:02.473Z" }, + { url = "https://files.pythonhosted.org/packages/7b/6c/78f83b6219f61c2c22d71ab6e6c2d4e5d7381334c6c29b77204e59edb039/numexpr-2.14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3a2839efa25f3c8d4133252ea7342d8f81226c7c4dda81f97a57e090b9d87a48", size = 1417670, upload-time = "2025-10-13T16:13:33.464Z" }, + { url = "https://files.pythonhosted.org/packages/0e/bb/1ccc9dcaf46281568ce769888bf16294c40e98a5158e4b16c241de31d0d3/numexpr-2.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9f9137f1351b310436662b5dc6f4082a245efa8950c3b0d9008028df92fefb9b", size = 1466212, upload-time = "2025-10-13T16:15:12.828Z" }, + { url = "https://files.pythonhosted.org/packages/31/9f/203d82b9e39dadd91d64bca55b3c8ca432e981b822468dcef41a4418626b/numexpr-2.14.1-cp312-cp312-win32.whl", hash = "sha256:36f8d5c1bd1355df93b43d766790f9046cccfc1e32b7c6163f75bcde682cda07", size = 166996, upload-time = "2025-10-13T16:17:10.369Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/ffe750b5452eb66de788c34e7d21ec6d886abb4d7c43ad1dc88ceb3d998f/numexpr-2.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:fdd886f4b7dbaf167633ee396478f0d0aa58ea2f9e7ccc3c6431019623e8d68f", size = 160187, upload-time = "2025-10-13T16:17:11.974Z" }, ] [[package]] name = "numpy" version = "1.26.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 } +sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", 
size = 20630554 }, - { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127 }, - { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994 }, - { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005 }, - { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297 }, - { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567 }, - { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812 }, - { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913 }, - { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901 }, - { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868 }, - { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109 }, - { url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613 }, - { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172 }, - { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643 }, - { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803 }, - { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754 }, + { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554, upload-time = "2024-02-05T23:51:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127, upload-time = "2024-02-05T23:52:15.314Z" }, + { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994, upload-time = "2024-02-05T23:52:47.569Z" }, + { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005, upload-time = "2024-02-05T23:53:15.637Z" }, + { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297, upload-time = "2024-02-05T23:53:42.16Z" }, + { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567, upload-time = "2024-02-05T23:54:11.696Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812, upload-time = "2024-02-05T23:54:26.453Z" }, + { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913, upload-time = "2024-02-05T23:54:53.933Z" }, + { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901, upload-time = "2024-02-05T23:55:32.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868, upload-time = "2024-02-05T23:55:56.28Z" }, + { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109, upload-time = "2024-02-05T23:56:20.368Z" }, + { url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613, upload-time = "2024-02-05T23:56:56.054Z" }, + { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172, upload-time = "2024-02-05T23:57:21.56Z" }, + { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643, upload-time = "2024-02-05T23:57:56.585Z" }, + { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803, upload-time = "2024-02-05T23:58:08.963Z" }, + { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754, upload-time = "2024-02-05T23:58:36.364Z" }, ] [[package]] @@ -3724,18 +3765,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/a7/780dc00f4fed2f2b653f76a196b3a6807c7c667f30ae95a7fd082c1081d8/numpy_typing_compat-20250818.1.25.tar.gz", hash = "sha256:8ff461725af0b436e9b0445d07712f1e6e3a97540a3542810f65f936dcc587a5", size = 5027 } +sdist = { url = "https://files.pythonhosted.org/packages/ff/a7/780dc00f4fed2f2b653f76a196b3a6807c7c667f30ae95a7fd082c1081d8/numpy_typing_compat-20250818.1.25.tar.gz", hash = "sha256:8ff461725af0b436e9b0445d07712f1e6e3a97540a3542810f65f936dcc587a5", size = 5027, upload-time = "2025-08-18T23:46:39.062Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/71/30e8d317b6896acbc347d3089764b6209ba299095550773e14d27dcf035f/numpy_typing_compat-20250818.1.25-py3-none-any.whl", hash = "sha256:4f91427369583074b236c804dd27559134f08ec4243485034c8e7d258cbd9cd3", size = 6355 }, + { url = "https://files.pythonhosted.org/packages/1e/71/30e8d317b6896acbc347d3089764b6209ba299095550773e14d27dcf035f/numpy_typing_compat-20250818.1.25-py3-none-any.whl", hash = "sha256:4f91427369583074b236c804dd27559134f08ec4243485034c8e7d258cbd9cd3", size = 6355, upload-time = "2025-08-18T23:46:30.927Z" }, ] [[package]] name = "oauthlib" version = "3.3.1" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918 } +sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918, upload-time = "2025-06-19T22:48:08.269Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065 }, + { url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065, upload-time = "2025-06-19T22:48:06.508Z" }, ] [[package]] @@ -3745,15 +3786,15 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "defusedxml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/97/73/8ade73f6749177003f7ce3304f524774adda96e6aaab30ea79fd8fda7934/odfpy-1.4.1.tar.gz", hash = "sha256:db766a6e59c5103212f3cc92ec8dd50a0f3a02790233ed0b52148b70d3c438ec", size = 717045 } +sdist = { url = "https://files.pythonhosted.org/packages/97/73/8ade73f6749177003f7ce3304f524774adda96e6aaab30ea79fd8fda7934/odfpy-1.4.1.tar.gz", hash = "sha256:db766a6e59c5103212f3cc92ec8dd50a0f3a02790233ed0b52148b70d3c438ec", size = 717045, upload-time = "2020-01-18T16:55:48.852Z" } [[package]] name = "olefile" version = "0.47" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/69/1b/077b508e3e500e1629d366249c3ccb32f95e50258b231705c09e3c7a4366/olefile-0.47.zip", hash = "sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c", size = 112240 } +sdist = { url = "https://files.pythonhosted.org/packages/69/1b/077b508e3e500e1629d366249c3ccb32f95e50258b231705c09e3c7a4366/olefile-0.47.zip", hash = "sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c", size = 112240, upload-time = "2023-12-01T16:22:53.025Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/d3/b64c356a907242d719fc668b71befd73324e47ab46c8ebbbede252c154b2/olefile-0.47-py2.py3-none-any.whl", hash = "sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f", size = 114565 }, + { url = "https://files.pythonhosted.org/packages/17/d3/b64c356a907242d719fc668b71befd73324e47ab46c8ebbbede252c154b2/olefile-0.47-py2.py3-none-any.whl", hash = "sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f", size = 114565, upload-time = "2023-12-01T16:22:51.518Z" }, ] [[package]] @@ -3769,16 +3810,16 @@ dependencies = [ { name = "sympy" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/44/be/467b00f09061572f022ffd17e49e49e5a7a789056bad95b54dfd3bee73ff/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:6f91d2c9b0965e86827a5ba01531d5b669770b01775b23199565d6c1f136616c", size = 17196113 }, - { url = "https://files.pythonhosted.org/packages/9f/a8/3c23a8f75f93122d2b3410bfb74d06d0f8da4ac663185f91866b03f7da1b/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_x86_64.whl", hash = 
"sha256:87d8b6eaf0fbeb6835a60a4265fde7a3b60157cf1b2764773ac47237b4d48612", size = 19153857 }, - { url = "https://files.pythonhosted.org/packages/3f/d8/506eed9af03d86f8db4880a4c47cd0dffee973ef7e4f4cff9f1d4bcf7d22/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbfd2fca76c855317568c1b36a885ddea2272c13cb0e395002c402f2360429a6", size = 15220095 }, - { url = "https://files.pythonhosted.org/packages/e9/80/113381ba832d5e777accedc6cb41d10f9eca82321ae31ebb6bcede530cea/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da44b99206e77734c5819aa2142c69e64f3b46edc3bd314f6a45a932defc0b3e", size = 17372080 }, - { url = "https://files.pythonhosted.org/packages/3a/db/1b4a62e23183a0c3fe441782462c0ede9a2a65c6bbffb9582fab7c7a0d38/onnxruntime-1.23.2-cp311-cp311-win_amd64.whl", hash = "sha256:902c756d8b633ce0dedd889b7c08459433fbcf35e9c38d1c03ddc020f0648c6e", size = 13468349 }, - { url = "https://files.pythonhosted.org/packages/1b/9e/f748cd64161213adeef83d0cb16cb8ace1e62fa501033acdd9f9341fff57/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:b8f029a6b98d3cf5be564d52802bb50a8489ab73409fa9db0bf583eabb7c2321", size = 17195929 }, - { url = "https://files.pythonhosted.org/packages/91/9d/a81aafd899b900101988ead7fb14974c8a58695338ab6a0f3d6b0100f30b/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:218295a8acae83905f6f1aed8cacb8e3eb3bd7513a13fe4ba3b2664a19fc4a6b", size = 19157705 }, - { url = "https://files.pythonhosted.org/packages/3c/35/4e40f2fba272a6698d62be2cd21ddc3675edfc1a4b9ddefcc4648f115315/onnxruntime-1.23.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76ff670550dc23e58ea9bc53b5149b99a44e63b34b524f7b8547469aaa0dcb8c", size = 15226915 }, - { url = "https://files.pythonhosted.org/packages/ef/88/9cc25d2bafe6bc0d4d3c1db3ade98196d5b355c0b273e6a5dc09c5d5d0d5/onnxruntime-1.23.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f9b4ae77f8e3c9bee50c27bc1beede83f786fe1d52e99ac85aa8d65a01e9b77", size = 17382649 }, - { url = "https://files.pythonhosted.org/packages/c0/b4/569d298f9fc4d286c11c45e85d9ffa9e877af12ace98af8cab52396e8f46/onnxruntime-1.23.2-cp312-cp312-win_amd64.whl", hash = "sha256:25de5214923ce941a3523739d34a520aac30f21e631de53bba9174dc9c004435", size = 13470528 }, + { url = "https://files.pythonhosted.org/packages/44/be/467b00f09061572f022ffd17e49e49e5a7a789056bad95b54dfd3bee73ff/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:6f91d2c9b0965e86827a5ba01531d5b669770b01775b23199565d6c1f136616c", size = 17196113, upload-time = "2025-10-22T03:47:33.526Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a8/3c23a8f75f93122d2b3410bfb74d06d0f8da4ac663185f91866b03f7da1b/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:87d8b6eaf0fbeb6835a60a4265fde7a3b60157cf1b2764773ac47237b4d48612", size = 19153857, upload-time = "2025-10-22T03:46:37.578Z" }, + { url = "https://files.pythonhosted.org/packages/3f/d8/506eed9af03d86f8db4880a4c47cd0dffee973ef7e4f4cff9f1d4bcf7d22/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbfd2fca76c855317568c1b36a885ddea2272c13cb0e395002c402f2360429a6", size = 15220095, upload-time = "2025-10-22T03:46:24.769Z" }, + { url = "https://files.pythonhosted.org/packages/e9/80/113381ba832d5e777accedc6cb41d10f9eca82321ae31ebb6bcede530cea/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:da44b99206e77734c5819aa2142c69e64f3b46edc3bd314f6a45a932defc0b3e", size = 17372080, upload-time = "2025-10-22T03:47:00.265Z" }, + { url = "https://files.pythonhosted.org/packages/3a/db/1b4a62e23183a0c3fe441782462c0ede9a2a65c6bbffb9582fab7c7a0d38/onnxruntime-1.23.2-cp311-cp311-win_amd64.whl", hash = "sha256:902c756d8b633ce0dedd889b7c08459433fbcf35e9c38d1c03ddc020f0648c6e", size = 13468349, upload-time = "2025-10-22T03:47:25.783Z" }, + { url = "https://files.pythonhosted.org/packages/1b/9e/f748cd64161213adeef83d0cb16cb8ace1e62fa501033acdd9f9341fff57/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:b8f029a6b98d3cf5be564d52802bb50a8489ab73409fa9db0bf583eabb7c2321", size = 17195929, upload-time = "2025-10-22T03:47:36.24Z" }, + { url = "https://files.pythonhosted.org/packages/91/9d/a81aafd899b900101988ead7fb14974c8a58695338ab6a0f3d6b0100f30b/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:218295a8acae83905f6f1aed8cacb8e3eb3bd7513a13fe4ba3b2664a19fc4a6b", size = 19157705, upload-time = "2025-10-22T03:46:40.415Z" }, + { url = "https://files.pythonhosted.org/packages/3c/35/4e40f2fba272a6698d62be2cd21ddc3675edfc1a4b9ddefcc4648f115315/onnxruntime-1.23.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76ff670550dc23e58ea9bc53b5149b99a44e63b34b524f7b8547469aaa0dcb8c", size = 15226915, upload-time = "2025-10-22T03:46:27.773Z" }, + { url = "https://files.pythonhosted.org/packages/ef/88/9cc25d2bafe6bc0d4d3c1db3ade98196d5b355c0b273e6a5dc09c5d5d0d5/onnxruntime-1.23.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f9b4ae77f8e3c9bee50c27bc1beede83f786fe1d52e99ac85aa8d65a01e9b77", size = 17382649, upload-time = "2025-10-22T03:47:02.782Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b4/569d298f9fc4d286c11c45e85d9ffa9e877af12ace98af8cab52396e8f46/onnxruntime-1.23.2-cp312-cp312-win_amd64.whl", hash = "sha256:25de5214923ce941a3523739d34a520aac30f21e631de53bba9174dc9c004435", size = 13470528, upload-time = "2025-10-22T03:47:28.106Z" }, ] [[package]] @@ -3795,25 +3836,25 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d5/e4/42591e356f1d53c568418dc7e30dcda7be31dd5a4d570bca22acb0525862/openai-2.8.1.tar.gz", hash = "sha256:cb1b79eef6e809f6da326a7ef6038719e35aa944c42d081807bfa1be8060f15f", size = 602490 } +sdist = { url = "https://files.pythonhosted.org/packages/d5/e4/42591e356f1d53c568418dc7e30dcda7be31dd5a4d570bca22acb0525862/openai-2.8.1.tar.gz", hash = "sha256:cb1b79eef6e809f6da326a7ef6038719e35aa944c42d081807bfa1be8060f15f", size = 602490, upload-time = "2025-11-17T22:39:59.549Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/55/4f/dbc0c124c40cb390508a82770fb9f6e3ed162560181a85089191a851c59a/openai-2.8.1-py3-none-any.whl", hash = "sha256:c6c3b5a04994734386e8dad3c00a393f56d3b68a27cd2e8acae91a59e4122463", size = 1022688 }, + { url = "https://files.pythonhosted.org/packages/55/4f/dbc0c124c40cb390508a82770fb9f6e3ed162560181a85089191a851c59a/openai-2.8.1-py3-none-any.whl", hash = "sha256:c6c3b5a04994734386e8dad3c00a393f56d3b68a27cd2e8acae91a59e4122463", size = 1022688, upload-time = "2025-11-17T22:39:57.675Z" }, ] [[package]] name = "opendal" version = "0.46.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/33/db/9c37efe16afe6371d66a0be94fa701c281108820198f18443dc997fbf3d8/opendal-0.46.0.tar.gz", hash = 
"sha256:334aa4c5b3cc0776598ef8d3c154f074f6a9d87981b951d70db1407efed3b06c", size = 989391 } +sdist = { url = "https://files.pythonhosted.org/packages/33/db/9c37efe16afe6371d66a0be94fa701c281108820198f18443dc997fbf3d8/opendal-0.46.0.tar.gz", hash = "sha256:334aa4c5b3cc0776598ef8d3c154f074f6a9d87981b951d70db1407efed3b06c", size = 989391, upload-time = "2025-07-17T06:58:52.913Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/05/a8d9c6a935a181d38b55c2cb7121394a6bdd819909ff453a17e78f45672a/opendal-0.46.0-cp311-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8cd4db71694c93e99055349714c7f7c7177e4767428e9e4bc592e4055edb6dba", size = 26502380 }, - { url = "https://files.pythonhosted.org/packages/57/8d/cf684b246fa38ab946f3d11671230d07b5b14d2aeb152b68bd51f4b2210b/opendal-0.46.0-cp311-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3019f923a7e1c5db86a36cee95d0c899ca7379e355bda9eb37e16d076c1f42f3", size = 12684482 }, - { url = "https://files.pythonhosted.org/packages/ad/71/36a97a8258cd0f0dd902561d0329a339f5a39a9896f0380763f526e9af89/opendal-0.46.0-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e202ded0be5410546193f563258e9a78a57337f5c2bb553b8802a420c2ef683", size = 14114685 }, - { url = "https://files.pythonhosted.org/packages/b7/fa/9a30c17428a12246c6ae17b406e7214a9a3caecec37af6860d27e99f9b66/opendal-0.46.0-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7db426ba8171d665953836653a596ef1bad3732a1c4dd2e3fa68bc20beee7afc", size = 13191783 }, - { url = "https://files.pythonhosted.org/packages/f8/32/4f7351ee242b63c817896afb373e5d5f28e1d9ca4e51b69a7b2e934694cf/opendal-0.46.0-cp311-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:898444dc072201044ed8c1dcce0929ebda8b10b92ba9c95248cf7fcbbc9dc1d7", size = 13358943 }, - { url = "https://files.pythonhosted.org/packages/77/e5/f650cf79ffbf7c7c8d7466fe9b4fa04cda97d950f915b8b3e2ced29f0f3e/opendal-0.46.0-cp311-abi3-musllinux_1_1_armv7l.whl", hash = "sha256:998e7a80a3468fd3f8604873aec6777fd25d3101fdbb1b63a4dc5fef14797086", size = 13015627 }, - { url = "https://files.pythonhosted.org/packages/c4/d1/77b731016edd494514447322d6b02a2a49c41ad6deeaa824dd2958479574/opendal-0.46.0-cp311-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:093098658482e7b87d16bf2931b5ef0ee22ed6a695f945874c696da72a6d057a", size = 14314675 }, - { url = "https://files.pythonhosted.org/packages/1e/93/328f7c72ccf04b915ab88802342d8f79322b7fba5509513b509681651224/opendal-0.46.0-cp311-abi3-win_amd64.whl", hash = "sha256:f5e58abc86db005879340a9187372a8c105c456c762943139a48dde63aad790d", size = 14904045 }, + { url = "https://files.pythonhosted.org/packages/6c/05/a8d9c6a935a181d38b55c2cb7121394a6bdd819909ff453a17e78f45672a/opendal-0.46.0-cp311-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8cd4db71694c93e99055349714c7f7c7177e4767428e9e4bc592e4055edb6dba", size = 26502380, upload-time = "2025-07-17T06:58:16.173Z" }, + { url = "https://files.pythonhosted.org/packages/57/8d/cf684b246fa38ab946f3d11671230d07b5b14d2aeb152b68bd51f4b2210b/opendal-0.46.0-cp311-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3019f923a7e1c5db86a36cee95d0c899ca7379e355bda9eb37e16d076c1f42f3", size = 12684482, upload-time = "2025-07-17T06:58:18.462Z" }, + { url = "https://files.pythonhosted.org/packages/ad/71/36a97a8258cd0f0dd902561d0329a339f5a39a9896f0380763f526e9af89/opendal-0.46.0-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4e202ded0be5410546193f563258e9a78a57337f5c2bb553b8802a420c2ef683", size = 14114685, upload-time = "2025-07-17T06:58:20.728Z" }, + { url = "https://files.pythonhosted.org/packages/b7/fa/9a30c17428a12246c6ae17b406e7214a9a3caecec37af6860d27e99f9b66/opendal-0.46.0-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7db426ba8171d665953836653a596ef1bad3732a1c4dd2e3fa68bc20beee7afc", size = 13191783, upload-time = "2025-07-17T06:58:23.181Z" }, + { url = "https://files.pythonhosted.org/packages/f8/32/4f7351ee242b63c817896afb373e5d5f28e1d9ca4e51b69a7b2e934694cf/opendal-0.46.0-cp311-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:898444dc072201044ed8c1dcce0929ebda8b10b92ba9c95248cf7fcbbc9dc1d7", size = 13358943, upload-time = "2025-07-17T06:58:25.281Z" }, + { url = "https://files.pythonhosted.org/packages/77/e5/f650cf79ffbf7c7c8d7466fe9b4fa04cda97d950f915b8b3e2ced29f0f3e/opendal-0.46.0-cp311-abi3-musllinux_1_1_armv7l.whl", hash = "sha256:998e7a80a3468fd3f8604873aec6777fd25d3101fdbb1b63a4dc5fef14797086", size = 13015627, upload-time = "2025-07-17T06:58:27.28Z" }, + { url = "https://files.pythonhosted.org/packages/c4/d1/77b731016edd494514447322d6b02a2a49c41ad6deeaa824dd2958479574/opendal-0.46.0-cp311-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:093098658482e7b87d16bf2931b5ef0ee22ed6a695f945874c696da72a6d057a", size = 14314675, upload-time = "2025-07-17T06:58:29.622Z" }, + { url = "https://files.pythonhosted.org/packages/1e/93/328f7c72ccf04b915ab88802342d8f79322b7fba5509513b509681651224/opendal-0.46.0-cp311-abi3-win_amd64.whl", hash = "sha256:f5e58abc86db005879340a9187372a8c105c456c762943139a48dde63aad790d", size = 14904045, upload-time = "2025-07-17T06:58:31.692Z" }, ] [[package]] @@ -3826,18 +3867,18 @@ dependencies = [ { name = "opentelemetry-sdk" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/d0/b19061a21fd6127d2857c77744a36073bba9c1502d1d5e8517b708eb8b7c/openinference_instrumentation-0.1.42.tar.gz", hash = "sha256:2275babc34022e151b5492cfba41d3b12e28377f8e08cb45e5d64fe2d9d7fe37", size = 23954 } +sdist = { url = "https://files.pythonhosted.org/packages/00/d0/b19061a21fd6127d2857c77744a36073bba9c1502d1d5e8517b708eb8b7c/openinference_instrumentation-0.1.42.tar.gz", hash = "sha256:2275babc34022e151b5492cfba41d3b12e28377f8e08cb45e5d64fe2d9d7fe37", size = 23954, upload-time = "2025-11-05T01:37:46.869Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/71/43ee4616fc95dbd2f560550f199c6652a5eb93f84e8aa0039bc95c19cfe0/openinference_instrumentation-0.1.42-py3-none-any.whl", hash = "sha256:e7521ff90833ef7cc65db526a2f59b76a496180abeaaee30ec6abbbc0b43f8ec", size = 30086 }, + { url = "https://files.pythonhosted.org/packages/c3/71/43ee4616fc95dbd2f560550f199c6652a5eb93f84e8aa0039bc95c19cfe0/openinference_instrumentation-0.1.42-py3-none-any.whl", hash = "sha256:e7521ff90833ef7cc65db526a2f59b76a496180abeaaee30ec6abbbc0b43f8ec", size = 30086, upload-time = "2025-11-05T01:37:43.866Z" }, ] [[package]] name = "openinference-semantic-conventions" version = "0.1.25" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/68/81c8a0b90334ff11e4f285e4934c57f30bea3ef0c0b9f99b65e7b80fae3b/openinference_semantic_conventions-0.1.25.tar.gz", hash = "sha256:f0a8c2cfbd00195d1f362b4803518341e80867d446c2959bf1743f1894fce31d", size = 12767 } +sdist = { url = 
"https://files.pythonhosted.org/packages/0b/68/81c8a0b90334ff11e4f285e4934c57f30bea3ef0c0b9f99b65e7b80fae3b/openinference_semantic_conventions-0.1.25.tar.gz", hash = "sha256:f0a8c2cfbd00195d1f362b4803518341e80867d446c2959bf1743f1894fce31d", size = 12767, upload-time = "2025-11-05T01:37:45.89Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/3d/dd14ee2eb8a3f3054249562e76b253a1545c76adbbfd43a294f71acde5c3/openinference_semantic_conventions-0.1.25-py3-none-any.whl", hash = "sha256:3814240f3bd61f05d9562b761de70ee793d55b03bca1634edf57d7a2735af238", size = 10395 }, + { url = "https://files.pythonhosted.org/packages/fd/3d/dd14ee2eb8a3f3054249562e76b253a1545c76adbbfd43a294f71acde5c3/openinference_semantic_conventions-0.1.25-py3-none-any.whl", hash = "sha256:3814240f3bd61f05d9562b761de70ee793d55b03bca1634edf57d7a2735af238", size = 10395, upload-time = "2025-11-05T01:37:43.697Z" }, ] [[package]] @@ -3847,9 +3888,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "et-xmlfile" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464 } +sdist = { url = "https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464, upload-time = "2024-06-28T14:03:44.161Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910 }, + { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910, upload-time = "2024-06-28T14:03:41.161Z" }, ] [[package]] @@ -3863,9 +3904,9 @@ dependencies = [ { name = "six" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e4/dc/acb182db6bb0c71f1e6e41c49260e01d68e52a03efb64e44aed3cc7f483f/opensearch-py-2.4.0.tar.gz", hash = "sha256:7eba2b6ed2ddcf33225bfebfba2aee026877838cc39f760ec80f27827308cc4b", size = 182924 } +sdist = { url = "https://files.pythonhosted.org/packages/e4/dc/acb182db6bb0c71f1e6e41c49260e01d68e52a03efb64e44aed3cc7f483f/opensearch-py-2.4.0.tar.gz", hash = "sha256:7eba2b6ed2ddcf33225bfebfba2aee026877838cc39f760ec80f27827308cc4b", size = 182924, upload-time = "2023-11-15T21:41:37.329Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/98/178aacf07ece7f95d1948352778702898d57c286053813deb20ebb409923/opensearch_py-2.4.0-py2.py3-none-any.whl", hash = "sha256:316077235437c8ceac970232261f3393c65fb92a80f33c5b106f50f1dab24fd9", size = 258405 }, + { url = "https://files.pythonhosted.org/packages/c1/98/178aacf07ece7f95d1948352778702898d57c286053813deb20ebb409923/opensearch_py-2.4.0-py2.py3-none-any.whl", hash = "sha256:316077235437c8ceac970232261f3393c65fb92a80f33c5b106f50f1dab24fd9", size = 258405, upload-time = "2023-11-15T21:41:35.59Z" }, ] [[package]] @@ -3876,9 +3917,9 @@ dependencies = [ { name = "deprecated" }, { name = "importlib-metadata" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/c9/83/93114b6de85a98963aec218a51509a52ed3f8de918fe91eb0f7299805c3f/opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342", size = 62693 } +sdist = { url = "https://files.pythonhosted.org/packages/c9/83/93114b6de85a98963aec218a51509a52ed3f8de918fe91eb0f7299805c3f/opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342", size = 62693, upload-time = "2024-08-28T21:35:31.445Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/1f/737dcdbc9fea2fa96c1b392ae47275165a7c641663fbb08a8d252968eed2/opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7", size = 63970 }, + { url = "https://files.pythonhosted.org/packages/fb/1f/737dcdbc9fea2fa96c1b392ae47275165a7c641663fbb08a8d252968eed2/opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7", size = 63970, upload-time = "2024-08-28T21:35:00.598Z" }, ] [[package]] @@ -3890,9 +3931,9 @@ dependencies = [ { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-sdk" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/09/423e17c439ed24c45110affe84aad886a536b7871a42637d2ad14a179b47/opentelemetry_distro-0.48b0.tar.gz", hash = "sha256:5cb15915780ac4972583286a56683d43bd4ca95371d72f5f3f179c8b0b2ddc91", size = 2556 } +sdist = { url = "https://files.pythonhosted.org/packages/f4/09/423e17c439ed24c45110affe84aad886a536b7871a42637d2ad14a179b47/opentelemetry_distro-0.48b0.tar.gz", hash = "sha256:5cb15915780ac4972583286a56683d43bd4ca95371d72f5f3f179c8b0b2ddc91", size = 2556, upload-time = "2024-08-28T21:27:40.455Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/82/cf/fa9a5fe954f1942e03b319ae0e319ebc93d9f984b548bcd9b3f232a1434d/opentelemetry_distro-0.48b0-py3-none-any.whl", hash = "sha256:b2f8fce114325b020769af3b9bf503efb8af07efc190bd1b9deac7843171664a", size = 3321 }, + { url = "https://files.pythonhosted.org/packages/82/cf/fa9a5fe954f1942e03b319ae0e319ebc93d9f984b548bcd9b3f232a1434d/opentelemetry_distro-0.48b0-py3-none-any.whl", hash = "sha256:b2f8fce114325b020769af3b9bf503efb8af07efc190bd1b9deac7843171664a", size = 3321, upload-time = "2024-08-28T21:26:26.584Z" }, ] [[package]] @@ -3903,9 +3944,9 @@ dependencies = [ { name = "opentelemetry-exporter-otlp-proto-grpc" }, { name = "opentelemetry-exporter-otlp-proto-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/d3/8156cc14e8f4573a3572ee7f30badc7aabd02961a09acc72ab5f2c789ef1/opentelemetry_exporter_otlp-1.27.0.tar.gz", hash = "sha256:4a599459e623868cc95d933c301199c2367e530f089750e115599fccd67cb2a1", size = 6166 } +sdist = { url = "https://files.pythonhosted.org/packages/fc/d3/8156cc14e8f4573a3572ee7f30badc7aabd02961a09acc72ab5f2c789ef1/opentelemetry_exporter_otlp-1.27.0.tar.gz", hash = "sha256:4a599459e623868cc95d933c301199c2367e530f089750e115599fccd67cb2a1", size = 6166, upload-time = "2024-08-28T21:35:33.746Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/59/6d/95e1fc2c8d945a734db32e87a5aa7a804f847c1657a21351df9338bd1c9c/opentelemetry_exporter_otlp-1.27.0-py3-none-any.whl", hash = "sha256:7688791cbdd951d71eb6445951d1cfbb7b6b2d7ee5948fac805d404802931145", size = 7001 }, + { url = 
"https://files.pythonhosted.org/packages/59/6d/95e1fc2c8d945a734db32e87a5aa7a804f847c1657a21351df9338bd1c9c/opentelemetry_exporter_otlp-1.27.0-py3-none-any.whl", hash = "sha256:7688791cbdd951d71eb6445951d1cfbb7b6b2d7ee5948fac805d404802931145", size = 7001, upload-time = "2024-08-28T21:35:04.02Z" }, ] [[package]] @@ -3915,9 +3956,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-proto" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cd/2e/7eaf4ba595fb5213cf639c9158dfb64aacb2e4c7d74bfa664af89fa111f4/opentelemetry_exporter_otlp_proto_common-1.27.0.tar.gz", hash = "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8", size = 17860 } +sdist = { url = "https://files.pythonhosted.org/packages/cd/2e/7eaf4ba595fb5213cf639c9158dfb64aacb2e4c7d74bfa664af89fa111f4/opentelemetry_exporter_otlp_proto_common-1.27.0.tar.gz", hash = "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8", size = 17860, upload-time = "2024-08-28T21:35:34.896Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/41/27/4610ab3d9bb3cde4309b6505f98b3aabca04a26aa480aa18cede23149837/opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a", size = 17848 }, + { url = "https://files.pythonhosted.org/packages/41/27/4610ab3d9bb3cde4309b6505f98b3aabca04a26aa480aa18cede23149837/opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a", size = 17848, upload-time = "2024-08-28T21:35:05.412Z" }, ] [[package]] @@ -3933,9 +3974,9 @@ dependencies = [ { name = "opentelemetry-proto" }, { name = "opentelemetry-sdk" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/d0/c1e375b292df26e0ffebf194e82cd197e4c26cc298582bda626ce3ce74c5/opentelemetry_exporter_otlp_proto_grpc-1.27.0.tar.gz", hash = "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f", size = 26244 } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d0/c1e375b292df26e0ffebf194e82cd197e4c26cc298582bda626ce3ce74c5/opentelemetry_exporter_otlp_proto_grpc-1.27.0.tar.gz", hash = "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f", size = 26244, upload-time = "2024-08-28T21:35:36.314Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/80/32217460c2c64c0568cea38410124ff680a9b65f6732867bbf857c4d8626/opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e", size = 18541 }, + { url = "https://files.pythonhosted.org/packages/8d/80/32217460c2c64c0568cea38410124ff680a9b65f6732867bbf857c4d8626/opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e", size = 18541, upload-time = "2024-08-28T21:35:06.493Z" }, ] [[package]] @@ -3951,9 +3992,9 @@ dependencies = [ { name = "opentelemetry-sdk" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/31/0a/f05c55e8913bf58a033583f2580a0ec31a5f4cf2beacc9e286dcb74d6979/opentelemetry_exporter_otlp_proto_http-1.27.0.tar.gz", hash = "sha256:2103479092d8eb18f61f3fbff084f67cc7f2d4a7d37e75304b8b56c1d09ebef5", size = 15059 } +sdist = { url = 
"https://files.pythonhosted.org/packages/31/0a/f05c55e8913bf58a033583f2580a0ec31a5f4cf2beacc9e286dcb74d6979/opentelemetry_exporter_otlp_proto_http-1.27.0.tar.gz", hash = "sha256:2103479092d8eb18f61f3fbff084f67cc7f2d4a7d37e75304b8b56c1d09ebef5", size = 15059, upload-time = "2024-08-28T21:35:37.079Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2d/8d/4755884afc0b1db6000527cac0ca17273063b6142c773ce4ecd307a82e72/opentelemetry_exporter_otlp_proto_http-1.27.0-py3-none-any.whl", hash = "sha256:688027575c9da42e179a69fe17e2d1eba9b14d81de8d13553a21d3114f3b4d75", size = 17203 }, + { url = "https://files.pythonhosted.org/packages/2d/8d/4755884afc0b1db6000527cac0ca17273063b6142c773ce4ecd307a82e72/opentelemetry_exporter_otlp_proto_http-1.27.0-py3-none-any.whl", hash = "sha256:688027575c9da42e179a69fe17e2d1eba9b14d81de8d13553a21d3114f3b4d75", size = 17203, upload-time = "2024-08-28T21:35:08.141Z" }, ] [[package]] @@ -3965,9 +4006,9 @@ dependencies = [ { name = "setuptools" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/0e/d9394839af5d55c8feb3b22cd11138b953b49739b20678ca96289e30f904/opentelemetry_instrumentation-0.48b0.tar.gz", hash = "sha256:94929685d906380743a71c3970f76b5f07476eea1834abd5dd9d17abfe23cc35", size = 24724 } +sdist = { url = "https://files.pythonhosted.org/packages/04/0e/d9394839af5d55c8feb3b22cd11138b953b49739b20678ca96289e30f904/opentelemetry_instrumentation-0.48b0.tar.gz", hash = "sha256:94929685d906380743a71c3970f76b5f07476eea1834abd5dd9d17abfe23cc35", size = 24724, upload-time = "2024-08-28T21:27:42.82Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/7f/405c41d4f359121376c9d5117dcf68149b8122d3f6c718996d037bd4d800/opentelemetry_instrumentation-0.48b0-py3-none-any.whl", hash = "sha256:a69750dc4ba6a5c3eb67986a337185a25b739966d80479befe37b546fc870b44", size = 29449 }, + { url = "https://files.pythonhosted.org/packages/0a/7f/405c41d4f359121376c9d5117dcf68149b8122d3f6c718996d037bd4d800/opentelemetry_instrumentation-0.48b0-py3-none-any.whl", hash = "sha256:a69750dc4ba6a5c3eb67986a337185a25b739966d80479befe37b546fc870b44", size = 29449, upload-time = "2024-08-28T21:26:31.288Z" }, ] [[package]] @@ -3981,9 +4022,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/44/ac/fd3d40bab3234ec3f5c052a815100676baaae1832fa1067935f11e5c59c6/opentelemetry_instrumentation_asgi-0.48b0.tar.gz", hash = "sha256:04c32174b23c7fa72ddfe192dad874954968a6a924608079af9952964ecdf785", size = 23435 } +sdist = { url = "https://files.pythonhosted.org/packages/44/ac/fd3d40bab3234ec3f5c052a815100676baaae1832fa1067935f11e5c59c6/opentelemetry_instrumentation_asgi-0.48b0.tar.gz", hash = "sha256:04c32174b23c7fa72ddfe192dad874954968a6a924608079af9952964ecdf785", size = 23435, upload-time = "2024-08-28T21:27:47.276Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/db/74/a0e0d38622856597dd8e630f2bd793760485eb165708e11b8be1696bbb5a/opentelemetry_instrumentation_asgi-0.48b0-py3-none-any.whl", hash = "sha256:ddb1b5fc800ae66e85a4e2eca4d9ecd66367a8c7b556169d9e7b57e10676e44d", size = 15958 }, + { url = "https://files.pythonhosted.org/packages/db/74/a0e0d38622856597dd8e630f2bd793760485eb165708e11b8be1696bbb5a/opentelemetry_instrumentation_asgi-0.48b0-py3-none-any.whl", hash = "sha256:ddb1b5fc800ae66e85a4e2eca4d9ecd66367a8c7b556169d9e7b57e10676e44d", size = 15958, upload-time = "2024-08-28T21:26:38.139Z" }, ] [[package]] @@ -3995,9 
+4036,9 @@ dependencies = [ { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/68/72975eff50cc22d8f65f96c425a2e8844f91488e78ffcfb603ac7cee0e5a/opentelemetry_instrumentation_celery-0.48b0.tar.gz", hash = "sha256:1d33aa6c4a1e6c5d17a64215245208a96e56c9d07611685dbae09a557704af26", size = 14445 } +sdist = { url = "https://files.pythonhosted.org/packages/42/68/72975eff50cc22d8f65f96c425a2e8844f91488e78ffcfb603ac7cee0e5a/opentelemetry_instrumentation_celery-0.48b0.tar.gz", hash = "sha256:1d33aa6c4a1e6c5d17a64215245208a96e56c9d07611685dbae09a557704af26", size = 14445, upload-time = "2024-08-28T21:27:56.392Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/59/f09e8f9f596d375fd86b7677751525bbc485c8cc8c5388e39786a3d3b968/opentelemetry_instrumentation_celery-0.48b0-py3-none-any.whl", hash = "sha256:c1904e38cc58fb2a33cd657d6e296285c5ffb0dca3f164762f94b905e5abc88e", size = 13697 }, + { url = "https://files.pythonhosted.org/packages/28/59/f09e8f9f596d375fd86b7677751525bbc485c8cc8c5388e39786a3d3b968/opentelemetry_instrumentation_celery-0.48b0-py3-none-any.whl", hash = "sha256:c1904e38cc58fb2a33cd657d6e296285c5ffb0dca3f164762f94b905e5abc88e", size = 13697, upload-time = "2024-08-28T21:26:50.01Z" }, ] [[package]] @@ -4011,9 +4052,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/58/20/43477da5850ef2cd3792715d442aecd051e885e0603b6ee5783b2104ba8f/opentelemetry_instrumentation_fastapi-0.48b0.tar.gz", hash = "sha256:21a72563ea412c0b535815aeed75fc580240f1f02ebc72381cfab672648637a2", size = 18497 } +sdist = { url = "https://files.pythonhosted.org/packages/58/20/43477da5850ef2cd3792715d442aecd051e885e0603b6ee5783b2104ba8f/opentelemetry_instrumentation_fastapi-0.48b0.tar.gz", hash = "sha256:21a72563ea412c0b535815aeed75fc580240f1f02ebc72381cfab672648637a2", size = 18497, upload-time = "2024-08-28T21:28:01.14Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/50/745ab075a3041b7a5f29a579d2c28eaad54f64b4589d8f9fd364c62cf0f3/opentelemetry_instrumentation_fastapi-0.48b0-py3-none-any.whl", hash = "sha256:afeb820a59e139d3e5d96619600f11ce0187658b8ae9e3480857dd790bc024f2", size = 11777 }, + { url = "https://files.pythonhosted.org/packages/ee/50/745ab075a3041b7a5f29a579d2c28eaad54f64b4589d8f9fd364c62cf0f3/opentelemetry_instrumentation_fastapi-0.48b0-py3-none-any.whl", hash = "sha256:afeb820a59e139d3e5d96619600f11ce0187658b8ae9e3480857dd790bc024f2", size = 11777, upload-time = "2024-08-28T21:26:57.457Z" }, ] [[package]] @@ -4029,9 +4070,9 @@ dependencies = [ { name = "opentelemetry-util-http" }, { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/2f/5c3af780a69f9ba78445fe0e5035c41f67281a31b08f3c3e7ec460bda726/opentelemetry_instrumentation_flask-0.48b0.tar.gz", hash = "sha256:e03a34428071aebf4864ea6c6a564acef64f88c13eb3818e64ea90da61266c3d", size = 19196 } +sdist = { url = "https://files.pythonhosted.org/packages/ed/2f/5c3af780a69f9ba78445fe0e5035c41f67281a31b08f3c3e7ec460bda726/opentelemetry_instrumentation_flask-0.48b0.tar.gz", hash = "sha256:e03a34428071aebf4864ea6c6a564acef64f88c13eb3818e64ea90da61266c3d", size = 19196, upload-time = "2024-08-28T21:28:01.986Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/78/3d/fcde4f8f0bf9fa1ee73a12304fa538076fb83fe0a2ae966ab0f0b7da5109/opentelemetry_instrumentation_flask-0.48b0-py3-none-any.whl", hash = "sha256:26b045420b9d76e85493b1c23fcf27517972423480dc6cf78fd6924248ba5808", size = 14588 }, + { url = "https://files.pythonhosted.org/packages/78/3d/fcde4f8f0bf9fa1ee73a12304fa538076fb83fe0a2ae966ab0f0b7da5109/opentelemetry_instrumentation_flask-0.48b0-py3-none-any.whl", hash = "sha256:26b045420b9d76e85493b1c23fcf27517972423480dc6cf78fd6924248ba5808", size = 14588, upload-time = "2024-08-28T21:26:58.504Z" }, ] [[package]] @@ -4044,9 +4085,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d3/d9/c65d818607c16d1b7ea8d2de6111c6cecadf8d2fd38c1885a72733a7c6d3/opentelemetry_instrumentation_httpx-0.48b0.tar.gz", hash = "sha256:ee977479e10398931921fb995ac27ccdeea2e14e392cb27ef012fc549089b60a", size = 16931 } +sdist = { url = "https://files.pythonhosted.org/packages/d3/d9/c65d818607c16d1b7ea8d2de6111c6cecadf8d2fd38c1885a72733a7c6d3/opentelemetry_instrumentation_httpx-0.48b0.tar.gz", hash = "sha256:ee977479e10398931921fb995ac27ccdeea2e14e392cb27ef012fc549089b60a", size = 16931, upload-time = "2024-08-28T21:28:03.794Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/fe/f2daa9d6d988c093b8c7b1d35df675761a8ece0b600b035dc04982746c9d/opentelemetry_instrumentation_httpx-0.48b0-py3-none-any.whl", hash = "sha256:d94f9d612c82d09fe22944d1904a30a464c19bea2ba76be656c99a28ad8be8e5", size = 13900 }, + { url = "https://files.pythonhosted.org/packages/c2/fe/f2daa9d6d988c093b8c7b1d35df675761a8ece0b600b035dc04982746c9d/opentelemetry_instrumentation_httpx-0.48b0-py3-none-any.whl", hash = "sha256:d94f9d612c82d09fe22944d1904a30a464c19bea2ba76be656c99a28ad8be8e5", size = 13900, upload-time = "2024-08-28T21:27:01.566Z" }, ] [[package]] @@ -4059,9 +4100,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/70/be/92e98e4c7f275be3d373899a41b0a7d4df64266657d985dbbdb9a54de0d5/opentelemetry_instrumentation_redis-0.48b0.tar.gz", hash = "sha256:61e33e984b4120e1b980d9fba6e9f7ca0c8d972f9970654d8f6e9f27fa115a8c", size = 10511 } +sdist = { url = "https://files.pythonhosted.org/packages/70/be/92e98e4c7f275be3d373899a41b0a7d4df64266657d985dbbdb9a54de0d5/opentelemetry_instrumentation_redis-0.48b0.tar.gz", hash = "sha256:61e33e984b4120e1b980d9fba6e9f7ca0c8d972f9970654d8f6e9f27fa115a8c", size = 10511, upload-time = "2024-08-28T21:28:15.061Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/40/892f30d400091106309cc047fd3f6d76a828fedd984a953fd5386b78a2fb/opentelemetry_instrumentation_redis-0.48b0-py3-none-any.whl", hash = "sha256:48c7f2e25cbb30bde749dc0d8b9c74c404c851f554af832956b9630b27f5bcb7", size = 11610 }, + { url = "https://files.pythonhosted.org/packages/94/40/892f30d400091106309cc047fd3f6d76a828fedd984a953fd5386b78a2fb/opentelemetry_instrumentation_redis-0.48b0-py3-none-any.whl", hash = "sha256:48c7f2e25cbb30bde749dc0d8b9c74c404c851f554af832956b9630b27f5bcb7", size = 11610, upload-time = "2024-08-28T21:27:18.759Z" }, ] [[package]] @@ -4075,9 +4116,9 @@ dependencies = [ { name = "packaging" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4c/77/3fcebbca8bd729da50dc2130d8ca869a235aa5483a85ef06c5dc8643476b/opentelemetry_instrumentation_sqlalchemy-0.48b0.tar.gz", hash = 
"sha256:dbf2d5a755b470e64e5e2762b56f8d56313787e4c7d71a87fe25c33f48eb3493", size = 13194 } +sdist = { url = "https://files.pythonhosted.org/packages/4c/77/3fcebbca8bd729da50dc2130d8ca869a235aa5483a85ef06c5dc8643476b/opentelemetry_instrumentation_sqlalchemy-0.48b0.tar.gz", hash = "sha256:dbf2d5a755b470e64e5e2762b56f8d56313787e4c7d71a87fe25c33f48eb3493", size = 13194, upload-time = "2024-08-28T21:28:18.122Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/84/4b6f1e9e9f83a52d966e91963f5a8424edc4a3d5ea32854c96c2d1618284/opentelemetry_instrumentation_sqlalchemy-0.48b0-py3-none-any.whl", hash = "sha256:625848a34aa5770cb4b1dcdbd95afce4307a0230338711101325261d739f391f", size = 13360 }, + { url = "https://files.pythonhosted.org/packages/e1/84/4b6f1e9e9f83a52d966e91963f5a8424edc4a3d5ea32854c96c2d1618284/opentelemetry_instrumentation_sqlalchemy-0.48b0-py3-none-any.whl", hash = "sha256:625848a34aa5770cb4b1dcdbd95afce4307a0230338711101325261d739f391f", size = 13360, upload-time = "2024-08-28T21:27:22.102Z" }, ] [[package]] @@ -4090,9 +4131,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/de/a5/f45cdfba18f22aefd2378eac8c07c1f8c9656d6bf7ce315ced48c67f3437/opentelemetry_instrumentation_wsgi-0.48b0.tar.gz", hash = "sha256:1a1e752367b0df4397e0b835839225ef5c2c3c053743a261551af13434fc4d51", size = 17974 } +sdist = { url = "https://files.pythonhosted.org/packages/de/a5/f45cdfba18f22aefd2378eac8c07c1f8c9656d6bf7ce315ced48c67f3437/opentelemetry_instrumentation_wsgi-0.48b0.tar.gz", hash = "sha256:1a1e752367b0df4397e0b835839225ef5c2c3c053743a261551af13434fc4d51", size = 17974, upload-time = "2024-08-28T21:28:24.902Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/87/fa420007e0ba7e8cd43799ab204717ab515f000236fa2726a6be3299efdd/opentelemetry_instrumentation_wsgi-0.48b0-py3-none-any.whl", hash = "sha256:c6051124d741972090fe94b2fa302555e1e2a22e9cdda32dd39ed49a5b34e0c6", size = 13691 }, + { url = "https://files.pythonhosted.org/packages/fb/87/fa420007e0ba7e8cd43799ab204717ab515f000236fa2726a6be3299efdd/opentelemetry_instrumentation_wsgi-0.48b0-py3-none-any.whl", hash = "sha256:c6051124d741972090fe94b2fa302555e1e2a22e9cdda32dd39ed49a5b34e0c6", size = 13691, upload-time = "2024-08-28T21:27:33.257Z" }, ] [[package]] @@ -4103,9 +4144,9 @@ dependencies = [ { name = "deprecated" }, { name = "opentelemetry-api" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/53/a3/3ceeb5ff5a1906371834d5c594e24e5b84f35528d219054833deca4ac44c/opentelemetry_propagator_b3-1.27.0.tar.gz", hash = "sha256:39377b6aa619234e08fbc6db79bf880aff36d7e2761efa9afa28b78d5937308f", size = 9590 } +sdist = { url = "https://files.pythonhosted.org/packages/53/a3/3ceeb5ff5a1906371834d5c594e24e5b84f35528d219054833deca4ac44c/opentelemetry_propagator_b3-1.27.0.tar.gz", hash = "sha256:39377b6aa619234e08fbc6db79bf880aff36d7e2761efa9afa28b78d5937308f", size = 9590, upload-time = "2024-08-28T21:35:43.971Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/3f/75ba77b8d9938bae575bc457a5c56ca2246ff5367b54c7d4252a31d1c91f/opentelemetry_propagator_b3-1.27.0-py3-none-any.whl", hash = "sha256:1dd75e9801ba02e870df3830097d35771a64c123127c984d9b05c352a35aa9cc", size = 8899 }, + { url = "https://files.pythonhosted.org/packages/03/3f/75ba77b8d9938bae575bc457a5c56ca2246ff5367b54c7d4252a31d1c91f/opentelemetry_propagator_b3-1.27.0-py3-none-any.whl", hash = 
"sha256:1dd75e9801ba02e870df3830097d35771a64c123127c984d9b05c352a35aa9cc", size = 8899, upload-time = "2024-08-28T21:35:18.317Z" }, ] [[package]] @@ -4115,9 +4156,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/59/959f0beea798ae0ee9c979b90f220736fbec924eedbefc60ca581232e659/opentelemetry_proto-1.27.0.tar.gz", hash = "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6", size = 34749 } +sdist = { url = "https://files.pythonhosted.org/packages/9a/59/959f0beea798ae0ee9c979b90f220736fbec924eedbefc60ca581232e659/opentelemetry_proto-1.27.0.tar.gz", hash = "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6", size = 34749, upload-time = "2024-08-28T21:35:45.839Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/56/3d2d826834209b19a5141eed717f7922150224d1a982385d19a9444cbf8d/opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace", size = 52464 }, + { url = "https://files.pythonhosted.org/packages/94/56/3d2d826834209b19a5141eed717f7922150224d1a982385d19a9444cbf8d/opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace", size = 52464, upload-time = "2024-08-28T21:35:21.434Z" }, ] [[package]] @@ -4129,9 +4170,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0d/9a/82a6ac0f06590f3d72241a587cb8b0b751bd98728e896cc4cbd4847248e6/opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f", size = 145019 } +sdist = { url = "https://files.pythonhosted.org/packages/0d/9a/82a6ac0f06590f3d72241a587cb8b0b751bd98728e896cc4cbd4847248e6/opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f", size = 145019, upload-time = "2024-08-28T21:35:46.708Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/bd/a6602e71e315055d63b2ff07172bd2d012b4cba2d4e00735d74ba42fc4d6/opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d", size = 110505 }, + { url = "https://files.pythonhosted.org/packages/c1/bd/a6602e71e315055d63b2ff07172bd2d012b4cba2d4e00735d74ba42fc4d6/opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d", size = 110505, upload-time = "2024-08-28T21:35:24.769Z" }, ] [[package]] @@ -4142,18 +4183,18 @@ dependencies = [ { name = "deprecated" }, { name = "opentelemetry-api" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0a/89/1724ad69f7411772446067cdfa73b598694c8c91f7f8c922e344d96d81f9/opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a", size = 89445 } +sdist = { url = "https://files.pythonhosted.org/packages/0a/89/1724ad69f7411772446067cdfa73b598694c8c91f7f8c922e344d96d81f9/opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a", size = 89445, upload-time = "2024-08-28T21:35:47.673Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/7a/4f0063dbb0b6c971568291a8bc19a4ca70d3c185db2d956230dd67429dfc/opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", 
hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f", size = 149685 }, + { url = "https://files.pythonhosted.org/packages/b7/7a/4f0063dbb0b6c971568291a8bc19a4ca70d3c185db2d956230dd67429dfc/opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f", size = 149685, upload-time = "2024-08-28T21:35:25.983Z" }, ] [[package]] name = "opentelemetry-util-http" version = "0.48b0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/d7/185c494754340e0a3928fd39fde2616ee78f2c9d66253affaad62d5b7935/opentelemetry_util_http-0.48b0.tar.gz", hash = "sha256:60312015153580cc20f322e5cdc3d3ecad80a71743235bdb77716e742814623c", size = 7863 } +sdist = { url = "https://files.pythonhosted.org/packages/d6/d7/185c494754340e0a3928fd39fde2616ee78f2c9d66253affaad62d5b7935/opentelemetry_util_http-0.48b0.tar.gz", hash = "sha256:60312015153580cc20f322e5cdc3d3ecad80a71743235bdb77716e742814623c", size = 7863, upload-time = "2024-08-28T21:28:27.266Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/2e/36097c0a4d0115b8c7e377c90bab7783ac183bc5cb4071308f8959454311/opentelemetry_util_http-0.48b0-py3-none-any.whl", hash = "sha256:76f598af93aab50328d2a69c786beaedc8b6a7770f7a818cc307eb353debfffb", size = 6946 }, + { url = "https://files.pythonhosted.org/packages/ad/2e/36097c0a4d0115b8c7e377c90bab7783ac183bc5cb4071308f8959454311/opentelemetry_util_http-0.48b0-py3-none-any.whl", hash = "sha256:76f598af93aab50328d2a69c786beaedc8b6a7770f7a818cc307eb353debfffb", size = 6946, upload-time = "2024-08-28T21:27:37.975Z" }, ] [[package]] @@ -4177,9 +4218,9 @@ dependencies = [ { name = "tqdm" }, { name = "uuid6" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/30/af/f6382cea86bdfbfd0f9571960a15301da4a6ecd1506070d9252a0c0a7564/opik-1.8.102.tar.gz", hash = "sha256:c836a113e8b7fdf90770a3854dcc859b3c30d6347383d7c11e52971a530ed2c3", size = 490462 } +sdist = { url = "https://files.pythonhosted.org/packages/30/af/f6382cea86bdfbfd0f9571960a15301da4a6ecd1506070d9252a0c0a7564/opik-1.8.102.tar.gz", hash = "sha256:c836a113e8b7fdf90770a3854dcc859b3c30d6347383d7c11e52971a530ed2c3", size = 490462, upload-time = "2025-11-05T18:54:50.142Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/8b/9b15a01f8360201100b9a5d3e0aeeeda57833fca2b16d34b9fada147fc4b/opik-1.8.102-py3-none-any.whl", hash = "sha256:d8501134bf62bf95443de036f6eaa4f66006f81f9b99e0a8a09e21d8be8c1628", size = 885834 }, + { url = "https://files.pythonhosted.org/packages/b9/8b/9b15a01f8360201100b9a5d3e0aeeeda57833fca2b16d34b9fada147fc4b/opik-1.8.102-py3-none-any.whl", hash = "sha256:d8501134bf62bf95443de036f6eaa4f66006f81f9b99e0a8a09e21d8be8c1628", size = 885834, upload-time = "2025-11-05T18:54:48.22Z" }, ] [[package]] @@ -4189,9 +4230,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/94/ca/d3a2abcf12cc8c18ccac1178ef87ab50a235bf386d2401341776fdad18aa/optype-0.14.0.tar.gz", hash = "sha256:925cf060b7d1337647f880401f6094321e7d8e837533b8e159b9a92afa3157c6", size = 100880 } +sdist = { url = "https://files.pythonhosted.org/packages/94/ca/d3a2abcf12cc8c18ccac1178ef87ab50a235bf386d2401341776fdad18aa/optype-0.14.0.tar.gz", hash = "sha256:925cf060b7d1337647f880401f6094321e7d8e837533b8e159b9a92afa3157c6", size = 100880, upload-time = "2025-10-01T04:49:56.232Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/84/a6/11b0eb65eeafa87260d36858b69ec4e0072d09e37ea6714280960030bc93/optype-0.14.0-py3-none-any.whl", hash = "sha256:50d02edafd04edf2e5e27d6249760a51b2198adb9f6ffd778030b3d2806b026b", size = 89465 }, + { url = "https://files.pythonhosted.org/packages/84/a6/11b0eb65eeafa87260d36858b69ec4e0072d09e37ea6714280960030bc93/optype-0.14.0-py3-none-any.whl", hash = "sha256:50d02edafd04edf2e5e27d6249760a51b2198adb9f6ffd778030b3d2806b026b", size = 89465, upload-time = "2025-10-01T04:49:54.674Z" }, ] [package.optional-dependencies] @@ -4207,56 +4248,56 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/51/c9/fae18fa5d803712d188486f8e86ad4f4e00316793ca19745d7c11092c360/oracledb-3.3.0.tar.gz", hash = "sha256:e830d3544a1578296bcaa54c6e8c8ae10a58c7db467c528c4b27adbf9c8b4cb0", size = 811776 } +sdist = { url = "https://files.pythonhosted.org/packages/51/c9/fae18fa5d803712d188486f8e86ad4f4e00316793ca19745d7c11092c360/oracledb-3.3.0.tar.gz", hash = "sha256:e830d3544a1578296bcaa54c6e8c8ae10a58c7db467c528c4b27adbf9c8b4cb0", size = 811776, upload-time = "2025-07-29T22:34:10.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/35/95d9a502fdc48ce1ef3a513ebd027488353441e15aa0448619abb3d09d32/oracledb-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d9adb74f837838e21898d938e3a725cf73099c65f98b0b34d77146b453e945e0", size = 3963945 }, - { url = "https://files.pythonhosted.org/packages/16/a7/8f1ef447d995bb51d9fdc36356697afeceb603932f16410c12d52b2df1a4/oracledb-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b063d1007882570f170ebde0f364e78d4a70c8f015735cc900663278b9ceef7", size = 2449385 }, - { url = "https://files.pythonhosted.org/packages/b3/fa/6a78480450bc7d256808d0f38ade3385735fb5a90dab662167b4257dcf94/oracledb-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:187728f0a2d161676b8c581a9d8f15d9631a8fea1e628f6d0e9fa2f01280cd22", size = 2634943 }, - { url = "https://files.pythonhosted.org/packages/5b/90/ea32b569a45fb99fac30b96f1ac0fb38b029eeebb78357bc6db4be9dde41/oracledb-3.3.0-cp311-cp311-win32.whl", hash = "sha256:920f14314f3402c5ab98f2efc5932e0547e9c0a4ca9338641357f73844e3e2b1", size = 1483549 }, - { url = "https://files.pythonhosted.org/packages/81/55/ae60f72836eb8531b630299f9ed68df3fe7868c6da16f820a108155a21f9/oracledb-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:825edb97976468db1c7e52c78ba38d75ce7e2b71a2e88f8629bcf02be8e68a8a", size = 1834737 }, - { url = "https://files.pythonhosted.org/packages/08/a8/f6b7809d70e98e113786d5a6f1294da81c046d2fa901ad656669fc5d7fae/oracledb-3.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9d25e37d640872731ac9b73f83cbc5fc4743cd744766bdb250488caf0d7696a8", size = 3943512 }, - { url = "https://files.pythonhosted.org/packages/df/b9/8145ad8991f4864d3de4a911d439e5bc6cdbf14af448f3ab1e846a54210c/oracledb-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0bf7cdc2b668f939aa364f552861bc7a149d7cd3f3794730d43ef07613b2bf9", size = 2276258 }, - { url = "https://files.pythonhosted.org/packages/56/bf/f65635ad5df17d6e4a2083182750bb136ac663ff0e9996ce59d77d200f60/oracledb-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe20540fde64a6987046807ea47af93be918fd70b9766b3eb803c01e6d4202e", size 
= 2458811 }, - { url = "https://files.pythonhosted.org/packages/7d/30/e0c130b6278c10b0e6cd77a3a1a29a785c083c549676cf701c5d180b8e63/oracledb-3.3.0-cp312-cp312-win32.whl", hash = "sha256:db080be9345cbf9506ffdaea3c13d5314605355e76d186ec4edfa49960ffb813", size = 1445525 }, - { url = "https://files.pythonhosted.org/packages/1a/5c/7254f5e1a33a5d6b8bf6813d4f4fdcf5c4166ec8a7af932d987879d5595c/oracledb-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:be81e3afe79f6c8ece79a86d6067ad1572d2992ce1c590a086f3755a09535eb4", size = 1789976 }, + { url = "https://files.pythonhosted.org/packages/3f/35/95d9a502fdc48ce1ef3a513ebd027488353441e15aa0448619abb3d09d32/oracledb-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d9adb74f837838e21898d938e3a725cf73099c65f98b0b34d77146b453e945e0", size = 3963945, upload-time = "2025-07-29T22:34:28.633Z" }, + { url = "https://files.pythonhosted.org/packages/16/a7/8f1ef447d995bb51d9fdc36356697afeceb603932f16410c12d52b2df1a4/oracledb-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b063d1007882570f170ebde0f364e78d4a70c8f015735cc900663278b9ceef7", size = 2449385, upload-time = "2025-07-29T22:34:30.592Z" }, + { url = "https://files.pythonhosted.org/packages/b3/fa/6a78480450bc7d256808d0f38ade3385735fb5a90dab662167b4257dcf94/oracledb-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:187728f0a2d161676b8c581a9d8f15d9631a8fea1e628f6d0e9fa2f01280cd22", size = 2634943, upload-time = "2025-07-29T22:34:33.142Z" }, + { url = "https://files.pythonhosted.org/packages/5b/90/ea32b569a45fb99fac30b96f1ac0fb38b029eeebb78357bc6db4be9dde41/oracledb-3.3.0-cp311-cp311-win32.whl", hash = "sha256:920f14314f3402c5ab98f2efc5932e0547e9c0a4ca9338641357f73844e3e2b1", size = 1483549, upload-time = "2025-07-29T22:34:35.015Z" }, + { url = "https://files.pythonhosted.org/packages/81/55/ae60f72836eb8531b630299f9ed68df3fe7868c6da16f820a108155a21f9/oracledb-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:825edb97976468db1c7e52c78ba38d75ce7e2b71a2e88f8629bcf02be8e68a8a", size = 1834737, upload-time = "2025-07-29T22:34:36.824Z" }, + { url = "https://files.pythonhosted.org/packages/08/a8/f6b7809d70e98e113786d5a6f1294da81c046d2fa901ad656669fc5d7fae/oracledb-3.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9d25e37d640872731ac9b73f83cbc5fc4743cd744766bdb250488caf0d7696a8", size = 3943512, upload-time = "2025-07-29T22:34:39.237Z" }, + { url = "https://files.pythonhosted.org/packages/df/b9/8145ad8991f4864d3de4a911d439e5bc6cdbf14af448f3ab1e846a54210c/oracledb-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0bf7cdc2b668f939aa364f552861bc7a149d7cd3f3794730d43ef07613b2bf9", size = 2276258, upload-time = "2025-07-29T22:34:41.547Z" }, + { url = "https://files.pythonhosted.org/packages/56/bf/f65635ad5df17d6e4a2083182750bb136ac663ff0e9996ce59d77d200f60/oracledb-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe20540fde64a6987046807ea47af93be918fd70b9766b3eb803c01e6d4202e", size = 2458811, upload-time = "2025-07-29T22:34:44.648Z" }, + { url = "https://files.pythonhosted.org/packages/7d/30/e0c130b6278c10b0e6cd77a3a1a29a785c083c549676cf701c5d180b8e63/oracledb-3.3.0-cp312-cp312-win32.whl", hash = "sha256:db080be9345cbf9506ffdaea3c13d5314605355e76d186ec4edfa49960ffb813", size = 1445525, upload-time = "2025-07-29T22:34:46.603Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/5c/7254f5e1a33a5d6b8bf6813d4f4fdcf5c4166ec8a7af932d987879d5595c/oracledb-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:be81e3afe79f6c8ece79a86d6067ad1572d2992ce1c590a086f3755a09535eb4", size = 1789976, upload-time = "2025-07-29T22:34:48.5Z" }, ] [[package]] name = "orjson" version = "3.11.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c6/fe/ed708782d6709cc60eb4c2d8a361a440661f74134675c72990f2c48c785f/orjson-3.11.4.tar.gz", hash = "sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d", size = 5945188 } +sdist = { url = "https://files.pythonhosted.org/packages/c6/fe/ed708782d6709cc60eb4c2d8a361a440661f74134675c72990f2c48c785f/orjson-3.11.4.tar.gz", hash = "sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d", size = 5945188, upload-time = "2025-10-24T15:50:38.027Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/63/1d/1ea6005fffb56715fd48f632611e163d1604e8316a5bad2288bee9a1c9eb/orjson-3.11.4-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e59d23cd93ada23ec59a96f215139753fbfe3a4d989549bcb390f8c00370b39", size = 243498 }, - { url = "https://files.pythonhosted.org/packages/37/d7/ffed10c7da677f2a9da307d491b9eb1d0125b0307019c4ad3d665fd31f4f/orjson-3.11.4-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5c3aedecfc1beb988c27c79d52ebefab93b6c3921dbec361167e6559aba2d36d", size = 128961 }, - { url = "https://files.pythonhosted.org/packages/a2/96/3e4d10a18866d1368f73c8c44b7fe37cc8a15c32f2a7620be3877d4c55a3/orjson-3.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9e5301f1c2caa2a9a4a303480d79c9ad73560b2e7761de742ab39fe59d9175", size = 130321 }, - { url = "https://files.pythonhosted.org/packages/eb/1f/465f66e93f434f968dd74d5b623eb62c657bdba2332f5a8be9f118bb74c7/orjson-3.11.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8873812c164a90a79f65368f8f96817e59e35d0cc02786a5356f0e2abed78040", size = 129207 }, - { url = "https://files.pythonhosted.org/packages/28/43/d1e94837543321c119dff277ae8e348562fe8c0fafbb648ef7cb0c67e521/orjson-3.11.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d7feb0741ebb15204e748f26c9638e6665a5fa93c37a2c73d64f1669b0ddc63", size = 136323 }, - { url = "https://files.pythonhosted.org/packages/bf/04/93303776c8890e422a5847dd012b4853cdd88206b8bbd3edc292c90102d1/orjson-3.11.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ee5487fefee21e6910da4c2ee9eef005bee568a0879834df86f888d2ffbdd9", size = 137440 }, - { url = "https://files.pythonhosted.org/packages/1e/ef/75519d039e5ae6b0f34d0336854d55544ba903e21bf56c83adc51cd8bf82/orjson-3.11.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d40d46f348c0321df01507f92b95a377240c4ec31985225a6668f10e2676f9a", size = 136680 }, - { url = "https://files.pythonhosted.org/packages/b5/18/bf8581eaae0b941b44efe14fee7b7862c3382fbc9a0842132cfc7cf5ecf4/orjson-3.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95713e5fc8af84d8edc75b785d2386f653b63d62b16d681687746734b4dfc0be", size = 136160 }, - { url = "https://files.pythonhosted.org/packages/c4/35/a6d582766d351f87fc0a22ad740a641b0a8e6fc47515e8614d2e4790ae10/orjson-3.11.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad73ede24f9083614d6c4ca9a85fe70e33be7bf047ec586ee2363bc7418fe4d7", size = 140318 }, - { url = 
"https://files.pythonhosted.org/packages/76/b3/5a4801803ab2e2e2d703bce1a56540d9f99a9143fbec7bf63d225044fef8/orjson-3.11.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:842289889de515421f3f224ef9c1f1efb199a32d76d8d2ca2706fa8afe749549", size = 406330 }, - { url = "https://files.pythonhosted.org/packages/80/55/a8f682f64833e3a649f620eafefee175cbfeb9854fc5b710b90c3bca45df/orjson-3.11.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3b2427ed5791619851c52a1261b45c233930977e7de8cf36de05636c708fa905", size = 149580 }, - { url = "https://files.pythonhosted.org/packages/ad/e4/c132fa0c67afbb3eb88274fa98df9ac1f631a675e7877037c611805a4413/orjson-3.11.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c36e524af1d29982e9b190573677ea02781456b2e537d5840e4538a5ec41907", size = 139846 }, - { url = "https://files.pythonhosted.org/packages/54/06/dc3491489efd651fef99c5908e13951abd1aead1257c67f16135f95ce209/orjson-3.11.4-cp311-cp311-win32.whl", hash = "sha256:87255b88756eab4a68ec61837ca754e5d10fa8bc47dc57f75cedfeaec358d54c", size = 135781 }, - { url = "https://files.pythonhosted.org/packages/79/b7/5e5e8d77bd4ea02a6ac54c42c818afb01dd31961be8a574eb79f1d2cfb1e/orjson-3.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:e2d5d5d798aba9a0e1fede8d853fa899ce2cb930ec0857365f700dffc2c7af6a", size = 131391 }, - { url = "https://files.pythonhosted.org/packages/0f/dc/9484127cc1aa213be398ed735f5f270eedcb0c0977303a6f6ddc46b60204/orjson-3.11.4-cp311-cp311-win_arm64.whl", hash = "sha256:6bb6bb41b14c95d4f2702bce9975fda4516f1db48e500102fc4d8119032ff045", size = 126252 }, - { url = "https://files.pythonhosted.org/packages/63/51/6b556192a04595b93e277a9ff71cd0cc06c21a7df98bcce5963fa0f5e36f/orjson-3.11.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4371de39319d05d3f482f372720b841c841b52f5385bd99c61ed69d55d9ab50", size = 243571 }, - { url = "https://files.pythonhosted.org/packages/1c/2c/2602392ddf2601d538ff11848b98621cd465d1a1ceb9db9e8043181f2f7b/orjson-3.11.4-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:e41fd3b3cac850eaae78232f37325ed7d7436e11c471246b87b2cd294ec94853", size = 128891 }, - { url = "https://files.pythonhosted.org/packages/4e/47/bf85dcf95f7a3a12bf223394a4f849430acd82633848d52def09fa3f46ad/orjson-3.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600e0e9ca042878c7fdf189cf1b028fe2c1418cc9195f6cb9824eb6ed99cb938", size = 130137 }, - { url = "https://files.pythonhosted.org/packages/b4/4d/a0cb31007f3ab6f1fd2a1b17057c7c349bc2baf8921a85c0180cc7be8011/orjson-3.11.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7bbf9b333f1568ef5da42bc96e18bf30fd7f8d54e9ae066d711056add508e415", size = 129152 }, - { url = "https://files.pythonhosted.org/packages/f7/ef/2811def7ce3d8576b19e3929fff8f8f0d44bc5eb2e0fdecb2e6e6cc6c720/orjson-3.11.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806363144bb6e7297b8e95870e78d30a649fdc4e23fc84daa80c8ebd366ce44", size = 136834 }, - { url = "https://files.pythonhosted.org/packages/00/d4/9aee9e54f1809cec8ed5abd9bc31e8a9631d19460e3b8470145d25140106/orjson-3.11.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad355e8308493f527d41154e9053b86a5be892b3b359a5c6d5d95cda23601cb2", size = 137519 }, - { url = "https://files.pythonhosted.org/packages/db/ea/67bfdb5465d5679e8ae8d68c11753aaf4f47e3e7264bad66dc2f2249e643/orjson-3.11.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c8a7517482667fb9f0ff1b2f16fe5829296ed7a655d04d68cd9711a4d8a4e708", size = 136749 }, - { url = "https://files.pythonhosted.org/packages/01/7e/62517dddcfce6d53a39543cd74d0dccfcbdf53967017c58af68822100272/orjson-3.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97eb5942c7395a171cbfecc4ef6701fc3c403e762194683772df4c54cfbb2210", size = 136325 }, - { url = "https://files.pythonhosted.org/packages/18/ae/40516739f99ab4c7ec3aaa5cc242d341fcb03a45d89edeeaabc5f69cb2cf/orjson-3.11.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:149d95d5e018bdd822e3f38c103b1a7c91f88d38a88aada5c4e9b3a73a244241", size = 140204 }, - { url = "https://files.pythonhosted.org/packages/82/18/ff5734365623a8916e3a4037fcef1cd1782bfc14cf0992afe7940c5320bf/orjson-3.11.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:624f3951181eb46fc47dea3d221554e98784c823e7069edb5dbd0dc826ac909b", size = 406242 }, - { url = "https://files.pythonhosted.org/packages/e1/43/96436041f0a0c8c8deca6a05ebeaf529bf1de04839f93ac5e7c479807aec/orjson-3.11.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:03bfa548cf35e3f8b3a96c4e8e41f753c686ff3d8e182ce275b1751deddab58c", size = 150013 }, - { url = "https://files.pythonhosted.org/packages/1b/48/78302d98423ed8780479a1e682b9aecb869e8404545d999d34fa486e573e/orjson-3.11.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:525021896afef44a68148f6ed8a8bf8375553d6066c7f48537657f64823565b9", size = 139951 }, - { url = "https://files.pythonhosted.org/packages/4a/7b/ad613fdcdaa812f075ec0875143c3d37f8654457d2af17703905425981bf/orjson-3.11.4-cp312-cp312-win32.whl", hash = "sha256:b58430396687ce0f7d9eeb3dd47761ca7d8fda8e9eb92b3077a7a353a75efefa", size = 136049 }, - { url = "https://files.pythonhosted.org/packages/b9/3c/9cf47c3ff5f39b8350fb21ba65d789b6a1129d4cbb3033ba36c8a9023520/orjson-3.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:c6dbf422894e1e3c80a177133c0dda260f81428f9de16d61041949f6a2e5c140", size = 131461 }, - { url = "https://files.pythonhosted.org/packages/c6/3b/e2425f61e5825dc5b08c2a5a2b3af387eaaca22a12b9c8c01504f8614c36/orjson-3.11.4-cp312-cp312-win_arm64.whl", hash = "sha256:d38d2bc06d6415852224fcc9c0bfa834c25431e466dc319f0edd56cca81aa96e", size = 126167 }, + { url = "https://files.pythonhosted.org/packages/63/1d/1ea6005fffb56715fd48f632611e163d1604e8316a5bad2288bee9a1c9eb/orjson-3.11.4-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e59d23cd93ada23ec59a96f215139753fbfe3a4d989549bcb390f8c00370b39", size = 243498, upload-time = "2025-10-24T15:48:48.101Z" }, + { url = "https://files.pythonhosted.org/packages/37/d7/ffed10c7da677f2a9da307d491b9eb1d0125b0307019c4ad3d665fd31f4f/orjson-3.11.4-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5c3aedecfc1beb988c27c79d52ebefab93b6c3921dbec361167e6559aba2d36d", size = 128961, upload-time = "2025-10-24T15:48:49.571Z" }, + { url = "https://files.pythonhosted.org/packages/a2/96/3e4d10a18866d1368f73c8c44b7fe37cc8a15c32f2a7620be3877d4c55a3/orjson-3.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9e5301f1c2caa2a9a4a303480d79c9ad73560b2e7761de742ab39fe59d9175", size = 130321, upload-time = "2025-10-24T15:48:50.713Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1f/465f66e93f434f968dd74d5b623eb62c657bdba2332f5a8be9f118bb74c7/orjson-3.11.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8873812c164a90a79f65368f8f96817e59e35d0cc02786a5356f0e2abed78040", size = 129207, upload-time = 
"2025-10-24T15:48:52.193Z" }, + { url = "https://files.pythonhosted.org/packages/28/43/d1e94837543321c119dff277ae8e348562fe8c0fafbb648ef7cb0c67e521/orjson-3.11.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d7feb0741ebb15204e748f26c9638e6665a5fa93c37a2c73d64f1669b0ddc63", size = 136323, upload-time = "2025-10-24T15:48:54.806Z" }, + { url = "https://files.pythonhosted.org/packages/bf/04/93303776c8890e422a5847dd012b4853cdd88206b8bbd3edc292c90102d1/orjson-3.11.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ee5487fefee21e6910da4c2ee9eef005bee568a0879834df86f888d2ffbdd9", size = 137440, upload-time = "2025-10-24T15:48:56.326Z" }, + { url = "https://files.pythonhosted.org/packages/1e/ef/75519d039e5ae6b0f34d0336854d55544ba903e21bf56c83adc51cd8bf82/orjson-3.11.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d40d46f348c0321df01507f92b95a377240c4ec31985225a6668f10e2676f9a", size = 136680, upload-time = "2025-10-24T15:48:57.476Z" }, + { url = "https://files.pythonhosted.org/packages/b5/18/bf8581eaae0b941b44efe14fee7b7862c3382fbc9a0842132cfc7cf5ecf4/orjson-3.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95713e5fc8af84d8edc75b785d2386f653b63d62b16d681687746734b4dfc0be", size = 136160, upload-time = "2025-10-24T15:48:59.631Z" }, + { url = "https://files.pythonhosted.org/packages/c4/35/a6d582766d351f87fc0a22ad740a641b0a8e6fc47515e8614d2e4790ae10/orjson-3.11.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad73ede24f9083614d6c4ca9a85fe70e33be7bf047ec586ee2363bc7418fe4d7", size = 140318, upload-time = "2025-10-24T15:49:00.834Z" }, + { url = "https://files.pythonhosted.org/packages/76/b3/5a4801803ab2e2e2d703bce1a56540d9f99a9143fbec7bf63d225044fef8/orjson-3.11.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:842289889de515421f3f224ef9c1f1efb199a32d76d8d2ca2706fa8afe749549", size = 406330, upload-time = "2025-10-24T15:49:02.327Z" }, + { url = "https://files.pythonhosted.org/packages/80/55/a8f682f64833e3a649f620eafefee175cbfeb9854fc5b710b90c3bca45df/orjson-3.11.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3b2427ed5791619851c52a1261b45c233930977e7de8cf36de05636c708fa905", size = 149580, upload-time = "2025-10-24T15:49:03.517Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e4/c132fa0c67afbb3eb88274fa98df9ac1f631a675e7877037c611805a4413/orjson-3.11.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c36e524af1d29982e9b190573677ea02781456b2e537d5840e4538a5ec41907", size = 139846, upload-time = "2025-10-24T15:49:04.761Z" }, + { url = "https://files.pythonhosted.org/packages/54/06/dc3491489efd651fef99c5908e13951abd1aead1257c67f16135f95ce209/orjson-3.11.4-cp311-cp311-win32.whl", hash = "sha256:87255b88756eab4a68ec61837ca754e5d10fa8bc47dc57f75cedfeaec358d54c", size = 135781, upload-time = "2025-10-24T15:49:05.969Z" }, + { url = "https://files.pythonhosted.org/packages/79/b7/5e5e8d77bd4ea02a6ac54c42c818afb01dd31961be8a574eb79f1d2cfb1e/orjson-3.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:e2d5d5d798aba9a0e1fede8d853fa899ce2cb930ec0857365f700dffc2c7af6a", size = 131391, upload-time = "2025-10-24T15:49:07.355Z" }, + { url = "https://files.pythonhosted.org/packages/0f/dc/9484127cc1aa213be398ed735f5f270eedcb0c0977303a6f6ddc46b60204/orjson-3.11.4-cp311-cp311-win_arm64.whl", hash = "sha256:6bb6bb41b14c95d4f2702bce9975fda4516f1db48e500102fc4d8119032ff045", size = 126252, upload-time = "2025-10-24T15:49:08.869Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/51/6b556192a04595b93e277a9ff71cd0cc06c21a7df98bcce5963fa0f5e36f/orjson-3.11.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4371de39319d05d3f482f372720b841c841b52f5385bd99c61ed69d55d9ab50", size = 243571, upload-time = "2025-10-24T15:49:10.008Z" }, + { url = "https://files.pythonhosted.org/packages/1c/2c/2602392ddf2601d538ff11848b98621cd465d1a1ceb9db9e8043181f2f7b/orjson-3.11.4-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:e41fd3b3cac850eaae78232f37325ed7d7436e11c471246b87b2cd294ec94853", size = 128891, upload-time = "2025-10-24T15:49:11.297Z" }, + { url = "https://files.pythonhosted.org/packages/4e/47/bf85dcf95f7a3a12bf223394a4f849430acd82633848d52def09fa3f46ad/orjson-3.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600e0e9ca042878c7fdf189cf1b028fe2c1418cc9195f6cb9824eb6ed99cb938", size = 130137, upload-time = "2025-10-24T15:49:12.544Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4d/a0cb31007f3ab6f1fd2a1b17057c7c349bc2baf8921a85c0180cc7be8011/orjson-3.11.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7bbf9b333f1568ef5da42bc96e18bf30fd7f8d54e9ae066d711056add508e415", size = 129152, upload-time = "2025-10-24T15:49:13.754Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ef/2811def7ce3d8576b19e3929fff8f8f0d44bc5eb2e0fdecb2e6e6cc6c720/orjson-3.11.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806363144bb6e7297b8e95870e78d30a649fdc4e23fc84daa80c8ebd366ce44", size = 136834, upload-time = "2025-10-24T15:49:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/00/d4/9aee9e54f1809cec8ed5abd9bc31e8a9631d19460e3b8470145d25140106/orjson-3.11.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad355e8308493f527d41154e9053b86a5be892b3b359a5c6d5d95cda23601cb2", size = 137519, upload-time = "2025-10-24T15:49:16.557Z" }, + { url = "https://files.pythonhosted.org/packages/db/ea/67bfdb5465d5679e8ae8d68c11753aaf4f47e3e7264bad66dc2f2249e643/orjson-3.11.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a7517482667fb9f0ff1b2f16fe5829296ed7a655d04d68cd9711a4d8a4e708", size = 136749, upload-time = "2025-10-24T15:49:17.796Z" }, + { url = "https://files.pythonhosted.org/packages/01/7e/62517dddcfce6d53a39543cd74d0dccfcbdf53967017c58af68822100272/orjson-3.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97eb5942c7395a171cbfecc4ef6701fc3c403e762194683772df4c54cfbb2210", size = 136325, upload-time = "2025-10-24T15:49:19.347Z" }, + { url = "https://files.pythonhosted.org/packages/18/ae/40516739f99ab4c7ec3aaa5cc242d341fcb03a45d89edeeaabc5f69cb2cf/orjson-3.11.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:149d95d5e018bdd822e3f38c103b1a7c91f88d38a88aada5c4e9b3a73a244241", size = 140204, upload-time = "2025-10-24T15:49:20.545Z" }, + { url = "https://files.pythonhosted.org/packages/82/18/ff5734365623a8916e3a4037fcef1cd1782bfc14cf0992afe7940c5320bf/orjson-3.11.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:624f3951181eb46fc47dea3d221554e98784c823e7069edb5dbd0dc826ac909b", size = 406242, upload-time = "2025-10-24T15:49:21.884Z" }, + { url = "https://files.pythonhosted.org/packages/e1/43/96436041f0a0c8c8deca6a05ebeaf529bf1de04839f93ac5e7c479807aec/orjson-3.11.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:03bfa548cf35e3f8b3a96c4e8e41f753c686ff3d8e182ce275b1751deddab58c", size = 150013, 
upload-time = "2025-10-24T15:49:23.185Z" }, + { url = "https://files.pythonhosted.org/packages/1b/48/78302d98423ed8780479a1e682b9aecb869e8404545d999d34fa486e573e/orjson-3.11.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:525021896afef44a68148f6ed8a8bf8375553d6066c7f48537657f64823565b9", size = 139951, upload-time = "2025-10-24T15:49:24.428Z" }, + { url = "https://files.pythonhosted.org/packages/4a/7b/ad613fdcdaa812f075ec0875143c3d37f8654457d2af17703905425981bf/orjson-3.11.4-cp312-cp312-win32.whl", hash = "sha256:b58430396687ce0f7d9eeb3dd47761ca7d8fda8e9eb92b3077a7a353a75efefa", size = 136049, upload-time = "2025-10-24T15:49:25.973Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3c/9cf47c3ff5f39b8350fb21ba65d789b6a1129d4cbb3033ba36c8a9023520/orjson-3.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:c6dbf422894e1e3c80a177133c0dda260f81428f9de16d61041949f6a2e5c140", size = 131461, upload-time = "2025-10-24T15:49:27.259Z" }, + { url = "https://files.pythonhosted.org/packages/c6/3b/e2425f61e5825dc5b08c2a5a2b3af387eaaca22a12b9c8c01504f8614c36/orjson-3.11.4-cp312-cp312-win_arm64.whl", hash = "sha256:d38d2bc06d6415852224fcc9c0bfa834c25431e466dc319f0edd56cca81aa96e", size = 126167, upload-time = "2025-10-24T15:49:28.511Z" }, ] [[package]] @@ -4271,24 +4312,24 @@ dependencies = [ { name = "requests" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/61/ce/d23a9d44268dc992ae1a878d24341dddaea4de4ae374c261209bb6e9554b/oss2-2.18.5.tar.gz", hash = "sha256:555c857f4441ae42a2c0abab8fc9482543fba35d65a4a4be73101c959a2b4011", size = 283388 } +sdist = { url = "https://files.pythonhosted.org/packages/61/ce/d23a9d44268dc992ae1a878d24341dddaea4de4ae374c261209bb6e9554b/oss2-2.18.5.tar.gz", hash = "sha256:555c857f4441ae42a2c0abab8fc9482543fba35d65a4a4be73101c959a2b4011", size = 283388, upload-time = "2024-04-29T12:49:07.686Z" } [[package]] name = "overrides" version = "7.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812 } +sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812, upload-time = "2024-01-27T21:01:33.423Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832 }, + { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832, upload-time = "2024-01-27T21:01:31.393Z" }, ] [[package]] name = "packaging" version = "23.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fb/2b/9b9c33ffed44ee921d0967086d653047286054117d584f1b1a7c22ceaf7b/packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", size = 146714 } +sdist = { url = 
"https://files.pythonhosted.org/packages/fb/2b/9b9c33ffed44ee921d0967086d653047286054117d584f1b1a7c22ceaf7b/packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", size = 146714, upload-time = "2023-10-01T13:50:05.279Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7", size = 53011 }, + { url = "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7", size = 53011, upload-time = "2023-10-01T13:50:03.745Z" }, ] [[package]] @@ -4301,22 +4342,22 @@ dependencies = [ { name = "pytz" }, { name = "tzdata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 } +sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213, upload-time = "2024-09-20T13:10:04.827Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/44/d9502bf0ed197ba9bf1103c9867d5904ddcaf869e52329787fc54ed70cc8/pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039", size = 12602222 }, - { url = "https://files.pythonhosted.org/packages/52/11/9eac327a38834f162b8250aab32a6781339c69afe7574368fffe46387edf/pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd", size = 11321274 }, - { url = "https://files.pythonhosted.org/packages/45/fb/c4beeb084718598ba19aa9f5abbc8aed8b42f90930da861fcb1acdb54c3a/pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698", size = 15579836 }, - { url = "https://files.pythonhosted.org/packages/cd/5f/4dba1d39bb9c38d574a9a22548c540177f78ea47b32f99c0ff2ec499fac5/pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc", size = 13058505 }, - { url = "https://files.pythonhosted.org/packages/b9/57/708135b90391995361636634df1f1130d03ba456e95bcf576fada459115a/pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3", size = 16744420 }, - { url = "https://files.pythonhosted.org/packages/86/4a/03ed6b7ee323cf30404265c284cee9c65c56a212e0a08d9ee06984ba2240/pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32", size = 14440457 }, - { url = "https://files.pythonhosted.org/packages/ed/8c/87ddf1fcb55d11f9f847e3c69bb1c6f8e46e2f40ab1a2d2abadb2401b007/pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5", size = 11617166 }, - { url = 
"https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893 }, - { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475 }, - { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645 }, - { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445 }, - { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235 }, - { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756 }, - { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248 }, + { url = "https://files.pythonhosted.org/packages/a8/44/d9502bf0ed197ba9bf1103c9867d5904ddcaf869e52329787fc54ed70cc8/pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039", size = 12602222, upload-time = "2024-09-20T13:08:56.254Z" }, + { url = "https://files.pythonhosted.org/packages/52/11/9eac327a38834f162b8250aab32a6781339c69afe7574368fffe46387edf/pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd", size = 11321274, upload-time = "2024-09-20T13:08:58.645Z" }, + { url = "https://files.pythonhosted.org/packages/45/fb/c4beeb084718598ba19aa9f5abbc8aed8b42f90930da861fcb1acdb54c3a/pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698", size = 15579836, upload-time = "2024-09-20T19:01:57.571Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5f/4dba1d39bb9c38d574a9a22548c540177f78ea47b32f99c0ff2ec499fac5/pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc", size = 13058505, upload-time = "2024-09-20T13:09:01.501Z" }, + { url = "https://files.pythonhosted.org/packages/b9/57/708135b90391995361636634df1f1130d03ba456e95bcf576fada459115a/pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3", size = 16744420, upload-time = "2024-09-20T19:02:00.678Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/4a/03ed6b7ee323cf30404265c284cee9c65c56a212e0a08d9ee06984ba2240/pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32", size = 14440457, upload-time = "2024-09-20T13:09:04.105Z" }, + { url = "https://files.pythonhosted.org/packages/ed/8c/87ddf1fcb55d11f9f847e3c69bb1c6f8e46e2f40ab1a2d2abadb2401b007/pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5", size = 11617166, upload-time = "2024-09-20T13:09:06.917Z" }, + { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893, upload-time = "2024-09-20T13:09:09.655Z" }, + { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475, upload-time = "2024-09-20T13:09:14.718Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645, upload-time = "2024-09-20T19:02:03.88Z" }, + { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445, upload-time = "2024-09-20T13:09:17.621Z" }, + { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235, upload-time = "2024-09-20T19:02:07.094Z" }, + { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756, upload-time = "2024-09-20T13:09:20.474Z" }, + { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248, upload-time = "2024-09-20T13:09:23.137Z" }, ] [package.optional-dependencies] @@ -4346,18 +4387,18 @@ dependencies = [ { name = "numpy" }, { name = "types-pytz" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5f/0d/5fe7f7f3596eb1c2526fea151e9470f86b379183d8b9debe44b2098651ca/pandas_stubs-2.2.3.250527.tar.gz", hash = "sha256:e2d694c4e72106055295ad143664e5c99e5815b07190d1ff85b73b13ff019e63", size = 106312 } +sdist = { url = "https://files.pythonhosted.org/packages/5f/0d/5fe7f7f3596eb1c2526fea151e9470f86b379183d8b9debe44b2098651ca/pandas_stubs-2.2.3.250527.tar.gz", hash = "sha256:e2d694c4e72106055295ad143664e5c99e5815b07190d1ff85b73b13ff019e63", size = 106312, upload-time = "2025-05-27T15:24:29.716Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ec/f8/46141ba8c9d7064dc5008bfb4a6ae5bd3c30e4c61c28b5c5ed485bf358ba/pandas_stubs-2.2.3.250527-py3-none-any.whl", hash = "sha256:cd0a49a95b8c5f944e605be711042a4dd8550e2c559b43d70ba2c4b524b66163", size = 159683 }, + { url = "https://files.pythonhosted.org/packages/ec/f8/46141ba8c9d7064dc5008bfb4a6ae5bd3c30e4c61c28b5c5ed485bf358ba/pandas_stubs-2.2.3.250527-py3-none-any.whl", hash = "sha256:cd0a49a95b8c5f944e605be711042a4dd8550e2c559b43d70ba2c4b524b66163", size = 159683, upload-time = "2025-05-27T15:24:28.4Z" }, ] [[package]] name = "pathspec" version = "0.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, ] [[package]] @@ -4368,9 +4409,9 @@ dependencies = [ { name = "charset-normalizer" }, { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/46/5223d613ac4963e1f7c07b2660fe0e9e770102ec6bda8c038400113fb215/pdfminer_six-20250506.tar.gz", hash = "sha256:b03cc8df09cf3c7aba8246deae52e0bca7ebb112a38895b5e1d4f5dd2b8ca2e7", size = 7387678 } +sdist = { url = "https://files.pythonhosted.org/packages/78/46/5223d613ac4963e1f7c07b2660fe0e9e770102ec6bda8c038400113fb215/pdfminer_six-20250506.tar.gz", hash = "sha256:b03cc8df09cf3c7aba8246deae52e0bca7ebb112a38895b5e1d4f5dd2b8ca2e7", size = 7387678, upload-time = "2025-05-06T16:17:00.787Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/16/7a432c0101fa87457e75cb12c879e1749c5870a786525e2e0f42871d6462/pdfminer_six-20250506-py3-none-any.whl", hash = "sha256:d81ad173f62e5f841b53a8ba63af1a4a355933cfc0ffabd608e568b9193909e3", size = 5620187 }, + { url = "https://files.pythonhosted.org/packages/73/16/7a432c0101fa87457e75cb12c879e1749c5870a786525e2e0f42871d6462/pdfminer_six-20250506-py3-none-any.whl", hash = "sha256:d81ad173f62e5f841b53a8ba63af1a4a355933cfc0ffabd608e568b9193909e3", size = 5620187, upload-time = "2025-05-06T16:16:58.669Z" }, ] [[package]] @@ -4381,9 +4422,9 @@ dependencies = [ { name = "numpy" }, { name = "toml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/01/09/c0be8f54386367159fd22495635fba65ac6bbc436a34502bc2849d89f6ab/pgvecto_rs-0.2.2.tar.gz", hash = "sha256:edaa913d1747152b1407cbdf6337d51ac852547b54953ef38997433be3a75a3b", size = 28561 } +sdist = { url = "https://files.pythonhosted.org/packages/01/09/c0be8f54386367159fd22495635fba65ac6bbc436a34502bc2849d89f6ab/pgvecto_rs-0.2.2.tar.gz", hash = "sha256:edaa913d1747152b1407cbdf6337d51ac852547b54953ef38997433be3a75a3b", 
size = 28561, upload-time = "2024-10-08T02:01:15.678Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/dc/a39ceb4fe4b72f889228119b91e0ef7fcaaf9ec662ab19acdacb74cd5eaf/pgvecto_rs-0.2.2-py3-none-any.whl", hash = "sha256:5f3f7f806813de408c45dc10a9eb418b986c4d7b7723e8fce9298f2f7d8fbbd5", size = 30779 }, + { url = "https://files.pythonhosted.org/packages/ba/dc/a39ceb4fe4b72f889228119b91e0ef7fcaaf9ec662ab19acdacb74cd5eaf/pgvecto_rs-0.2.2-py3-none-any.whl", hash = "sha256:5f3f7f806813de408c45dc10a9eb418b986c4d7b7723e8fce9298f2f7d8fbbd5", size = 30779, upload-time = "2024-10-08T02:01:14.669Z" }, ] [package.optional-dependencies] @@ -4399,71 +4440,71 @@ dependencies = [ { name = "numpy" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/29/bb/4686b1090a7c68fa367e981130a074dc6c1236571d914ffa6e05c882b59d/pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b", size = 9638 }, + { url = "https://files.pythonhosted.org/packages/29/bb/4686b1090a7c68fa367e981130a074dc6c1236571d914ffa6e05c882b59d/pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b", size = 9638, upload-time = "2024-02-07T19:35:03.8Z" }, ] [[package]] name = "pillow" version = "12.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828 } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828, upload-time = "2025-10-15T18:24:14.008Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/5a/a2f6773b64edb921a756eb0729068acad9fc5208a53f4a349396e9436721/pillow-12.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0fd00cac9c03256c8b2ff58f162ebcd2587ad3e1f2e397eab718c47e24d231cc", size = 5289798 }, - { url = "https://files.pythonhosted.org/packages/2e/05/069b1f8a2e4b5a37493da6c5868531c3f77b85e716ad7a590ef87d58730d/pillow-12.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3475b96f5908b3b16c47533daaa87380c491357d197564e0ba34ae75c0f3257", size = 4650589 }, - { url = "https://files.pythonhosted.org/packages/61/e3/2c820d6e9a36432503ead175ae294f96861b07600a7156154a086ba7111a/pillow-12.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:110486b79f2d112cf6add83b28b627e369219388f64ef2f960fef9ebaf54c642", size = 6230472 }, - { url = "https://files.pythonhosted.org/packages/4f/89/63427f51c64209c5e23d4d52071c8d0f21024d3a8a487737caaf614a5795/pillow-12.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5269cc1caeedb67e6f7269a42014f381f45e2e7cd42d834ede3c703a1d915fe3", size = 8033887 }, - { url = "https://files.pythonhosted.org/packages/f6/1b/c9711318d4901093c15840f268ad649459cd81984c9ec9887756cca049a5/pillow-12.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa5129de4e174daccbc59d0a3b6d20eaf24417d59851c07ebb37aeb02947987c", size = 6343964 }, - { url = "https://files.pythonhosted.org/packages/41/1e/db9470f2d030b4995083044cd8738cdd1bf773106819f6d8ba12597d5352/pillow-12.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bee2a6db3a7242ea309aa7ee8e2780726fed67ff4e5b40169f2c940e7eb09227", size = 7034756 }, - { url = "https://files.pythonhosted.org/packages/cc/b0/6177a8bdd5ee4ed87cba2de5a3cc1db55ffbbec6176784ce5bb75aa96798/pillow-12.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:90387104ee8400a7b4598253b4c406f8958f59fcf983a6cea2b50d59f7d63d0b", size = 6458075 }, - { url = "https://files.pythonhosted.org/packages/bc/5e/61537aa6fa977922c6a03253a0e727e6e4a72381a80d63ad8eec350684f2/pillow-12.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc91a56697869546d1b8f0a3ff35224557ae7f881050e99f615e0119bf934b4e", size = 7125955 }, - { url = "https://files.pythonhosted.org/packages/1f/3d/d5033539344ee3cbd9a4d69e12e63ca3a44a739eb2d4c8da350a3d38edd7/pillow-12.0.0-cp311-cp311-win32.whl", hash = "sha256:27f95b12453d165099c84f8a8bfdfd46b9e4bda9e0e4b65f0635430027f55739", size = 6298440 }, - { url = "https://files.pythonhosted.org/packages/4d/42/aaca386de5cc8bd8a0254516957c1f265e3521c91515b16e286c662854c4/pillow-12.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b583dc9070312190192631373c6c8ed277254aa6e6084b74bdd0a6d3b221608e", size = 6999256 }, - { url = "https://files.pythonhosted.org/packages/ba/f1/9197c9c2d5708b785f631a6dfbfa8eb3fb9672837cb92ae9af812c13b4ed/pillow-12.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:759de84a33be3b178a64c8ba28ad5c135900359e85fb662bc6e403ad4407791d", size = 2436025 }, - { url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377 }, - { url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343 }, - { url = "https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981 }, - { url = "https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399 }, - { url = "https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740 }, - { url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201 }, - { url = "https://files.pythonhosted.org/packages/dc/4d/435c8ac688c54d11755aedfdd9f29c9eeddf68d150fe42d1d3dbd2365149/pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79", size = 6462334 }, - { url = 
"https://files.pythonhosted.org/packages/2b/f2/ad34167a8059a59b8ad10bc5c72d4d9b35acc6b7c0877af8ac885b5f2044/pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba", size = 7134162 }, - { url = "https://files.pythonhosted.org/packages/0c/b1/a7391df6adacf0a5c2cf6ac1cf1fcc1369e7d439d28f637a847f8803beb3/pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0", size = 6298769 }, - { url = "https://files.pythonhosted.org/packages/a2/0b/d87733741526541c909bbf159e338dcace4f982daac6e5a8d6be225ca32d/pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a", size = 7001107 }, - { url = "https://files.pythonhosted.org/packages/bc/96/aaa61ce33cc98421fb6088af2a03be4157b1e7e0e87087c888e2370a7f45/pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad", size = 2436012 }, - { url = "https://files.pythonhosted.org/packages/1d/b3/582327e6c9f86d037b63beebe981425d6811104cb443e8193824ef1a2f27/pillow-12.0.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b22bd8c974942477156be55a768f7aa37c46904c175be4e158b6a86e3a6b7ca8", size = 5215068 }, - { url = "https://files.pythonhosted.org/packages/fd/d6/67748211d119f3b6540baf90f92fae73ae51d5217b171b0e8b5f7e5d558f/pillow-12.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:805ebf596939e48dbb2e4922a1d3852cfc25c38160751ce02da93058b48d252a", size = 4614994 }, - { url = "https://files.pythonhosted.org/packages/2d/e1/f8281e5d844c41872b273b9f2c34a4bf64ca08905668c8ae730eedc7c9fa/pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cae81479f77420d217def5f54b5b9d279804d17e982e0f2fa19b1d1e14ab5197", size = 5246639 }, - { url = "https://files.pythonhosted.org/packages/94/5a/0d8ab8ffe8a102ff5df60d0de5af309015163bf710c7bb3e8311dd3b3ad0/pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aeaefa96c768fc66818730b952a862235d68825c178f1b3ffd4efd7ad2edcb7c", size = 6986839 }, - { url = "https://files.pythonhosted.org/packages/20/2e/3434380e8110b76cd9eb00a363c484b050f949b4bbe84ba770bb8508a02c/pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09f2d0abef9e4e2f349305a4f8cc784a8a6c2f58a8c4892eea13b10a943bd26e", size = 5313505 }, - { url = "https://files.pythonhosted.org/packages/57/ca/5a9d38900d9d74785141d6580950fe705de68af735ff6e727cb911b64740/pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bdee52571a343d721fb2eb3b090a82d959ff37fc631e3f70422e0c2e029f3e76", size = 5963654 }, - { url = "https://files.pythonhosted.org/packages/95/7e/f896623c3c635a90537ac093c6a618ebe1a90d87206e42309cb5d98a1b9e/pillow-12.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b290fd8aa38422444d4b50d579de197557f182ef1068b75f5aa8558638b8d0a5", size = 6997850 }, + { url = "https://files.pythonhosted.org/packages/0e/5a/a2f6773b64edb921a756eb0729068acad9fc5208a53f4a349396e9436721/pillow-12.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0fd00cac9c03256c8b2ff58f162ebcd2587ad3e1f2e397eab718c47e24d231cc", size = 5289798, upload-time = "2025-10-15T18:21:47.763Z" }, + { url = "https://files.pythonhosted.org/packages/2e/05/069b1f8a2e4b5a37493da6c5868531c3f77b85e716ad7a590ef87d58730d/pillow-12.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:a3475b96f5908b3b16c47533daaa87380c491357d197564e0ba34ae75c0f3257", size = 4650589, upload-time = "2025-10-15T18:21:49.515Z" }, + { url = "https://files.pythonhosted.org/packages/61/e3/2c820d6e9a36432503ead175ae294f96861b07600a7156154a086ba7111a/pillow-12.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:110486b79f2d112cf6add83b28b627e369219388f64ef2f960fef9ebaf54c642", size = 6230472, upload-time = "2025-10-15T18:21:51.052Z" }, + { url = "https://files.pythonhosted.org/packages/4f/89/63427f51c64209c5e23d4d52071c8d0f21024d3a8a487737caaf614a5795/pillow-12.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5269cc1caeedb67e6f7269a42014f381f45e2e7cd42d834ede3c703a1d915fe3", size = 8033887, upload-time = "2025-10-15T18:21:52.604Z" }, + { url = "https://files.pythonhosted.org/packages/f6/1b/c9711318d4901093c15840f268ad649459cd81984c9ec9887756cca049a5/pillow-12.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa5129de4e174daccbc59d0a3b6d20eaf24417d59851c07ebb37aeb02947987c", size = 6343964, upload-time = "2025-10-15T18:21:54.619Z" }, + { url = "https://files.pythonhosted.org/packages/41/1e/db9470f2d030b4995083044cd8738cdd1bf773106819f6d8ba12597d5352/pillow-12.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bee2a6db3a7242ea309aa7ee8e2780726fed67ff4e5b40169f2c940e7eb09227", size = 7034756, upload-time = "2025-10-15T18:21:56.151Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b0/6177a8bdd5ee4ed87cba2de5a3cc1db55ffbbec6176784ce5bb75aa96798/pillow-12.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:90387104ee8400a7b4598253b4c406f8958f59fcf983a6cea2b50d59f7d63d0b", size = 6458075, upload-time = "2025-10-15T18:21:57.759Z" }, + { url = "https://files.pythonhosted.org/packages/bc/5e/61537aa6fa977922c6a03253a0e727e6e4a72381a80d63ad8eec350684f2/pillow-12.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc91a56697869546d1b8f0a3ff35224557ae7f881050e99f615e0119bf934b4e", size = 7125955, upload-time = "2025-10-15T18:21:59.372Z" }, + { url = "https://files.pythonhosted.org/packages/1f/3d/d5033539344ee3cbd9a4d69e12e63ca3a44a739eb2d4c8da350a3d38edd7/pillow-12.0.0-cp311-cp311-win32.whl", hash = "sha256:27f95b12453d165099c84f8a8bfdfd46b9e4bda9e0e4b65f0635430027f55739", size = 6298440, upload-time = "2025-10-15T18:22:00.982Z" }, + { url = "https://files.pythonhosted.org/packages/4d/42/aaca386de5cc8bd8a0254516957c1f265e3521c91515b16e286c662854c4/pillow-12.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b583dc9070312190192631373c6c8ed277254aa6e6084b74bdd0a6d3b221608e", size = 6999256, upload-time = "2025-10-15T18:22:02.617Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f1/9197c9c2d5708b785f631a6dfbfa8eb3fb9672837cb92ae9af812c13b4ed/pillow-12.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:759de84a33be3b178a64c8ba28ad5c135900359e85fb662bc6e403ad4407791d", size = 2436025, upload-time = "2025-10-15T18:22:04.598Z" }, + { url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377, upload-time = "2025-10-15T18:22:05.993Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343, upload-time = "2025-10-15T18:22:07.718Z" }, + { url = "https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981, upload-time = "2025-10-15T18:22:09.287Z" }, + { url = "https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399, upload-time = "2025-10-15T18:22:10.872Z" }, + { url = "https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740, upload-time = "2025-10-15T18:22:12.769Z" }, + { url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201, upload-time = "2025-10-15T18:22:14.813Z" }, + { url = "https://files.pythonhosted.org/packages/dc/4d/435c8ac688c54d11755aedfdd9f29c9eeddf68d150fe42d1d3dbd2365149/pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79", size = 6462334, upload-time = "2025-10-15T18:22:16.375Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f2/ad34167a8059a59b8ad10bc5c72d4d9b35acc6b7c0877af8ac885b5f2044/pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba", size = 7134162, upload-time = "2025-10-15T18:22:17.996Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/a7391df6adacf0a5c2cf6ac1cf1fcc1369e7d439d28f637a847f8803beb3/pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0", size = 6298769, upload-time = "2025-10-15T18:22:19.923Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0b/d87733741526541c909bbf159e338dcace4f982daac6e5a8d6be225ca32d/pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a", size = 7001107, upload-time = "2025-10-15T18:22:21.644Z" }, + { url = "https://files.pythonhosted.org/packages/bc/96/aaa61ce33cc98421fb6088af2a03be4157b1e7e0e87087c888e2370a7f45/pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad", size = 2436012, upload-time = "2025-10-15T18:22:23.621Z" }, + { url = "https://files.pythonhosted.org/packages/1d/b3/582327e6c9f86d037b63beebe981425d6811104cb443e8193824ef1a2f27/pillow-12.0.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b22bd8c974942477156be55a768f7aa37c46904c175be4e158b6a86e3a6b7ca8", size = 5215068, upload-time = "2025-10-15T18:23:59.594Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d6/67748211d119f3b6540baf90f92fae73ae51d5217b171b0e8b5f7e5d558f/pillow-12.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:805ebf596939e48dbb2e4922a1d3852cfc25c38160751ce02da93058b48d252a", size = 4614994, upload-time = "2025-10-15T18:24:01.669Z" }, + { url = "https://files.pythonhosted.org/packages/2d/e1/f8281e5d844c41872b273b9f2c34a4bf64ca08905668c8ae730eedc7c9fa/pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cae81479f77420d217def5f54b5b9d279804d17e982e0f2fa19b1d1e14ab5197", size = 5246639, upload-time = "2025-10-15T18:24:03.403Z" }, + { url = "https://files.pythonhosted.org/packages/94/5a/0d8ab8ffe8a102ff5df60d0de5af309015163bf710c7bb3e8311dd3b3ad0/pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aeaefa96c768fc66818730b952a862235d68825c178f1b3ffd4efd7ad2edcb7c", size = 6986839, upload-time = "2025-10-15T18:24:05.344Z" }, + { url = "https://files.pythonhosted.org/packages/20/2e/3434380e8110b76cd9eb00a363c484b050f949b4bbe84ba770bb8508a02c/pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09f2d0abef9e4e2f349305a4f8cc784a8a6c2f58a8c4892eea13b10a943bd26e", size = 5313505, upload-time = "2025-10-15T18:24:07.137Z" }, + { url = "https://files.pythonhosted.org/packages/57/ca/5a9d38900d9d74785141d6580950fe705de68af735ff6e727cb911b64740/pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bdee52571a343d721fb2eb3b090a82d959ff37fc631e3f70422e0c2e029f3e76", size = 5963654, upload-time = "2025-10-15T18:24:09.579Z" }, + { url = "https://files.pythonhosted.org/packages/95/7e/f896623c3c635a90537ac093c6a618ebe1a90d87206e42309cb5d98a1b9e/pillow-12.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b290fd8aa38422444d4b50d579de197557f182ef1068b75f5aa8558638b8d0a5", size = 6997850, upload-time = "2025-10-15T18:24:11.495Z" }, ] [[package]] name = "platformdirs" version = "4.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632 } +sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651 }, + { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, ] [[package]] name = "pluggy" version = "1.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = 
"sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] [[package]] name = "ply" version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", size = 159130 } +sdist = { url = "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", size = 159130, upload-time = "2018-02-15T19:01:31.097Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce", size = 49567 }, + { url = "https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce", size = 49567, upload-time = "2018-02-15T19:01:27.172Z" }, ] [[package]] @@ -4486,9 +4527,9 @@ dependencies = [ { name = "pyyaml" }, { name = "setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/02/c3/5a2a2ba06850bc5ec27f83ac8b92210dff9ff6736b2c42f700b489b3fd86/polyfile_weave-0.5.7.tar.gz", hash = "sha256:c3d863f51c30322c236bdf385e116ac06d4e7de9ec25a3aae14d42b1d528e33b", size = 5987445 } +sdist = { url = "https://files.pythonhosted.org/packages/02/c3/5a2a2ba06850bc5ec27f83ac8b92210dff9ff6736b2c42f700b489b3fd86/polyfile_weave-0.5.7.tar.gz", hash = "sha256:c3d863f51c30322c236bdf385e116ac06d4e7de9ec25a3aae14d42b1d528e33b", size = 5987445, upload-time = "2025-09-22T19:21:11.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/f6/d1efedc0f9506e47699616e896d8efe39e8f0b6a7d1d590c3e97455ecf4a/polyfile_weave-0.5.7-py3-none-any.whl", hash = "sha256:880454788bc383408bf19eefd6d1c49a18b965d90c99bccb58f4da65870c82dd", size = 1655397 }, + { url = "https://files.pythonhosted.org/packages/cd/f6/d1efedc0f9506e47699616e896d8efe39e8f0b6a7d1d590c3e97455ecf4a/polyfile_weave-0.5.7-py3-none-any.whl", hash = "sha256:880454788bc383408bf19eefd6d1c49a18b965d90c99bccb58f4da65870c82dd", size = 1655397, upload-time = "2025-09-22T19:21:09.142Z" }, ] [[package]] @@ -4498,9 +4539,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pywin32", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/d3/c6c64067759e87af98cc668c1cc75171347d0f1577fab7ca3749134e3cd4/portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f", size = 40891 } +sdist = { url = 
"https://files.pythonhosted.org/packages/ed/d3/c6c64067759e87af98cc668c1cc75171347d0f1577fab7ca3749134e3cd4/portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f", size = 40891, upload-time = "2024-07-13T23:15:34.86Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/fb/a70a4214956182e0d7a9099ab17d50bfcba1056188e9b14f35b9e2b62a0d/portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf", size = 18423 }, + { url = "https://files.pythonhosted.org/packages/9b/fb/a70a4214956182e0d7a9099ab17d50bfcba1056188e9b14f35b9e2b62a0d/portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf", size = 18423, upload-time = "2024-07-13T23:15:32.602Z" }, ] [[package]] @@ -4512,9 +4553,9 @@ dependencies = [ { name = "httpx", extra = ["http2"] }, { name = "pydantic" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6e/3e/1b50568e1f5db0bdced4a82c7887e37326585faef7ca43ead86849cb4861/postgrest-1.1.1.tar.gz", hash = "sha256:f3bb3e8c4602775c75c844a31f565f5f3dd584df4d36d683f0b67d01a86be322", size = 15431 } +sdist = { url = "https://files.pythonhosted.org/packages/6e/3e/1b50568e1f5db0bdced4a82c7887e37326585faef7ca43ead86849cb4861/postgrest-1.1.1.tar.gz", hash = "sha256:f3bb3e8c4602775c75c844a31f565f5f3dd584df4d36d683f0b67d01a86be322", size = 15431, upload-time = "2025-06-23T19:21:34.742Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/71/188a50ea64c17f73ff4df5196ec1553a8f1723421eb2d1069c73bab47d78/postgrest-1.1.1-py3-none-any.whl", hash = "sha256:98a6035ee1d14288484bfe36235942c5fb2d26af6d8120dfe3efbe007859251a", size = 22366 }, + { url = "https://files.pythonhosted.org/packages/a4/71/188a50ea64c17f73ff4df5196ec1553a8f1723421eb2d1069c73bab47d78/postgrest-1.1.1-py3-none-any.whl", hash = "sha256:98a6035ee1d14288484bfe36235942c5fb2d26af6d8120dfe3efbe007859251a", size = 22366, upload-time = "2025-06-23T19:21:33.637Z" }, ] [[package]] @@ -4529,9 +4570,9 @@ dependencies = [ { name = "six" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a2/d4/b9afe855a8a7a1bf4459c28ae4c300b40338122dc850acabefcf2c3df24d/posthog-7.0.1.tar.gz", hash = "sha256:21150562c2630a599c1d7eac94bc5c64eb6f6acbf3ff52ccf1e57345706db05a", size = 126985 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/d4/b9afe855a8a7a1bf4459c28ae4c300b40338122dc850acabefcf2c3df24d/posthog-7.0.1.tar.gz", hash = "sha256:21150562c2630a599c1d7eac94bc5c64eb6f6acbf3ff52ccf1e57345706db05a", size = 126985, upload-time = "2025-11-15T12:44:22.465Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/0c/8b6b20b0be71725e6e8a32dcd460cdbf62fe6df9bc656a650150dc98fedd/posthog-7.0.1-py3-none-any.whl", hash = "sha256:efe212d8d88a9ba80a20c588eab4baf4b1a5e90e40b551160a5603bb21e96904", size = 145234 }, + { url = "https://files.pythonhosted.org/packages/05/0c/8b6b20b0be71725e6e8a32dcd460cdbf62fe6df9bc656a650150dc98fedd/posthog-7.0.1-py3-none-any.whl", hash = "sha256:efe212d8d88a9ba80a20c588eab4baf4b1a5e90e40b551160a5603bb21e96904", size = 145234, upload-time = "2025-11-15T12:44:21.247Z" }, ] [[package]] @@ -4541,48 +4582,48 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = 
"sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198 } +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431 }, + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, ] [[package]] name = "propcache" version = "0.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442 } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208 }, - { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777 }, - { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647 }, - { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929 }, - { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778 }, - { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144 }, - { url = 
"https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030 }, - { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252 }, - { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064 }, - { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429 }, - { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727 }, - { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097 }, - { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084 }, - { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637 }, - { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064 }, - { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061 }, - { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037 }, - { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324 }, - { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505 }, - { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242 }, - { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474 }, - { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575 }, - { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736 }, - { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019 }, - { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376 }, - { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988 }, - { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615 }, - { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066 }, - { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655 }, - { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789 }, - { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305 }, + { url = 
"https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] [[package]] @@ -4592,91 +4633,91 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142 } +sdist = { url = 
"https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142, upload-time = "2025-03-10T15:54:38.843Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163 }, + { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163, upload-time = "2025-03-10T15:54:37.335Z" }, ] [[package]] name = "protobuf" version = "4.25.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/df/01/34c8d2b6354906d728703cb9d546a0e534de479e25f1b581e4094c4a85cc/protobuf-4.25.8.tar.gz", hash = "sha256:6135cf8affe1fc6f76cced2641e4ea8d3e59518d1f24ae41ba97bcad82d397cd", size = 380920 } +sdist = { url = "https://files.pythonhosted.org/packages/df/01/34c8d2b6354906d728703cb9d546a0e534de479e25f1b581e4094c4a85cc/protobuf-4.25.8.tar.gz", hash = "sha256:6135cf8affe1fc6f76cced2641e4ea8d3e59518d1f24ae41ba97bcad82d397cd", size = 380920, upload-time = "2025-05-28T14:22:25.153Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/ff/05f34305fe6b85bbfbecbc559d423a5985605cad5eda4f47eae9e9c9c5c5/protobuf-4.25.8-cp310-abi3-win32.whl", hash = "sha256:504435d831565f7cfac9f0714440028907f1975e4bed228e58e72ecfff58a1e0", size = 392745 }, - { url = "https://files.pythonhosted.org/packages/08/35/8b8a8405c564caf4ba835b1fdf554da869954712b26d8f2a98c0e434469b/protobuf-4.25.8-cp310-abi3-win_amd64.whl", hash = "sha256:bd551eb1fe1d7e92c1af1d75bdfa572eff1ab0e5bf1736716814cdccdb2360f9", size = 413736 }, - { url = "https://files.pythonhosted.org/packages/28/d7/ab27049a035b258dab43445eb6ec84a26277b16105b277cbe0a7698bdc6c/protobuf-4.25.8-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ca809b42f4444f144f2115c4c1a747b9a404d590f18f37e9402422033e464e0f", size = 394537 }, - { url = "https://files.pythonhosted.org/packages/bd/6d/a4a198b61808dd3d1ee187082ccc21499bc949d639feb948961b48be9a7e/protobuf-4.25.8-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:9ad7ef62d92baf5a8654fbb88dac7fa5594cfa70fd3440488a5ca3bfc6d795a7", size = 294005 }, - { url = "https://files.pythonhosted.org/packages/d6/c6/c9deaa6e789b6fc41b88ccbdfe7a42d2b82663248b715f55aa77fbc00724/protobuf-4.25.8-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:83e6e54e93d2b696a92cad6e6efc924f3850f82b52e1563778dfab8b355101b0", size = 294924 }, - { url = "https://files.pythonhosted.org/packages/0c/c1/6aece0ab5209981a70cd186f164c133fdba2f51e124ff92b73de7fd24d78/protobuf-4.25.8-py3-none-any.whl", hash = "sha256:15a0af558aa3b13efef102ae6e4f3efac06f1eea11afb3a57db2901447d9fb59", size = 156757 }, + { url = "https://files.pythonhosted.org/packages/45/ff/05f34305fe6b85bbfbecbc559d423a5985605cad5eda4f47eae9e9c9c5c5/protobuf-4.25.8-cp310-abi3-win32.whl", hash = "sha256:504435d831565f7cfac9f0714440028907f1975e4bed228e58e72ecfff58a1e0", size = 392745, upload-time = "2025-05-28T14:22:10.524Z" }, + { url = "https://files.pythonhosted.org/packages/08/35/8b8a8405c564caf4ba835b1fdf554da869954712b26d8f2a98c0e434469b/protobuf-4.25.8-cp310-abi3-win_amd64.whl", hash = 
"sha256:bd551eb1fe1d7e92c1af1d75bdfa572eff1ab0e5bf1736716814cdccdb2360f9", size = 413736, upload-time = "2025-05-28T14:22:13.156Z" }, + { url = "https://files.pythonhosted.org/packages/28/d7/ab27049a035b258dab43445eb6ec84a26277b16105b277cbe0a7698bdc6c/protobuf-4.25.8-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ca809b42f4444f144f2115c4c1a747b9a404d590f18f37e9402422033e464e0f", size = 394537, upload-time = "2025-05-28T14:22:14.768Z" }, + { url = "https://files.pythonhosted.org/packages/bd/6d/a4a198b61808dd3d1ee187082ccc21499bc949d639feb948961b48be9a7e/protobuf-4.25.8-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:9ad7ef62d92baf5a8654fbb88dac7fa5594cfa70fd3440488a5ca3bfc6d795a7", size = 294005, upload-time = "2025-05-28T14:22:16.052Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c6/c9deaa6e789b6fc41b88ccbdfe7a42d2b82663248b715f55aa77fbc00724/protobuf-4.25.8-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:83e6e54e93d2b696a92cad6e6efc924f3850f82b52e1563778dfab8b355101b0", size = 294924, upload-time = "2025-05-28T14:22:17.105Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c1/6aece0ab5209981a70cd186f164c133fdba2f51e124ff92b73de7fd24d78/protobuf-4.25.8-py3-none-any.whl", hash = "sha256:15a0af558aa3b13efef102ae6e4f3efac06f1eea11afb3a57db2901447d9fb59", size = 156757, upload-time = "2025-05-28T14:22:24.135Z" }, ] [[package]] name = "psutil" version = "7.1.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059 } +sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359 }, - { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171 }, - { url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261 }, - { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635 }, - { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633 }, - { url = 
"https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608 }, + { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" }, + { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" }, + { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" }, ] [[package]] name = "psycogreen" version = "1.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/eb/72/4a7965cf54e341006ad74cdc72cd6572c789bc4f4e3fadc78672f1fbcfbd/psycogreen-1.0.2.tar.gz", hash = "sha256:c429845a8a49cf2f76b71265008760bcd7c7c77d80b806db4dc81116dbcd130d", size = 5411 } +sdist = { url = "https://files.pythonhosted.org/packages/eb/72/4a7965cf54e341006ad74cdc72cd6572c789bc4f4e3fadc78672f1fbcfbd/psycogreen-1.0.2.tar.gz", hash = "sha256:c429845a8a49cf2f76b71265008760bcd7c7c77d80b806db4dc81116dbcd130d", size = 5411, upload-time = "2020-02-22T19:55:22.02Z" } [[package]] name = "psycopg2-binary" version = "2.9.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620 } +sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452 }, - { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957 }, - { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955 }, - { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007 }, - { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012 }, - { url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881 }, - { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985 }, - { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039 }, - { url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477 }, - { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842 }, - { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894 }, - { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603 }, - { url = 
"https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509 }, - { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159 }, - { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234 }, - { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236 }, - { url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083 }, - { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281 }, - { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010 }, - { url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641 }, - { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940 }, - { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147 }, + { url = "https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" }, + { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, + { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, + { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881, upload-time = "2025-10-30T02:55:07.332Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" }, + { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477, upload-time = "2025-10-30T02:55:11.182Z" }, + { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" }, + { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, + { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", 
size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, + { url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" }, + { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, + { url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" }, + { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, ] [[package]] name = "py" version = "1.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/ff/fec109ceb715d2a6b4c4a85a61af3b40c723a961e8828319fbcb15b868dc/py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", size = 207796 } +sdist = { url = "https://files.pythonhosted.org/packages/98/ff/fec109ceb715d2a6b4c4a85a61af3b40c723a961e8828319fbcb15b868dc/py-1.11.0.tar.gz", hash = 
"sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", size = 207796, upload-time = "2021-11-04T17:17:01.377Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378", size = 98708 }, + { url = "https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378", size = 98708, upload-time = "2021-11-04T17:17:00.152Z" }, ] [[package]] name = "py-cpuinfo" version = "9.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716 } +sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716, upload-time = "2022-10-25T20:38:06.303Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335 }, + { url = "https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335, upload-time = "2022-10-25T20:38:27.636Z" }, ] [[package]] @@ -4686,31 +4727,31 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/27/4e/ea6d43f324169f8aec0e57569443a38bab4b398d09769ca64f7b4d467de3/pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28", size = 1112479 } +sdist = { url = "https://files.pythonhosted.org/packages/27/4e/ea6d43f324169f8aec0e57569443a38bab4b398d09769ca64f7b4d467de3/pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28", size = 1112479, upload-time = "2024-07-17T10:41:25.092Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/46/ce89f87c2936f5bb9d879473b9663ce7a4b1f4359acc2f0eb39865eaa1af/pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977", size = 29028748 }, - { url = "https://files.pythonhosted.org/packages/8d/8e/ce2e9b2146de422f6638333c01903140e9ada244a2a477918a368306c64c/pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3", size = 27190965 }, - { url = "https://files.pythonhosted.org/packages/3b/c8/5675719570eb1acd809481c6d64e2136ffb340bc387f4ca62dce79516cea/pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15", size = 39269081 }, - { url = 
"https://files.pythonhosted.org/packages/5e/78/3931194f16ab681ebb87ad252e7b8d2c8b23dad49706cadc865dff4a1dd3/pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597", size = 39864921 }, - { url = "https://files.pythonhosted.org/packages/d8/81/69b6606093363f55a2a574c018901c40952d4e902e670656d18213c71ad7/pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420", size = 38740798 }, - { url = "https://files.pythonhosted.org/packages/4c/21/9ca93b84b92ef927814cb7ba37f0774a484c849d58f0b692b16af8eebcfb/pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4", size = 39871877 }, - { url = "https://files.pythonhosted.org/packages/30/d1/63a7c248432c71c7d3ee803e706590a0b81ce1a8d2b2ae49677774b813bb/pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03", size = 25151089 }, - { url = "https://files.pythonhosted.org/packages/d4/62/ce6ac1275a432b4a27c55fe96c58147f111d8ba1ad800a112d31859fae2f/pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22", size = 29019418 }, - { url = "https://files.pythonhosted.org/packages/8e/0a/dbd0c134e7a0c30bea439675cc120012337202e5fac7163ba839aa3691d2/pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053", size = 27152197 }, - { url = "https://files.pythonhosted.org/packages/cb/05/3f4a16498349db79090767620d6dc23c1ec0c658a668d61d76b87706c65d/pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a", size = 39263026 }, - { url = "https://files.pythonhosted.org/packages/c2/0c/ea2107236740be8fa0e0d4a293a095c9f43546a2465bb7df34eee9126b09/pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc", size = 39880798 }, - { url = "https://files.pythonhosted.org/packages/f6/b0/b9164a8bc495083c10c281cc65064553ec87b7537d6f742a89d5953a2a3e/pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a", size = 38715172 }, - { url = "https://files.pythonhosted.org/packages/f1/c4/9625418a1413005e486c006e56675334929fad864347c5ae7c1b2e7fe639/pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b", size = 39874508 }, - { url = "https://files.pythonhosted.org/packages/ae/49/baafe2a964f663413be3bd1cf5c45ed98c5e42e804e2328e18f4570027c1/pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7", size = 25099235 }, + { url = "https://files.pythonhosted.org/packages/f9/46/ce89f87c2936f5bb9d879473b9663ce7a4b1f4359acc2f0eb39865eaa1af/pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977", size = 29028748, upload-time = "2024-07-16T10:30:02.609Z" }, + { url = "https://files.pythonhosted.org/packages/8d/8e/ce2e9b2146de422f6638333c01903140e9ada244a2a477918a368306c64c/pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3", size = 27190965, upload-time = "2024-07-16T10:30:10.718Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c8/5675719570eb1acd809481c6d64e2136ffb340bc387f4ca62dce79516cea/pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15", size = 39269081, upload-time = "2024-07-16T10:30:18.878Z" }, + { url = "https://files.pythonhosted.org/packages/5e/78/3931194f16ab681ebb87ad252e7b8d2c8b23dad49706cadc865dff4a1dd3/pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597", size = 39864921, upload-time = "2024-07-16T10:30:27.008Z" }, + { url = "https://files.pythonhosted.org/packages/d8/81/69b6606093363f55a2a574c018901c40952d4e902e670656d18213c71ad7/pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420", size = 38740798, upload-time = "2024-07-16T10:30:34.814Z" }, + { url = "https://files.pythonhosted.org/packages/4c/21/9ca93b84b92ef927814cb7ba37f0774a484c849d58f0b692b16af8eebcfb/pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4", size = 39871877, upload-time = "2024-07-16T10:30:42.672Z" }, + { url = "https://files.pythonhosted.org/packages/30/d1/63a7c248432c71c7d3ee803e706590a0b81ce1a8d2b2ae49677774b813bb/pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03", size = 25151089, upload-time = "2024-07-16T10:30:49.279Z" }, + { url = "https://files.pythonhosted.org/packages/d4/62/ce6ac1275a432b4a27c55fe96c58147f111d8ba1ad800a112d31859fae2f/pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22", size = 29019418, upload-time = "2024-07-16T10:30:55.573Z" }, + { url = "https://files.pythonhosted.org/packages/8e/0a/dbd0c134e7a0c30bea439675cc120012337202e5fac7163ba839aa3691d2/pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053", size = 27152197, upload-time = "2024-07-16T10:31:02.036Z" }, + { url = "https://files.pythonhosted.org/packages/cb/05/3f4a16498349db79090767620d6dc23c1ec0c658a668d61d76b87706c65d/pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a", size = 39263026, upload-time = "2024-07-16T10:31:10.351Z" }, + { url = "https://files.pythonhosted.org/packages/c2/0c/ea2107236740be8fa0e0d4a293a095c9f43546a2465bb7df34eee9126b09/pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc", size = 39880798, upload-time = "2024-07-16T10:31:17.66Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b0/b9164a8bc495083c10c281cc65064553ec87b7537d6f742a89d5953a2a3e/pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a", size = 38715172, upload-time = "2024-07-16T10:31:25.965Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c4/9625418a1413005e486c006e56675334929fad864347c5ae7c1b2e7fe639/pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = 
"sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b", size = 39874508, upload-time = "2024-07-16T10:31:33.721Z" }, + { url = "https://files.pythonhosted.org/packages/ae/49/baafe2a964f663413be3bd1cf5c45ed98c5e42e804e2328e18f4570027c1/pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7", size = 25099235, upload-time = "2024-07-16T10:31:40.893Z" }, ] [[package]] name = "pyasn1" version = "0.6.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 }, + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, ] [[package]] @@ -4720,36 +4761,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyasn1" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892 } +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259 }, + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, ] [[package]] name = "pycparser" version = "2.23" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734 } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140 }, + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, ] [[package]] name = "pycryptodome" version = "3.19.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b1/38/42a8855ff1bf568c61ca6557e2203f318fb7afeadaf2eb8ecfdbde107151/pycryptodome-3.19.1.tar.gz", hash = "sha256:8ae0dd1bcfada451c35f9e29a3e5db385caabc190f98e4a80ad02a61098fb776", size = 4782144 } +sdist = { url = "https://files.pythonhosted.org/packages/b1/38/42a8855ff1bf568c61ca6557e2203f318fb7afeadaf2eb8ecfdbde107151/pycryptodome-3.19.1.tar.gz", hash = "sha256:8ae0dd1bcfada451c35f9e29a3e5db385caabc190f98e4a80ad02a61098fb776", size = 4782144, upload-time = "2023-12-28T06:52:40.741Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/ef/4931bc30674f0de0ca0e827b58c8b0c17313a8eae2754976c610b866118b/pycryptodome-3.19.1-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:67939a3adbe637281c611596e44500ff309d547e932c449337649921b17b6297", size = 2417027 }, - { url = "https://files.pythonhosted.org/packages/67/e6/238c53267fd8d223029c0a0d3730cb1b6594d60f62e40c4184703dc490b1/pycryptodome-3.19.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:11ddf6c9b52116b62223b6a9f4741bc4f62bb265392a4463282f7f34bb287180", size = 1579728 }, - { url = "https://files.pythonhosted.org/packages/7c/87/7181c42c8d5ba89822a4b824830506d0aeec02959bb893614767e3279846/pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e6f89480616781d2a7f981472d0cdb09b9da9e8196f43c1234eff45c915766", size = 2051440 }, - { url = "https://files.pythonhosted.org/packages/34/dd/332c4c0055527d17dac317ed9f9c864fc047b627d82f4b9a56c110afc6fc/pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e1efcb68993b7ce5d1d047a46a601d41281bba9f1971e6be4aa27c69ab8065", size = 2125379 }, - { url = "https://files.pythonhosted.org/packages/24/9e/320b885ea336c218ff54ec2b276cd70ba6904e4f5a14a771ed39a2c47d59/pycryptodome-3.19.1-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c6273ca5a03b672e504995529b8bae56da0ebb691d8ef141c4aa68f60765700", size = 2153951 }, - { url = "https://files.pythonhosted.org/packages/f4/54/8ae0c43d1257b41bc9d3277c3f875174fd8ad86b9567f0b8609b99c938ee/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b0bfe61506795877ff974f994397f0c862d037f6f1c0bfc3572195fc00833b96", size = 2044041 }, - { url = "https://files.pythonhosted.org/packages/45/93/f8450a92cc38541c3ba1f4cb4e267e15ae6d6678ca617476d52c3a3764d4/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:f34976c5c8eb79e14c7d970fb097482835be8d410a4220f86260695ede4c3e17", size = 2182446 }, - { url = "https://files.pythonhosted.org/packages/af/cd/ed6e429fb0792ce368f66e83246264dd3a7a045b0b1e63043ed22a063ce5/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7c9e222d0976f68d0cf6409cfea896676ddc1d98485d601e9508f90f60e2b0a2", size = 2144914 }, - { url = 
"https://files.pythonhosted.org/packages/f6/23/b064bd4cfbf2cc5f25afcde0e7c880df5b20798172793137ba4b62d82e72/pycryptodome-3.19.1-cp35-abi3-win32.whl", hash = "sha256:4805e053571140cb37cf153b5c72cd324bb1e3e837cbe590a19f69b6cf85fd03", size = 1713105 }, - { url = "https://files.pythonhosted.org/packages/7d/e0/ded1968a5257ab34216a0f8db7433897a2337d59e6d03be113713b346ea2/pycryptodome-3.19.1-cp35-abi3-win_amd64.whl", hash = "sha256:a470237ee71a1efd63f9becebc0ad84b88ec28e6784a2047684b693f458f41b7", size = 1749222 }, + { url = "https://files.pythonhosted.org/packages/a8/ef/4931bc30674f0de0ca0e827b58c8b0c17313a8eae2754976c610b866118b/pycryptodome-3.19.1-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:67939a3adbe637281c611596e44500ff309d547e932c449337649921b17b6297", size = 2417027, upload-time = "2023-12-28T06:51:50.138Z" }, + { url = "https://files.pythonhosted.org/packages/67/e6/238c53267fd8d223029c0a0d3730cb1b6594d60f62e40c4184703dc490b1/pycryptodome-3.19.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:11ddf6c9b52116b62223b6a9f4741bc4f62bb265392a4463282f7f34bb287180", size = 1579728, upload-time = "2023-12-28T06:51:52.385Z" }, + { url = "https://files.pythonhosted.org/packages/7c/87/7181c42c8d5ba89822a4b824830506d0aeec02959bb893614767e3279846/pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e6f89480616781d2a7f981472d0cdb09b9da9e8196f43c1234eff45c915766", size = 2051440, upload-time = "2023-12-28T06:51:55.751Z" }, + { url = "https://files.pythonhosted.org/packages/34/dd/332c4c0055527d17dac317ed9f9c864fc047b627d82f4b9a56c110afc6fc/pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e1efcb68993b7ce5d1d047a46a601d41281bba9f1971e6be4aa27c69ab8065", size = 2125379, upload-time = "2023-12-28T06:51:58.567Z" }, + { url = "https://files.pythonhosted.org/packages/24/9e/320b885ea336c218ff54ec2b276cd70ba6904e4f5a14a771ed39a2c47d59/pycryptodome-3.19.1-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c6273ca5a03b672e504995529b8bae56da0ebb691d8ef141c4aa68f60765700", size = 2153951, upload-time = "2023-12-28T06:52:01.699Z" }, + { url = "https://files.pythonhosted.org/packages/f4/54/8ae0c43d1257b41bc9d3277c3f875174fd8ad86b9567f0b8609b99c938ee/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b0bfe61506795877ff974f994397f0c862d037f6f1c0bfc3572195fc00833b96", size = 2044041, upload-time = "2023-12-28T06:52:03.737Z" }, + { url = "https://files.pythonhosted.org/packages/45/93/f8450a92cc38541c3ba1f4cb4e267e15ae6d6678ca617476d52c3a3764d4/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:f34976c5c8eb79e14c7d970fb097482835be8d410a4220f86260695ede4c3e17", size = 2182446, upload-time = "2023-12-28T06:52:05.588Z" }, + { url = "https://files.pythonhosted.org/packages/af/cd/ed6e429fb0792ce368f66e83246264dd3a7a045b0b1e63043ed22a063ce5/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7c9e222d0976f68d0cf6409cfea896676ddc1d98485d601e9508f90f60e2b0a2", size = 2144914, upload-time = "2023-12-28T06:52:07.44Z" }, + { url = "https://files.pythonhosted.org/packages/f6/23/b064bd4cfbf2cc5f25afcde0e7c880df5b20798172793137ba4b62d82e72/pycryptodome-3.19.1-cp35-abi3-win32.whl", hash = "sha256:4805e053571140cb37cf153b5c72cd324bb1e3e837cbe590a19f69b6cf85fd03", size = 1713105, upload-time = "2023-12-28T06:52:09.585Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/e0/ded1968a5257ab34216a0f8db7433897a2337d59e6d03be113713b346ea2/pycryptodome-3.19.1-cp35-abi3-win_amd64.whl", hash = "sha256:a470237ee71a1efd63f9becebc0ad84b88ec28e6784a2047684b693f458f41b7", size = 1749222, upload-time = "2023-12-28T06:52:11.534Z" }, ] [[package]] @@ -4762,9 +4803,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/54/ecab642b3bed45f7d5f59b38443dcb36ef50f85af192e6ece103dbfe9587/pydantic-2.11.10.tar.gz", hash = "sha256:dc280f0982fbda6c38fada4e476dc0a4f3aeaf9c6ad4c28df68a666ec3c61423", size = 788494 } +sdist = { url = "https://files.pythonhosted.org/packages/ae/54/ecab642b3bed45f7d5f59b38443dcb36ef50f85af192e6ece103dbfe9587/pydantic-2.11.10.tar.gz", hash = "sha256:dc280f0982fbda6c38fada4e476dc0a4f3aeaf9c6ad4c28df68a666ec3c61423", size = 788494, upload-time = "2025-10-04T10:40:41.338Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/1f/73c53fcbfb0b5a78f91176df41945ca466e71e9d9d836e5c522abda39ee7/pydantic-2.11.10-py3-none-any.whl", hash = "sha256:802a655709d49bd004c31e865ef37da30b540786a46bfce02333e0e24b5fe29a", size = 444823 }, + { url = "https://files.pythonhosted.org/packages/bd/1f/73c53fcbfb0b5a78f91176df41945ca466e71e9d9d836e5c522abda39ee7/pydantic-2.11.10-py3-none-any.whl", hash = "sha256:802a655709d49bd004c31e865ef37da30b540786a46bfce02333e0e24b5fe29a", size = 444823, upload-time = "2025-10-04T10:40:39.055Z" }, ] [[package]] @@ -4774,45 +4815,45 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195 } +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584 }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071 }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823 }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792 }, - { url = 
"https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338 }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998 }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200 }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890 }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359 }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883 }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074 }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538 }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909 }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786 }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000 }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996 }, - { url = 
"https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957 }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199 }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296 }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109 }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028 }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044 }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881 }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034 }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187 }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628 }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866 }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894 }, - { url = 
"https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200 }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123 }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852 }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484 }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896 }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475 }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013 }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715 }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757 }, + { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, + { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = 
"2025-04-23T18:31:06.377Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, + { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, + { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, + { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", 
size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, + { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, + { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, + { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = 
"2025-04-23T18:33:26.621Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, ] [[package]] @@ -4823,9 +4864,9 @@ dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3a/10/fb64987804cde41bcc39d9cd757cd5f2bb5d97b389d81aa70238b14b8a7e/pydantic_extra_types-2.10.6.tar.gz", hash = "sha256:c63d70bf684366e6bbe1f4ee3957952ebe6973d41e7802aea0b770d06b116aeb", size = 141858 } +sdist = { url = "https://files.pythonhosted.org/packages/3a/10/fb64987804cde41bcc39d9cd757cd5f2bb5d97b389d81aa70238b14b8a7e/pydantic_extra_types-2.10.6.tar.gz", hash = "sha256:c63d70bf684366e6bbe1f4ee3957952ebe6973d41e7802aea0b770d06b116aeb", size = 141858, upload-time = "2025-10-08T13:47:49.483Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/04/5c918669096da8d1c9ec7bb716bd72e755526103a61bc5e76a3e4fb23b53/pydantic_extra_types-2.10.6-py3-none-any.whl", hash = "sha256:6106c448316d30abf721b5b9fecc65e983ef2614399a24142d689c7546cc246a", size = 40949 }, + { url = "https://files.pythonhosted.org/packages/93/04/5c918669096da8d1c9ec7bb716bd72e755526103a61bc5e76a3e4fb23b53/pydantic_extra_types-2.10.6-py3-none-any.whl", hash = "sha256:6106c448316d30abf721b5b9fecc65e983ef2614399a24142d689c7546cc246a", size = 40949, upload-time = "2025-10-08T13:47:48.268Z" }, ] [[package]] @@ -4837,27 +4878,27 @@ dependencies = [ { name = "python-dotenv" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394 } +sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608 }, + { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, ] [[package]] name = "pygments" version = "2.19.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 } +sdist = { url = 
"https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 }, + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] [[package]] name = "pyjwt" version = "2.10.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785 } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997 }, + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, ] [package.optional-dependencies] @@ -4878,9 +4919,9 @@ dependencies = [ { name = "setuptools" }, { name = "ujson" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dc/85/91828a9282bb7f9b210c0a93831979c5829cba5533ac12e87014b6e2208b/pymilvus-2.5.17.tar.gz", hash = "sha256:48ff55db9598e1b4cc25f4fe645b00d64ebcfb03f79f9f741267fc2a35526d43", size = 1281485 } +sdist = { url = "https://files.pythonhosted.org/packages/dc/85/91828a9282bb7f9b210c0a93831979c5829cba5533ac12e87014b6e2208b/pymilvus-2.5.17.tar.gz", hash = "sha256:48ff55db9598e1b4cc25f4fe645b00d64ebcfb03f79f9f741267fc2a35526d43", size = 1281485, upload-time = "2025-11-10T03:24:53.058Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/59/44/ee0c64617f58c123f570293f36b40f7b56fc123a2aa9573aa22e6ff0fb86/pymilvus-2.5.17-py3-none-any.whl", hash = "sha256:a43d36f2e5f793040917d35858d1ed2532307b7dfb03bc3eaf813aac085bc5a4", size = 244036 }, + { url = "https://files.pythonhosted.org/packages/59/44/ee0c64617f58c123f570293f36b40f7b56fc123a2aa9573aa22e6ff0fb86/pymilvus-2.5.17-py3-none-any.whl", hash = "sha256:a43d36f2e5f793040917d35858d1ed2532307b7dfb03bc3eaf813aac085bc5a4", size = 244036, upload-time = "2025-11-10T03:24:51.496Z" }, ] [[package]] @@ -4892,18 +4933,18 @@ dependencies = [ { name = "orjson" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b5/29/d9b112684ce490057b90bddede3fb6a69cf2787a3fd7736bdce203e77388/pymochow-2.2.9.tar.gz", hash = 
"sha256:5a28058edc8861deb67524410e786814571ed9fe0700c8c9fc0bc2ad5835b06c", size = 50079 } +sdist = { url = "https://files.pythonhosted.org/packages/b5/29/d9b112684ce490057b90bddede3fb6a69cf2787a3fd7736bdce203e77388/pymochow-2.2.9.tar.gz", hash = "sha256:5a28058edc8861deb67524410e786814571ed9fe0700c8c9fc0bc2ad5835b06c", size = 50079, upload-time = "2025-06-05T08:33:19.59Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/9b/be18f9709dfd8187ff233be5acb253a9f4f1b07f1db0e7b09d84197c28e2/pymochow-2.2.9-py3-none-any.whl", hash = "sha256:639192b97f143d4a22fc163872be12aee19523c46f12e22416e8f289f1354d15", size = 77899 }, + { url = "https://files.pythonhosted.org/packages/bf/9b/be18f9709dfd8187ff233be5acb253a9f4f1b07f1db0e7b09d84197c28e2/pymochow-2.2.9-py3-none-any.whl", hash = "sha256:639192b97f143d4a22fc163872be12aee19523c46f12e22416e8f289f1354d15", size = 77899, upload-time = "2025-06-05T08:33:17.424Z" }, ] [[package]] name = "pymysql" version = "1.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f5/ae/1fe3fcd9f959efa0ebe200b8de88b5a5ce3e767e38c7ac32fb179f16a388/pymysql-1.1.2.tar.gz", hash = "sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03", size = 48258 } +sdist = { url = "https://files.pythonhosted.org/packages/f5/ae/1fe3fcd9f959efa0ebe200b8de88b5a5ce3e767e38c7ac32fb179f16a388/pymysql-1.1.2.tar.gz", hash = "sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03", size = 48258, upload-time = "2025-08-24T12:55:55.146Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/4c/ad33b92b9864cbde84f259d5df035a6447f91891f5be77788e2a3892bce3/pymysql-1.1.2-py3-none-any.whl", hash = "sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9", size = 45300 }, + { url = "https://files.pythonhosted.org/packages/7c/4c/ad33b92b9864cbde84f259d5df035a6447f91891f5be77788e2a3892bce3/pymysql-1.1.2-py3-none-any.whl", hash = "sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9", size = 45300, upload-time = "2025-08-24T12:55:53.394Z" }, ] [[package]] @@ -4918,80 +4959,89 @@ dependencies = [ { name = "sqlalchemy" }, { name = "sqlglot" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ca/6f/24ae2d4ba811e5e112c89bb91ba7c50eb79658563650c8fc65caa80655f8/pyobvector-0.2.20.tar.gz", hash = "sha256:72a54044632ba3bb27d340fb660c50b22548d34c6a9214b6653bc18eee4287c4", size = 46648 } +sdist = { url = "https://files.pythonhosted.org/packages/ca/6f/24ae2d4ba811e5e112c89bb91ba7c50eb79658563650c8fc65caa80655f8/pyobvector-0.2.20.tar.gz", hash = "sha256:72a54044632ba3bb27d340fb660c50b22548d34c6a9214b6653bc18eee4287c4", size = 46648, upload-time = "2025-11-20T09:30:16.354Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ae/21/630c4e9f0d30b7a6eebe0590cd97162e82a2d3ac4ed3a33259d0a67e0861/pyobvector-0.2.20-py3-none-any.whl", hash = "sha256:9a3c1d3eb5268eae64185f8807b10fd182f271acf33323ee731c2ad554d1c076", size = 60131 }, + { url = "https://files.pythonhosted.org/packages/ae/21/630c4e9f0d30b7a6eebe0590cd97162e82a2d3ac4ed3a33259d0a67e0861/pyobvector-0.2.20-py3-none-any.whl", hash = "sha256:9a3c1d3eb5268eae64185f8807b10fd182f271acf33323ee731c2ad554d1c076", size = 60131, upload-time = "2025-11-20T09:30:14.88Z" }, ] [[package]] name = "pypandoc" version = "1.16.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/0b/18/9f5f70567b97758625335209b98d5cb857e19aa1a9306e9749567a240634/pypandoc-1.16.2.tar.gz", hash = "sha256:7a72a9fbf4a5dc700465e384c3bb333d22220efc4e972cb98cf6fc723cdca86b", size = 31477 } +sdist = { url = "https://files.pythonhosted.org/packages/0b/18/9f5f70567b97758625335209b98d5cb857e19aa1a9306e9749567a240634/pypandoc-1.16.2.tar.gz", hash = "sha256:7a72a9fbf4a5dc700465e384c3bb333d22220efc4e972cb98cf6fc723cdca86b", size = 31477, upload-time = "2025-11-13T16:30:29.608Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/e9/b145683854189bba84437ea569bfa786f408c8dc5bc16d8eb0753f5583bf/pypandoc-1.16.2-py3-none-any.whl", hash = "sha256:c200c1139c8e3247baf38d1e9279e85d9f162499d1999c6aa8418596558fe79b", size = 19451 }, + { url = "https://files.pythonhosted.org/packages/bb/e9/b145683854189bba84437ea569bfa786f408c8dc5bc16d8eb0753f5583bf/pypandoc-1.16.2-py3-none-any.whl", hash = "sha256:c200c1139c8e3247baf38d1e9279e85d9f162499d1999c6aa8418596558fe79b", size = 19451, upload-time = "2025-11-13T16:30:07.66Z" }, ] [[package]] name = "pyparsing" version = "3.2.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274 } +sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890 }, + { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" }, ] [[package]] name = "pypdf" version = "6.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/01/f7510cc6124f494cfbec2e8d3c2e1a20d4f6c18622b0c03a3a70e968bacb/pypdf-6.4.0.tar.gz", hash = "sha256:4769d471f8ddc3341193ecc5d6560fa44cf8cd0abfabf21af4e195cc0c224072", size = 5276661 } +sdist = { url = "https://files.pythonhosted.org/packages/f3/01/f7510cc6124f494cfbec2e8d3c2e1a20d4f6c18622b0c03a3a70e968bacb/pypdf-6.4.0.tar.gz", hash = "sha256:4769d471f8ddc3341193ecc5d6560fa44cf8cd0abfabf21af4e195cc0c224072", size = 5276661, upload-time = "2025-11-23T14:04:43.185Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/f2/9c9429411c91ac1dd5cd66780f22b6df20c64c3646cdd1e6d67cf38579c4/pypdf-6.4.0-py3-none-any.whl", hash = "sha256:55ab9837ed97fd7fcc5c131d52fcc2223bc5c6b8a1488bbf7c0e27f1f0023a79", size = 329497 }, + { url = "https://files.pythonhosted.org/packages/cd/f2/9c9429411c91ac1dd5cd66780f22b6df20c64c3646cdd1e6d67cf38579c4/pypdf-6.4.0-py3-none-any.whl", hash = "sha256:55ab9837ed97fd7fcc5c131d52fcc2223bc5c6b8a1488bbf7c0e27f1f0023a79", size = 329497, upload-time = "2025-11-23T14:04:41.448Z" }, ] [[package]] name = "pypdfium2" -version = "4.30.0" +version = "5.2.0" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/14/838b3ba247a0ba92e4df5d23f2bea9478edcfd72b78a39d6ca36ccd84ad2/pypdfium2-4.30.0.tar.gz", hash = "sha256:48b5b7e5566665bc1015b9d69c1ebabe21f6aee468b509531c3c8318eeee2e16", size = 140239 } +sdist = { url = "https://files.pythonhosted.org/packages/f6/ab/73c7d24e4eac9ba952569403b32b7cca9412fc5b9bef54fdbd669551389f/pypdfium2-5.2.0.tar.gz", hash = "sha256:43863625231ce999c1ebbed6721a88de818b2ab4d909c1de558d413b9a400256", size = 269999, upload-time = "2025-12-12T13:20:15.353Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/9a/c8ff5cc352c1b60b0b97642ae734f51edbab6e28b45b4fcdfe5306ee3c83/pypdfium2-4.30.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b33ceded0b6ff5b2b93bc1fe0ad4b71aa6b7e7bd5875f1ca0cdfb6ba6ac01aab", size = 2837254 }, - { url = "https://files.pythonhosted.org/packages/21/8b/27d4d5409f3c76b985f4ee4afe147b606594411e15ac4dc1c3363c9a9810/pypdfium2-4.30.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4e55689f4b06e2d2406203e771f78789bd4f190731b5d57383d05cf611d829de", size = 2707624 }, - { url = "https://files.pythonhosted.org/packages/11/63/28a73ca17c24b41a205d658e177d68e198d7dde65a8c99c821d231b6ee3d/pypdfium2-4.30.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6e50f5ce7f65a40a33d7c9edc39f23140c57e37144c2d6d9e9262a2a854854", size = 2793126 }, - { url = "https://files.pythonhosted.org/packages/d1/96/53b3ebf0955edbd02ac6da16a818ecc65c939e98fdeb4e0958362bd385c8/pypdfium2-4.30.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3d0dd3ecaffd0b6dbda3da663220e705cb563918249bda26058c6036752ba3a2", size = 2591077 }, - { url = "https://files.pythonhosted.org/packages/ec/ee/0394e56e7cab8b5b21f744d988400948ef71a9a892cbeb0b200d324ab2c7/pypdfium2-4.30.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc3bf29b0db8c76cdfaac1ec1cde8edf211a7de7390fbf8934ad2aa9b4d6dfad", size = 2864431 }, - { url = "https://files.pythonhosted.org/packages/65/cd/3f1edf20a0ef4a212a5e20a5900e64942c5a374473671ac0780eaa08ea80/pypdfium2-4.30.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1f78d2189e0ddf9ac2b7a9b9bd4f0c66f54d1389ff6c17e9fd9dc034d06eb3f", size = 2812008 }, - { url = "https://files.pythonhosted.org/packages/c8/91/2d517db61845698f41a2a974de90762e50faeb529201c6b3574935969045/pypdfium2-4.30.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:5eda3641a2da7a7a0b2f4dbd71d706401a656fea521b6b6faa0675b15d31a163", size = 6181543 }, - { url = "https://files.pythonhosted.org/packages/ba/c4/ed1315143a7a84b2c7616569dfb472473968d628f17c231c39e29ae9d780/pypdfium2-4.30.0-py3-none-musllinux_1_1_i686.whl", hash = "sha256:0dfa61421b5eb68e1188b0b2231e7ba35735aef2d867d86e48ee6cab6975195e", size = 6175911 }, - { url = "https://files.pythonhosted.org/packages/7a/c4/9e62d03f414e0e3051c56d5943c3bf42aa9608ede4e19dc96438364e9e03/pypdfium2-4.30.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:f33bd79e7a09d5f7acca3b0b69ff6c8a488869a7fab48fdf400fec6e20b9c8be", size = 6267430 }, - { url = "https://files.pythonhosted.org/packages/90/47/eda4904f715fb98561e34012826e883816945934a851745570521ec89520/pypdfium2-4.30.0-py3-none-win32.whl", hash = "sha256:ee2410f15d576d976c2ab2558c93d392a25fb9f6635e8dd0a8a3a5241b275e0e", size = 2775951 }, - { url = "https://files.pythonhosted.org/packages/25/bd/56d9ec6b9f0fc4e0d95288759f3179f0fcd34b1a1526b75673d2f6d5196f/pypdfium2-4.30.0-py3-none-win_amd64.whl", hash = 
"sha256:90dbb2ac07be53219f56be09961eb95cf2473f834d01a42d901d13ccfad64b4c", size = 2892098 }, - { url = "https://files.pythonhosted.org/packages/be/7a/097801205b991bc3115e8af1edb850d30aeaf0118520b016354cf5ccd3f6/pypdfium2-4.30.0-py3-none-win_arm64.whl", hash = "sha256:119b2969a6d6b1e8d55e99caaf05290294f2d0fe49c12a3f17102d01c441bd29", size = 2752118 }, + { url = "https://files.pythonhosted.org/packages/fb/0c/9108ae5266ee4cdf495f99205c44d4b5c83b4eb227c2b610d35c9e9fe961/pypdfium2-5.2.0-py3-none-android_23_arm64_v8a.whl", hash = "sha256:1ba4187a45ce4cf08f2a8c7e0f8970c36b9aa1770c8a3412a70781c1d80fb145", size = 2763268, upload-time = "2025-12-12T13:19:37.354Z" }, + { url = "https://files.pythonhosted.org/packages/35/8c/55f5c8a2c6b293f5c020be4aa123eaa891e797c514e5eccd8cb042740d37/pypdfium2-5.2.0-py3-none-android_23_armeabi_v7a.whl", hash = "sha256:80c55e10a8c9242f0901d35a9a306dd09accce8e497507bb23fcec017d45fe2e", size = 2301821, upload-time = "2025-12-12T13:19:39.484Z" }, + { url = "https://files.pythonhosted.org/packages/5e/7d/efa013e3795b41c59dd1e472f7201c241232c3a6553be4917e3a26b9f225/pypdfium2-5.2.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:73523ae69cd95c084c1342096893b2143ea73c36fdde35494780ba431e6a7d6e", size = 2816428, upload-time = "2025-12-12T13:19:41.735Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ae/8c30af6ff2ab41a7cb84753ee79dd1e0a8932c9bda9fe19759d69cbbf115/pypdfium2-5.2.0-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:19c501d22ef5eb98e42416d22cc3ac66d4808b436e3d06686392f24d8d9f708d", size = 2939486, upload-time = "2025-12-12T13:19:43.176Z" }, + { url = "https://files.pythonhosted.org/packages/64/64/454a73c49a04c2c290917ad86184e4da959e9e5aba94b3b046328c89be93/pypdfium2-5.2.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ed15a3f58d6ee4905f0d0a731e30b381b457c30689512589c7f57950b0cdcec", size = 2979235, upload-time = "2025-12-12T13:19:44.635Z" }, + { url = "https://files.pythonhosted.org/packages/4e/29/f1cab8e31192dd367dc7b1afa71f45cfcb8ff0b176f1d2a0f528faf04052/pypdfium2-5.2.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:329cd1e9f068e8729e0d0b79a070d6126f52bc48ff1e40505cb207a5e20ce0ba", size = 2763001, upload-time = "2025-12-12T13:19:47.598Z" }, + { url = "https://files.pythonhosted.org/packages/bc/5d/e95fad8fdac960854173469c4b6931d5de5e09d05e6ee7d9756f8b95eef0/pypdfium2-5.2.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:325259759886e66619504df4721fef3b8deabf8a233e4f4a66e0c32ebae60c2f", size = 3057024, upload-time = "2025-12-12T13:19:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/f4/32/468591d017ab67f8142d40f4db8163b6d8bb404fe0d22da75a5c661dc144/pypdfium2-5.2.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5683e8f08ab38ed05e0e59e611451ec74332803d4e78f8c45658ea1d372a17af", size = 3448598, upload-time = "2025-12-12T13:19:50.979Z" }, + { url = "https://files.pythonhosted.org/packages/f9/a5/57b4e389b77ab5f7e9361dc7fc03b5378e678ba81b21e791e85350fbb235/pypdfium2-5.2.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da4815426a5adcf03bf4d2c5f26c0ff8109dbfaf2c3415984689931bc6006ef9", size = 2993946, upload-time = "2025-12-12T13:19:53.154Z" }, + { url = "https://files.pythonhosted.org/packages/84/3a/e03e9978f817632aa56183bb7a4989284086fdd45de3245ead35f147179b/pypdfium2-5.2.0-py3-none-manylinux_2_27_s390x.manylinux_2_28_s390x.whl", hash = "sha256:64bf5c039b2c314dab1fd158bfff99db96299a5b5c6d96fc056071166056f1de", size = 
3673148, upload-time = "2025-12-12T13:19:54.528Z" }, + { url = "https://files.pythonhosted.org/packages/13/ee/e581506806553afa4b7939d47bf50dca35c1151b8cc960f4542a6eb135ce/pypdfium2-5.2.0-py3-none-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:76b42a17748ac7dc04d5ef04d0561c6a0a4b546d113ec1d101d59650c6a340f7", size = 2964757, upload-time = "2025-12-12T13:19:56.406Z" }, + { url = "https://files.pythonhosted.org/packages/00/be/3715c652aff30f12284523dd337843d0efe3e721020f0ec303a99ffffd8d/pypdfium2-5.2.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9d4367d471439fae846f0aba91ff9e8d66e524edcf3c8d6e02fe96fa306e13b9", size = 4130319, upload-time = "2025-12-12T13:19:57.889Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0b/28aa2ede9004dd4192266bbad394df0896787f7c7bcfa4d1a6e091ad9a2c/pypdfium2-5.2.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:613f6bb2b47d76b66c0bf2ca581c7c33e3dd9dcb29d65d8c34fef4135f933149", size = 3746488, upload-time = "2025-12-12T13:19:59.469Z" }, + { url = "https://files.pythonhosted.org/packages/bc/04/1b791e1219652bbfc51df6498267d8dcec73ad508b99388b2890902ccd9d/pypdfium2-5.2.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c03fad3f2fa68d358f5dd4deb07e438482fa26fae439c49d127576d969769ca1", size = 4336534, upload-time = "2025-12-12T13:20:01.28Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e3/6f00f963bb702ffd2e3e2d9c7286bc3bb0bebcdfa96ca897d466f66976c6/pypdfium2-5.2.0-py3-none-musllinux_1_2_ppc64le.whl", hash = "sha256:f10be1900ae21879d02d9f4d58c2d2db3a2e6da611736a8e9decc22d1fb02909", size = 4375079, upload-time = "2025-12-12T13:20:03.117Z" }, + { url = "https://files.pythonhosted.org/packages/3a/2a/7ec2b191b5e1b7716a0dfc14e6860e89bb355fb3b94ed0c1d46db526858c/pypdfium2-5.2.0-py3-none-musllinux_1_2_riscv64.whl", hash = "sha256:97c1a126d30378726872f94866e38c055740cae80313638dafd1cd448d05e7c0", size = 3928648, upload-time = "2025-12-12T13:20:05.041Z" }, + { url = "https://files.pythonhosted.org/packages/bf/c3/c6d972fa095ff3ace76f9d3a91ceaf8a9dbbe0d9a5a84ac1d6178a46630e/pypdfium2-5.2.0-py3-none-musllinux_1_2_s390x.whl", hash = "sha256:c369f183a90781b788af9a357a877bc8caddc24801e8346d0bf23f3295f89f3a", size = 4997772, upload-time = "2025-12-12T13:20:06.453Z" }, + { url = "https://files.pythonhosted.org/packages/22/45/2c64584b7a3ca5c4652280a884f4b85b8ed24e27662adeebdc06d991c917/pypdfium2-5.2.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b391f1cceb454934b612a05b54e90f98aafeffe5e73830d71700b17f0812226b", size = 4180046, upload-time = "2025-12-12T13:20:08.715Z" }, + { url = "https://files.pythonhosted.org/packages/d6/99/8d1ff87b626649400e62a2840e6e10fe258443ba518798e071fee4cd86f9/pypdfium2-5.2.0-py3-none-win32.whl", hash = "sha256:c68067938f617c37e4d17b18de7cac231fc7ce0eb7b6653b7283ebe8764d4999", size = 2990175, upload-time = "2025-12-12T13:20:10.241Z" }, + { url = "https://files.pythonhosted.org/packages/93/fc/114fff8895b620aac4984808e93d01b6d7b93e342a1635fcfe2a5f39cf39/pypdfium2-5.2.0-py3-none-win_amd64.whl", hash = "sha256:eb0591b720e8aaeab9475c66d653655ec1be0464b946f3f48a53922e843f0f3b", size = 3098615, upload-time = "2025-12-12T13:20:11.795Z" }, + { url = "https://files.pythonhosted.org/packages/08/97/eb738bff5998760d6e0cbcb7dd04cbf1a95a97b997fac6d4e57562a58992/pypdfium2-5.2.0-py3-none-win_arm64.whl", hash = "sha256:5dd1ef579f19fa3719aee4959b28bda44b1072405756708b5e83df8806a19521", size = 2939479, upload-time = "2025-12-12T13:20:13.815Z" }, ] [[package]] name = "pypika" version = "0.48.9" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/2c/94ed7b91db81d61d7096ac8f2d325ec562fc75e35f3baea8749c85b28784/PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378", size = 67259 } +sdist = { url = "https://files.pythonhosted.org/packages/c7/2c/94ed7b91db81d61d7096ac8f2d325ec562fc75e35f3baea8749c85b28784/PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378", size = 67259, upload-time = "2022-03-15T11:22:57.066Z" } [[package]] name = "pyproject-hooks" version = "1.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/82/28175b2414effca1cdac8dc99f76d660e7a4fb0ceefa4b4ab8f5f6742925/pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8", size = 19228 } +sdist = { url = "https://files.pythonhosted.org/packages/e7/82/28175b2414effca1cdac8dc99f76d660e7a4fb0ceefa4b4ab8f5f6742925/pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8", size = 19228, upload-time = "2024-09-29T09:24:13.293Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/24/12818598c362d7f300f18e74db45963dbcb85150324092410c8b49405e42/pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913", size = 10216 }, + { url = "https://files.pythonhosted.org/packages/bd/24/12818598c362d7f300f18e74db45963dbcb85150324092410c8b49405e42/pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913", size = 10216, upload-time = "2024-09-29T09:24:11.978Z" }, ] [[package]] name = "pyreadline3" version = "3.5.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839 } +sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839, upload-time = "2024-09-19T02:40:10.062Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 }, + { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178, upload-time = "2024-09-19T02:40:08.598Z" }, ] [[package]] @@ -5004,9 +5054,9 @@ dependencies = [ { name = "packaging" }, { name = "pluggy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 } +sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = 
"sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891, upload-time = "2025-03-02T12:54:54.503Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634 }, + { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" }, ] [[package]] @@ -5017,9 +5067,9 @@ dependencies = [ { name = "py-cpuinfo" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/28/08/e6b0067efa9a1f2a1eb3043ecd8a0c48bfeb60d3255006dcc829d72d5da2/pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1", size = 334641 } +sdist = { url = "https://files.pythonhosted.org/packages/28/08/e6b0067efa9a1f2a1eb3043ecd8a0c48bfeb60d3255006dcc829d72d5da2/pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1", size = 334641, upload-time = "2022-10-25T21:21:55.686Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/a1/3b70862b5b3f830f0422844f25a823d0470739d994466be9dbbbb414d85a/pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6", size = 43951 }, + { url = "https://files.pythonhosted.org/packages/4d/a1/3b70862b5b3f830f0422844f25a823d0470739d994466be9dbbbb414d85a/pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6", size = 43951, upload-time = "2022-10-25T21:21:53.208Z" }, ] [[package]] @@ -5030,9 +5080,9 @@ dependencies = [ { name = "coverage", extra = ["toml"] }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7a/15/da3df99fd551507694a9b01f512a2f6cf1254f33601605843c3775f39460/pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6", size = 63245 } +sdist = { url = "https://files.pythonhosted.org/packages/7a/15/da3df99fd551507694a9b01f512a2f6cf1254f33601605843c3775f39460/pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6", size = 63245, upload-time = "2023-05-24T18:44:56.845Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/4b/8b78d126e275efa2379b1c2e09dc52cf70df16fc3b90613ef82531499d73/pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a", size = 21949 }, + { url = "https://files.pythonhosted.org/packages/a7/4b/8b78d126e275efa2379b1c2e09dc52cf70df16fc3b90613ef82531499d73/pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a", size = 21949, upload-time = "2023-05-24T18:44:54.079Z" }, ] [[package]] @@ -5042,9 +5092,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/31/27f28431a16b83cab7a636dce59cf397517807d247caa38ee67d65e71ef8/pytest_env-1.1.5.tar.gz", hash = "sha256:91209840aa0e43385073ac464a554ad2947cc2fd663a9debf88d03b01e0cc1cf", size = 8911 } +sdist = { url = 
"https://files.pythonhosted.org/packages/1f/31/27f28431a16b83cab7a636dce59cf397517807d247caa38ee67d65e71ef8/pytest_env-1.1.5.tar.gz", hash = "sha256:91209840aa0e43385073ac464a554ad2947cc2fd663a9debf88d03b01e0cc1cf", size = 8911, upload-time = "2024-09-17T22:39:18.566Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/b8/87cfb16045c9d4092cfcf526135d73b88101aac83bc1adcf82dfb5fd3833/pytest_env-1.1.5-py3-none-any.whl", hash = "sha256:ce90cf8772878515c24b31cd97c7fa1f4481cd68d588419fd45f10ecaee6bc30", size = 6141 }, + { url = "https://files.pythonhosted.org/packages/de/b8/87cfb16045c9d4092cfcf526135d73b88101aac83bc1adcf82dfb5fd3833/pytest_env-1.1.5-py3-none-any.whl", hash = "sha256:ce90cf8772878515c24b31cd97c7fa1f4481cd68d588419fd45f10ecaee6bc30", size = 6141, upload-time = "2024-09-17T22:39:16.942Z" }, ] [[package]] @@ -5054,9 +5104,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241 } +sdist = { url = "https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241, upload-time = "2025-05-26T13:58:45.167Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923 }, + { url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload-time = "2025-05-26T13:58:43.487Z" }, ] [[package]] @@ -5066,9 +5116,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ac/82/4c9ecabab13363e72d880f2fb504c5f750433b2b6f16e99f4ec21ada284c/pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a", size = 17973 } +sdist = { url = "https://files.pythonhosted.org/packages/ac/82/4c9ecabab13363e72d880f2fb504c5f750433b2b6f16e99f4ec21ada284c/pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a", size = 17973, upload-time = "2025-05-05T19:44:34.99Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/b6/3127540ecdf1464a00e5a01ee60a1b09175f6913f0644ac748494d9c4b21/pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2", size = 14382 }, + { url = "https://files.pythonhosted.org/packages/fa/b6/3127540ecdf1464a00e5a01ee60a1b09175f6913f0644ac748494d9c4b21/pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2", size = 14382, upload-time = "2025-05-05T19:44:33.502Z" }, ] [[package]] @@ -5076,43 +5126,43 @@ name = "python-calamine" version = "0.5.4" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/25/1a/ff59788a7e8bfeded91a501abdd068dc7e2f5865ee1a55432133b0f7f08c/python_calamine-0.5.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:944bcc072aca29d346456b4e42675c4831c52c25641db3e976c6013cdd07d4cd", size = 854308 }, - { url = "https://files.pythonhosted.org/packages/24/7d/33fc441a70b771093d10fa5086831be289766535cbcb2b443ff1d5e549d8/python_calamine-0.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e637382e50cabc263a37eda7a3cd33f054271e4391a304f68cecb2e490827533", size = 830841 }, - { url = "https://files.pythonhosted.org/packages/0f/38/b5b25e6ce0a983c9751fb026bd8c5d77eb81a775948cc3d9ce2b18b2fc91/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b2a31d1e711c5661b4f04efd89975d311788bd9a43a111beff74d7c4c8f8d7a", size = 898287 }, - { url = "https://files.pythonhosted.org/packages/0f/e9/ab288cd489999f962f791d6c8544803c29dcf24e9b6dde24634c41ec09dd/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2078ede35cbd26cf7186673405ff13321caacd9e45a5e57b54ce7b3ef0eec2ff", size = 886960 }, - { url = "https://files.pythonhosted.org/packages/f0/4d/2a261f2ccde7128a683cdb20733f9bc030ab37a90803d8de836bf6113e5b/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:faab9f59bb9cedba2b35c6e1f5dc72461d8f2837e8f6ab24fafff0d054ddc4b5", size = 1044123 }, - { url = "https://files.pythonhosted.org/packages/20/dc/a84c5a5a2c38816570bcc96ae4c9c89d35054e59c4199d3caef9c60b65cf/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:300d8d5e6c63bdecf79268d3b6d2a84078cda39cb3394ed09c5c00a61ce9ff32", size = 941997 }, - { url = "https://files.pythonhosted.org/packages/dd/92/b970d8316c54f274d9060e7c804b79dbfa250edeb6390cd94f5fcfeb5f87/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0019a74f1c0b1cbf08fee9ece114d310522837cdf63660a46fe46d3688f215ea", size = 905881 }, - { url = "https://files.pythonhosted.org/packages/ac/88/9186ac8d3241fc6f90995cc7539bdbd75b770d2dab20978a702c36fbce5f/python_calamine-0.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:30b40ffb374f7fb9ce20ca87f43a609288f568e41872f8a72e5af313a9e20af0", size = 947224 }, - { url = "https://files.pythonhosted.org/packages/ee/ec/6ac1882dc6b6fa829e2d1d94ffa58bd0c67df3dba074b2e2f3134d7f573a/python_calamine-0.5.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:206242690a5a5dff73a193fb1a1ca3c7a8aed95e2f9f10c875dece5a22068801", size = 1078351 }, - { url = "https://files.pythonhosted.org/packages/3e/f1/07aff6966b04b7452c41a802b37199d9e9ac656d66d6092b83ab0937e212/python_calamine-0.5.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:88628e1a17a6f352d6433b0abf6edc4cb2295b8fbb3451392390f3a6a7a8cada", size = 1150148 }, - { url = "https://files.pythonhosted.org/packages/4e/be/90aedeb0b77ea592a698a20db09014a5217ce46a55b699121849e239c8e7/python_calamine-0.5.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:22524cfb7720d15894a02392bbd49f8e7a8c173493f0628a45814d78e4243fff", size = 1080101 }, - { url = "https://files.pythonhosted.org/packages/30/89/1fadd511d132d5ea9326c003c8753b6d234d61d9a72775fb1632cc94beb9/python_calamine-0.5.4-cp311-cp311-win32.whl", hash = "sha256:d159e98ef3475965555b67354f687257648f5c3686ed08e7faa34d54cc9274e1", size = 679593 }, - { url = 
"https://files.pythonhosted.org/packages/e9/ba/d7324400a02491549ef30e0e480561a3a841aa073ac7c096313bc2cea555/python_calamine-0.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:0d019b082f9a114cf1e130dc52b77f9f881325ab13dc31485d7b4563ad9e0812", size = 721570 }, - { url = "https://files.pythonhosted.org/packages/4f/15/8c7895e603b4ae63ff279aae4aa6120658a15f805750ccdb5d8b311df616/python_calamine-0.5.4-cp311-cp311-win_arm64.whl", hash = "sha256:bb20875776e5b4c85134c2bf49fea12288e64448ed49f1d89a3a83f5bb16bd59", size = 685789 }, - { url = "https://files.pythonhosted.org/packages/ff/60/b1ace7a0fd636581b3bb27f1011cb7b2fe4d507b58401c4d328cfcb5c849/python_calamine-0.5.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4d711f91283d28f19feb111ed666764de69e6d2a0201df8f84e81a238f68d193", size = 850087 }, - { url = "https://files.pythonhosted.org/packages/7f/32/32ca71ce50f9b7c7d6e7ec5fcc579a97ddd8b8ce314fe143ba2a19441dc7/python_calamine-0.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ed67afd3adedb5bcfb428cf1f2d7dfd936dea9fe979ab631194495ab092973ba", size = 825659 }, - { url = "https://files.pythonhosted.org/packages/63/c5/27ba71a9da2a09be9ff2f0dac522769956c8c89d6516565b21c9c78bfae6/python_calamine-0.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13662895dac487315ccce25ea272a1ea7e7ac05d899cde4e33d59d6c43274c54", size = 897332 }, - { url = "https://files.pythonhosted.org/packages/5a/e7/c4be6ff8e8899ace98cacc9604a2dd1abc4901839b733addfb6ef32c22ba/python_calamine-0.5.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23e354755583cfaa824ddcbe8b099c5c7ac19bf5179320426e7a88eea2f14bc5", size = 886885 }, - { url = "https://files.pythonhosted.org/packages/38/24/80258fb041435021efa10d0b528df6842e442585e48cbf130e73fed2529b/python_calamine-0.5.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e1bc3f22107dcbdeb32d4d3c5c1e8831d3c85d4b004a8606dd779721b29843d", size = 1043907 }, - { url = "https://files.pythonhosted.org/packages/f2/20/157340787d03ef6113a967fd8f84218e867ba4c2f7fc58cc645d8665a61a/python_calamine-0.5.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:182b314117e47dbd952adaa2b19c515555083a48d6f9146f46faaabd9dab2f81", size = 942376 }, - { url = "https://files.pythonhosted.org/packages/98/f5/aec030f567ee14c60b6fc9028a78767687f484071cb080f7cfa328d6496e/python_calamine-0.5.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8f882e092ab23f72ea07e2e48f5f2efb1885c1836fb949f22fd4540ae11742e", size = 906455 }, - { url = "https://files.pythonhosted.org/packages/29/58/4affc0d1389f837439ad45f400f3792e48030b75868ec757e88cb35d7626/python_calamine-0.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:62a9b4b7b9bd99d03373e58884dfb60d5a1c292c8e04e11f8b7420b77a46813e", size = 948132 }, - { url = "https://files.pythonhosted.org/packages/b4/2e/70ed04f39e682a9116730f56b7fbb54453244ccc1c3dae0662d4819f1c1d/python_calamine-0.5.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:98bb011d33c0e2d183ff30ab3d96792c3493f56f67a7aa2fcadad9a03539e79b", size = 1077436 }, - { url = "https://files.pythonhosted.org/packages/cb/ce/806f8ce06b5bb9db33007f85045c304cda410970e7aa07d08f6eaee67913/python_calamine-0.5.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:6b218a95489ff2f1cc1de0bba2a16fcc82981254bbb23f31d41d29191282b9ad", size = 1150570 }, - { url = 
"https://files.pythonhosted.org/packages/18/da/61f13c8d107783128c1063cf52ca9cacdc064c58d58d3cf49c1728ce8296/python_calamine-0.5.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8296a4872dbe834205d25d26dd6cfcb33ee9da721668d81b21adc25a07c07e4", size = 1080286 }, - { url = "https://files.pythonhosted.org/packages/99/85/c5612a63292eb7d0648b17c5ff32ad5d6c6f3e1d78825f01af5c765f4d3f/python_calamine-0.5.4-cp312-cp312-win32.whl", hash = "sha256:cebb9c88983ae676c60c8c02aa29a9fe13563f240579e66de5c71b969ace5fd9", size = 676617 }, - { url = "https://files.pythonhosted.org/packages/bb/18/5a037942de8a8df0c805224b2fba06df6d25c1be3c9484ba9db1ca4f3ee6/python_calamine-0.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:15abd7aff98fde36d7df91ac051e86e66e5d5326a7fa98d54697afe95a613501", size = 721464 }, - { url = "https://files.pythonhosted.org/packages/d1/8b/89ca17b44bcd8be5d0e8378d87b880ae17a837573553bd2147cceca7e759/python_calamine-0.5.4-cp312-cp312-win_arm64.whl", hash = "sha256:1cef0d0fc936974020a24acf1509ed2a285b30a4e1adf346c057112072e84251", size = 687268 }, - { url = "https://files.pythonhosted.org/packages/ab/a8/0e05992489f8ca99eadfb52e858a7653b01b27a7c66d040abddeb4bdf799/python_calamine-0.5.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8d4be45952555f129584e0ca6ddb442bed5cb97b8d7cd0fd5ae463237b98eb15", size = 856420 }, - { url = "https://files.pythonhosted.org/packages/f0/b0/5bbe52c97161acb94066e7020c2fed7eafbca4bf6852a4b02ed80bf0b24b/python_calamine-0.5.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b387d12cb8cae98c8e0c061c5400f80bad1f43f26fafcf95ff5934df995f50b", size = 833240 }, - { url = "https://files.pythonhosted.org/packages/c7/b9/44fa30f6bf479072d9042856d3fab8bdd1532d2d901e479e199bc1de0e6c/python_calamine-0.5.4-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2103714954b7dbed72a0b0eff178b08e854bba130be283e3ae3d7c95521e8f69", size = 899470 }, - { url = "https://files.pythonhosted.org/packages/0e/f2/acbb2c1d6acba1eaf6b1efb6485c98995050bddedfb6b93ce05be2753a85/python_calamine-0.5.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c09fdebe23a5045d09e12b3366ff8fd45165b6fb56f55e9a12342a5daddbd11a", size = 906108 }, - { url = "https://files.pythonhosted.org/packages/77/28/ff007e689539d6924223565995db876ac044466b8859bade371696294659/python_calamine-0.5.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa992d72fbd38f09107430100b7688c03046d8c1994e4cff9bbbd2a825811796", size = 948580 }, - { url = "https://files.pythonhosted.org/packages/a4/06/b423655446fb27e22bfc1ca5e5b11f3449e0350fe8fefa0ebd68675f7e85/python_calamine-0.5.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:88e608c7589412d3159be40d270a90994e38c9eafc125bf8ad5a9c92deffd6dd", size = 1079516 }, - { url = "https://files.pythonhosted.org/packages/76/f5/c7132088978b712a5eddf1ca6bf64ae81335fbca9443ed486330519954c3/python_calamine-0.5.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:51a007801aef12f6bc93a545040a36df48e9af920a7da9ded915584ad9a002b1", size = 1152379 }, - { url = "https://files.pythonhosted.org/packages/bd/c8/37a8d80b7e55e7cfbe649f7a92a7e838defc746aac12dca751aad5dd06a6/python_calamine-0.5.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b056db205e45ab9381990a5c15d869f1021c1262d065740c9cd296fc5d3fb248", size = 1080420 }, - { url = 
"https://files.pythonhosted.org/packages/10/52/9a96d06e75862d356dc80a4a465ad88fba544a19823568b4ff484e7a12f2/python_calamine-0.5.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:dd8f4123b2403fc22c92ec4f5e51c495427cf3739c5cb614b9829745a80922db", size = 722350 }, + { url = "https://files.pythonhosted.org/packages/25/1a/ff59788a7e8bfeded91a501abdd068dc7e2f5865ee1a55432133b0f7f08c/python_calamine-0.5.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:944bcc072aca29d346456b4e42675c4831c52c25641db3e976c6013cdd07d4cd", size = 854308, upload-time = "2025-10-21T07:10:55.17Z" }, + { url = "https://files.pythonhosted.org/packages/24/7d/33fc441a70b771093d10fa5086831be289766535cbcb2b443ff1d5e549d8/python_calamine-0.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e637382e50cabc263a37eda7a3cd33f054271e4391a304f68cecb2e490827533", size = 830841, upload-time = "2025-10-21T07:10:57.353Z" }, + { url = "https://files.pythonhosted.org/packages/0f/38/b5b25e6ce0a983c9751fb026bd8c5d77eb81a775948cc3d9ce2b18b2fc91/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b2a31d1e711c5661b4f04efd89975d311788bd9a43a111beff74d7c4c8f8d7a", size = 898287, upload-time = "2025-10-21T07:10:58.977Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e9/ab288cd489999f962f791d6c8544803c29dcf24e9b6dde24634c41ec09dd/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2078ede35cbd26cf7186673405ff13321caacd9e45a5e57b54ce7b3ef0eec2ff", size = 886960, upload-time = "2025-10-21T07:11:00.462Z" }, + { url = "https://files.pythonhosted.org/packages/f0/4d/2a261f2ccde7128a683cdb20733f9bc030ab37a90803d8de836bf6113e5b/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:faab9f59bb9cedba2b35c6e1f5dc72461d8f2837e8f6ab24fafff0d054ddc4b5", size = 1044123, upload-time = "2025-10-21T07:11:02.153Z" }, + { url = "https://files.pythonhosted.org/packages/20/dc/a84c5a5a2c38816570bcc96ae4c9c89d35054e59c4199d3caef9c60b65cf/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:300d8d5e6c63bdecf79268d3b6d2a84078cda39cb3394ed09c5c00a61ce9ff32", size = 941997, upload-time = "2025-10-21T07:11:03.537Z" }, + { url = "https://files.pythonhosted.org/packages/dd/92/b970d8316c54f274d9060e7c804b79dbfa250edeb6390cd94f5fcfeb5f87/python_calamine-0.5.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0019a74f1c0b1cbf08fee9ece114d310522837cdf63660a46fe46d3688f215ea", size = 905881, upload-time = "2025-10-21T07:11:05.228Z" }, + { url = "https://files.pythonhosted.org/packages/ac/88/9186ac8d3241fc6f90995cc7539bdbd75b770d2dab20978a702c36fbce5f/python_calamine-0.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:30b40ffb374f7fb9ce20ca87f43a609288f568e41872f8a72e5af313a9e20af0", size = 947224, upload-time = "2025-10-21T07:11:06.618Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ec/6ac1882dc6b6fa829e2d1d94ffa58bd0c67df3dba074b2e2f3134d7f573a/python_calamine-0.5.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:206242690a5a5dff73a193fb1a1ca3c7a8aed95e2f9f10c875dece5a22068801", size = 1078351, upload-time = "2025-10-21T07:11:08.368Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f1/07aff6966b04b7452c41a802b37199d9e9ac656d66d6092b83ab0937e212/python_calamine-0.5.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:88628e1a17a6f352d6433b0abf6edc4cb2295b8fbb3451392390f3a6a7a8cada", size = 1150148, 
upload-time = "2025-10-21T07:11:10.18Z" }, + { url = "https://files.pythonhosted.org/packages/4e/be/90aedeb0b77ea592a698a20db09014a5217ce46a55b699121849e239c8e7/python_calamine-0.5.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:22524cfb7720d15894a02392bbd49f8e7a8c173493f0628a45814d78e4243fff", size = 1080101, upload-time = "2025-10-21T07:11:11.489Z" }, + { url = "https://files.pythonhosted.org/packages/30/89/1fadd511d132d5ea9326c003c8753b6d234d61d9a72775fb1632cc94beb9/python_calamine-0.5.4-cp311-cp311-win32.whl", hash = "sha256:d159e98ef3475965555b67354f687257648f5c3686ed08e7faa34d54cc9274e1", size = 679593, upload-time = "2025-10-21T07:11:12.758Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ba/d7324400a02491549ef30e0e480561a3a841aa073ac7c096313bc2cea555/python_calamine-0.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:0d019b082f9a114cf1e130dc52b77f9f881325ab13dc31485d7b4563ad9e0812", size = 721570, upload-time = "2025-10-21T07:11:14.336Z" }, + { url = "https://files.pythonhosted.org/packages/4f/15/8c7895e603b4ae63ff279aae4aa6120658a15f805750ccdb5d8b311df616/python_calamine-0.5.4-cp311-cp311-win_arm64.whl", hash = "sha256:bb20875776e5b4c85134c2bf49fea12288e64448ed49f1d89a3a83f5bb16bd59", size = 685789, upload-time = "2025-10-21T07:11:15.646Z" }, + { url = "https://files.pythonhosted.org/packages/ff/60/b1ace7a0fd636581b3bb27f1011cb7b2fe4d507b58401c4d328cfcb5c849/python_calamine-0.5.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4d711f91283d28f19feb111ed666764de69e6d2a0201df8f84e81a238f68d193", size = 850087, upload-time = "2025-10-21T07:11:17.002Z" }, + { url = "https://files.pythonhosted.org/packages/7f/32/32ca71ce50f9b7c7d6e7ec5fcc579a97ddd8b8ce314fe143ba2a19441dc7/python_calamine-0.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ed67afd3adedb5bcfb428cf1f2d7dfd936dea9fe979ab631194495ab092973ba", size = 825659, upload-time = "2025-10-21T07:11:18.248Z" }, + { url = "https://files.pythonhosted.org/packages/63/c5/27ba71a9da2a09be9ff2f0dac522769956c8c89d6516565b21c9c78bfae6/python_calamine-0.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13662895dac487315ccce25ea272a1ea7e7ac05d899cde4e33d59d6c43274c54", size = 897332, upload-time = "2025-10-21T07:11:19.89Z" }, + { url = "https://files.pythonhosted.org/packages/5a/e7/c4be6ff8e8899ace98cacc9604a2dd1abc4901839b733addfb6ef32c22ba/python_calamine-0.5.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23e354755583cfaa824ddcbe8b099c5c7ac19bf5179320426e7a88eea2f14bc5", size = 886885, upload-time = "2025-10-21T07:11:21.912Z" }, + { url = "https://files.pythonhosted.org/packages/38/24/80258fb041435021efa10d0b528df6842e442585e48cbf130e73fed2529b/python_calamine-0.5.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e1bc3f22107dcbdeb32d4d3c5c1e8831d3c85d4b004a8606dd779721b29843d", size = 1043907, upload-time = "2025-10-21T07:11:23.3Z" }, + { url = "https://files.pythonhosted.org/packages/f2/20/157340787d03ef6113a967fd8f84218e867ba4c2f7fc58cc645d8665a61a/python_calamine-0.5.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:182b314117e47dbd952adaa2b19c515555083a48d6f9146f46faaabd9dab2f81", size = 942376, upload-time = "2025-10-21T07:11:24.866Z" }, + { url = "https://files.pythonhosted.org/packages/98/f5/aec030f567ee14c60b6fc9028a78767687f484071cb080f7cfa328d6496e/python_calamine-0.5.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b8f882e092ab23f72ea07e2e48f5f2efb1885c1836fb949f22fd4540ae11742e", size = 906455, upload-time = "2025-10-21T07:11:26.203Z" }, + { url = "https://files.pythonhosted.org/packages/29/58/4affc0d1389f837439ad45f400f3792e48030b75868ec757e88cb35d7626/python_calamine-0.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:62a9b4b7b9bd99d03373e58884dfb60d5a1c292c8e04e11f8b7420b77a46813e", size = 948132, upload-time = "2025-10-21T07:11:27.507Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2e/70ed04f39e682a9116730f56b7fbb54453244ccc1c3dae0662d4819f1c1d/python_calamine-0.5.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:98bb011d33c0e2d183ff30ab3d96792c3493f56f67a7aa2fcadad9a03539e79b", size = 1077436, upload-time = "2025-10-21T07:11:28.801Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ce/806f8ce06b5bb9db33007f85045c304cda410970e7aa07d08f6eaee67913/python_calamine-0.5.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:6b218a95489ff2f1cc1de0bba2a16fcc82981254bbb23f31d41d29191282b9ad", size = 1150570, upload-time = "2025-10-21T07:11:30.237Z" }, + { url = "https://files.pythonhosted.org/packages/18/da/61f13c8d107783128c1063cf52ca9cacdc064c58d58d3cf49c1728ce8296/python_calamine-0.5.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8296a4872dbe834205d25d26dd6cfcb33ee9da721668d81b21adc25a07c07e4", size = 1080286, upload-time = "2025-10-21T07:11:31.564Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/c5612a63292eb7d0648b17c5ff32ad5d6c6f3e1d78825f01af5c765f4d3f/python_calamine-0.5.4-cp312-cp312-win32.whl", hash = "sha256:cebb9c88983ae676c60c8c02aa29a9fe13563f240579e66de5c71b969ace5fd9", size = 676617, upload-time = "2025-10-21T07:11:32.833Z" }, + { url = "https://files.pythonhosted.org/packages/bb/18/5a037942de8a8df0c805224b2fba06df6d25c1be3c9484ba9db1ca4f3ee6/python_calamine-0.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:15abd7aff98fde36d7df91ac051e86e66e5d5326a7fa98d54697afe95a613501", size = 721464, upload-time = "2025-10-21T07:11:34.383Z" }, + { url = "https://files.pythonhosted.org/packages/d1/8b/89ca17b44bcd8be5d0e8378d87b880ae17a837573553bd2147cceca7e759/python_calamine-0.5.4-cp312-cp312-win_arm64.whl", hash = "sha256:1cef0d0fc936974020a24acf1509ed2a285b30a4e1adf346c057112072e84251", size = 687268, upload-time = "2025-10-21T07:11:36.324Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a8/0e05992489f8ca99eadfb52e858a7653b01b27a7c66d040abddeb4bdf799/python_calamine-0.5.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8d4be45952555f129584e0ca6ddb442bed5cb97b8d7cd0fd5ae463237b98eb15", size = 856420, upload-time = "2025-10-21T07:13:20.962Z" }, + { url = "https://files.pythonhosted.org/packages/f0/b0/5bbe52c97161acb94066e7020c2fed7eafbca4bf6852a4b02ed80bf0b24b/python_calamine-0.5.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b387d12cb8cae98c8e0c061c5400f80bad1f43f26fafcf95ff5934df995f50b", size = 833240, upload-time = "2025-10-21T07:13:22.801Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b9/44fa30f6bf479072d9042856d3fab8bdd1532d2d901e479e199bc1de0e6c/python_calamine-0.5.4-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2103714954b7dbed72a0b0eff178b08e854bba130be283e3ae3d7c95521e8f69", size = 899470, upload-time = "2025-10-21T07:13:25.176Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/f2/acbb2c1d6acba1eaf6b1efb6485c98995050bddedfb6b93ce05be2753a85/python_calamine-0.5.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c09fdebe23a5045d09e12b3366ff8fd45165b6fb56f55e9a12342a5daddbd11a", size = 906108, upload-time = "2025-10-21T07:13:26.709Z" }, + { url = "https://files.pythonhosted.org/packages/77/28/ff007e689539d6924223565995db876ac044466b8859bade371696294659/python_calamine-0.5.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa992d72fbd38f09107430100b7688c03046d8c1994e4cff9bbbd2a825811796", size = 948580, upload-time = "2025-10-21T07:13:30.816Z" }, + { url = "https://files.pythonhosted.org/packages/a4/06/b423655446fb27e22bfc1ca5e5b11f3449e0350fe8fefa0ebd68675f7e85/python_calamine-0.5.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:88e608c7589412d3159be40d270a90994e38c9eafc125bf8ad5a9c92deffd6dd", size = 1079516, upload-time = "2025-10-21T07:13:32.288Z" }, + { url = "https://files.pythonhosted.org/packages/76/f5/c7132088978b712a5eddf1ca6bf64ae81335fbca9443ed486330519954c3/python_calamine-0.5.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:51a007801aef12f6bc93a545040a36df48e9af920a7da9ded915584ad9a002b1", size = 1152379, upload-time = "2025-10-21T07:13:33.739Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c8/37a8d80b7e55e7cfbe649f7a92a7e838defc746aac12dca751aad5dd06a6/python_calamine-0.5.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b056db205e45ab9381990a5c15d869f1021c1262d065740c9cd296fc5d3fb248", size = 1080420, upload-time = "2025-10-21T07:13:35.33Z" }, + { url = "https://files.pythonhosted.org/packages/10/52/9a96d06e75862d356dc80a4a465ad88fba544a19823568b4ff484e7a12f2/python_calamine-0.5.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:dd8f4123b2403fc22c92ec4f5e51c495427cf3739c5cb614b9829745a80922db", size = 722350, upload-time = "2025-10-21T07:13:37.074Z" }, ] [[package]] @@ -5122,9 +5172,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] [[package]] @@ -5135,45 +5185,45 @@ dependencies = [ { name = "lxml" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/35/e4/386c514c53684772885009c12b67a7edd526c15157778ac1b138bc75063e/python_docx-1.1.2.tar.gz", hash = 
"sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd", size = 5656581 } +sdist = { url = "https://files.pythonhosted.org/packages/35/e4/386c514c53684772885009c12b67a7edd526c15157778ac1b138bc75063e/python_docx-1.1.2.tar.gz", hash = "sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd", size = 5656581, upload-time = "2024-05-01T19:41:57.772Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/3d/330d9efbdb816d3f60bf2ad92f05e1708e4a1b9abe80461ac3444c83f749/python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe", size = 244315 }, + { url = "https://files.pythonhosted.org/packages/3e/3d/330d9efbdb816d3f60bf2ad92f05e1708e4a1b9abe80461ac3444c83f749/python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe", size = 244315, upload-time = "2024-05-01T19:41:47.006Z" }, ] [[package]] name = "python-dotenv" version = "1.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115, upload-time = "2024-01-23T06:33:00.505Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" }, ] [[package]] name = "python-http-client" version = "3.3.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/56/fa/284e52a8c6dcbe25671f02d217bf2f85660db940088faf18ae7a05e97313/python_http_client-3.3.7.tar.gz", hash = "sha256:bf841ee45262747e00dec7ee9971dfb8c7d83083f5713596488d67739170cea0", size = 9377 } +sdist = { url = "https://files.pythonhosted.org/packages/56/fa/284e52a8c6dcbe25671f02d217bf2f85660db940088faf18ae7a05e97313/python_http_client-3.3.7.tar.gz", hash = "sha256:bf841ee45262747e00dec7ee9971dfb8c7d83083f5713596488d67739170cea0", size = 9377, upload-time = "2022-03-09T20:23:56.386Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/31/9b360138f4e4035ee9dac4fe1132b6437bd05751aaf1db2a2d83dc45db5f/python_http_client-3.3.7-py3-none-any.whl", hash = "sha256:ad371d2bbedc6ea15c26179c6222a78bc9308d272435ddf1d5c84f068f249a36", size = 8352 }, + { url = "https://files.pythonhosted.org/packages/29/31/9b360138f4e4035ee9dac4fe1132b6437bd05751aaf1db2a2d83dc45db5f/python_http_client-3.3.7-py3-none-any.whl", hash = "sha256:ad371d2bbedc6ea15c26179c6222a78bc9308d272435ddf1d5c84f068f249a36", size = 8352, upload-time = "2022-03-09T20:23:54.862Z" }, ] [[package]] name = "python-iso639" version = "2025.11.16" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/a1/3b/3e07aadeeb7bbb2574d6aa6ccacbc58b17bd2b1fb6c7196bf96ab0e45129/python_iso639-2025.11.16.tar.gz", hash = "sha256:aabe941267898384415a509f5236d7cfc191198c84c5c6f73dac73d9783f5169", size = 174186 } +sdist = { url = "https://files.pythonhosted.org/packages/a1/3b/3e07aadeeb7bbb2574d6aa6ccacbc58b17bd2b1fb6c7196bf96ab0e45129/python_iso639-2025.11.16.tar.gz", hash = "sha256:aabe941267898384415a509f5236d7cfc191198c84c5c6f73dac73d9783f5169", size = 174186, upload-time = "2025-11-16T21:53:37.031Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/2d/563849c31e58eb2e273fa0c391a7d9987db32f4d9152fe6ecdac0a8ffe93/python_iso639-2025.11.16-py3-none-any.whl", hash = "sha256:65f6ac6c6d8e8207f6175f8bf7fff7db486c6dc5c1d8866c2b77d2a923370896", size = 167818 }, + { url = "https://files.pythonhosted.org/packages/b5/2d/563849c31e58eb2e273fa0c391a7d9987db32f4d9152fe6ecdac0a8ffe93/python_iso639-2025.11.16-py3-none-any.whl", hash = "sha256:65f6ac6c6d8e8207f6175f8bf7fff7db486c6dc5c1d8866c2b77d2a923370896", size = 167818, upload-time = "2025-11-16T21:53:35.36Z" }, ] [[package]] name = "python-magic" version = "0.4.27" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/da/db/0b3e28ac047452d079d375ec6798bf76a036a08182dbb39ed38116a49130/python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b", size = 14677 } +sdist = { url = "https://files.pythonhosted.org/packages/da/db/0b3e28ac047452d079d375ec6798bf76a036a08182dbb39ed38116a49130/python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b", size = 14677, upload-time = "2022-06-07T20:16:59.508Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/73/9f872cb81fc5c3bb48f7227872c28975f998f3e7c2b1c16e95e6432bbb90/python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3", size = 13840 }, + { url = "https://files.pythonhosted.org/packages/6c/73/9f872cb81fc5c3bb48f7227872c28975f998f3e7c2b1c16e95e6432bbb90/python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3", size = 13840, upload-time = "2022-06-07T20:16:57.763Z" }, ] [[package]] @@ -5185,9 +5235,9 @@ dependencies = [ { name = "olefile" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a2/4e/869f34faedbc968796d2c7e9837dede079c9cb9750917356b1f1eda926e9/python_oxmsg-0.0.2.tar.gz", hash = "sha256:a6aff4deb1b5975d44d49dab1d9384089ffeec819e19c6940bc7ffbc84775fad", size = 34713 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/4e/869f34faedbc968796d2c7e9837dede079c9cb9750917356b1f1eda926e9/python_oxmsg-0.0.2.tar.gz", hash = "sha256:a6aff4deb1b5975d44d49dab1d9384089ffeec819e19c6940bc7ffbc84775fad", size = 34713, upload-time = "2025-02-03T17:13:47.415Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/53/67/f56c69a98c7eb244025845506387d0f961681657c9fcd8b2d2edd148f9d2/python_oxmsg-0.0.2-py3-none-any.whl", hash = "sha256:22be29b14c46016bcd05e34abddfd8e05ee82082f53b82753d115da3fc7d0355", size = 31455 }, + { url = "https://files.pythonhosted.org/packages/53/67/f56c69a98c7eb244025845506387d0f961681657c9fcd8b2d2edd148f9d2/python_oxmsg-0.0.2-py3-none-any.whl", hash = "sha256:22be29b14c46016bcd05e34abddfd8e05ee82082f53b82753d115da3fc7d0355", size = 31455, upload-time = "2025-02-03T17:13:46.061Z" }, ] 
[[package]] @@ -5200,18 +5250,18 @@ dependencies = [ { name = "typing-extensions" }, { name = "xlsxwriter" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/a9/0c0db8d37b2b8a645666f7fd8accea4c6224e013c42b1d5c17c93590cd06/python_pptx-1.0.2.tar.gz", hash = "sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095", size = 10109297 } +sdist = { url = "https://files.pythonhosted.org/packages/52/a9/0c0db8d37b2b8a645666f7fd8accea4c6224e013c42b1d5c17c93590cd06/python_pptx-1.0.2.tar.gz", hash = "sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095", size = 10109297, upload-time = "2024-08-07T17:33:37.772Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/4f/00be2196329ebbff56ce564aa94efb0fbc828d00de250b1980de1a34ab49/python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba", size = 472788 }, + { url = "https://files.pythonhosted.org/packages/d9/4f/00be2196329ebbff56ce564aa94efb0fbc828d00de250b1980de1a34ab49/python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba", size = 472788, upload-time = "2024-08-07T17:33:28.192Z" }, ] [[package]] name = "pytz" version = "2025.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 }, + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, ] [[package]] @@ -5219,48 +5269,48 @@ name = "pywin32" version = "311" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031 }, - { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308 }, - { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930 }, - { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 
8706543 }, - { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040 }, - { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102 }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, ] [[package]] name = "pyxlsb" version = "1.0.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/13/eebaeb7a40b062d1c6f7f91d09e73d30a69e33e4baa7cbe4b7658548b1cd/pyxlsb-1.0.10.tar.gz", hash = "sha256:8062d1ea8626d3f1980e8b1cfe91a4483747449242ecb61013bc2df85435f685", size = 22424 } +sdist = { url = "https://files.pythonhosted.org/packages/3f/13/eebaeb7a40b062d1c6f7f91d09e73d30a69e33e4baa7cbe4b7658548b1cd/pyxlsb-1.0.10.tar.gz", hash = "sha256:8062d1ea8626d3f1980e8b1cfe91a4483747449242ecb61013bc2df85435f685", size = 22424, upload-time = "2022-10-14T19:17:47.308Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/92/345823838ae367c59b63e03aef9c331f485370f9df6d049256a61a28f06d/pyxlsb-1.0.10-py2.py3-none-any.whl", hash = "sha256:87c122a9a622e35ca5e741d2e541201d28af00fb46bec492cfa9586890b120b4", size = 23849 }, + { url = "https://files.pythonhosted.org/packages/7e/92/345823838ae367c59b63e03aef9c331f485370f9df6d049256a61a28f06d/pyxlsb-1.0.10-py2.py3-none-any.whl", hash = "sha256:87c122a9a622e35ca5e741d2e541201d28af00fb46bec492cfa9586890b120b4", size = 23849, upload-time = "2022-10-14T19:17:46.079Z" }, ] [[package]] name = "pyyaml" version = 
"6.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960 } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826 }, - { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577 }, - { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556 }, - { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114 }, - { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638 }, - { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463 }, - { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986 }, - { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543 }, - { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763 }, - { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063 }, - { url = 
"https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973 }, - { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116 }, - { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011 }, - { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870 }, - { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089 }, - { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181 }, - { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658 }, - { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003 }, - { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344 }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, 
upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, ] [[package]] @@ -5276,44 +5326,44 @@ dependencies = [ { name = "pydantic" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/86/cf/db06a74694bf8f126ed4a869c70ef576f01ee691ef20799fba3d561d3565/qdrant_client-1.9.0.tar.gz", hash = "sha256:7b1792f616651a6f0a76312f945c13d088e9451726795b82ce0350f7df3b7981", size = 199999 } +sdist = { url = "https://files.pythonhosted.org/packages/86/cf/db06a74694bf8f126ed4a869c70ef576f01ee691ef20799fba3d561d3565/qdrant_client-1.9.0.tar.gz", hash = "sha256:7b1792f616651a6f0a76312f945c13d088e9451726795b82ce0350f7df3b7981", size = 199999, upload-time = "2024-04-22T13:35:49.444Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/fa/5abd82cde353f1009c068cca820195efd94e403d261b787e78ea7a9c8318/qdrant_client-1.9.0-py3-none-any.whl", hash = "sha256:ee02893eab1f642481b1ac1e38eb68ec30bab0f673bef7cc05c19fa5d2cbf43e", size = 229258 }, + { url = "https://files.pythonhosted.org/packages/3a/fa/5abd82cde353f1009c068cca820195efd94e403d261b787e78ea7a9c8318/qdrant_client-1.9.0-py3-none-any.whl", hash = "sha256:ee02893eab1f642481b1ac1e38eb68ec30bab0f673bef7cc05c19fa5d2cbf43e", size = 229258, upload-time = "2024-04-22T13:35:46.81Z" }, ] [[package]] name = "rapidfuzz" version = "3.14.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d3/28/9d808fe62375b9aab5ba92fa9b29371297b067c2790b2d7cda648b1e2f8d/rapidfuzz-3.14.3.tar.gz", hash = "sha256:2491937177868bc4b1e469087601d53f925e8d270ccc21e07404b4b5814b7b5f", size = 57863900 } +sdist = { url = "https://files.pythonhosted.org/packages/d3/28/9d808fe62375b9aab5ba92fa9b29371297b067c2790b2d7cda648b1e2f8d/rapidfuzz-3.14.3.tar.gz", hash = "sha256:2491937177868bc4b1e469087601d53f925e8d270ccc21e07404b4b5814b7b5f", size = 57863900, upload-time = "2025-11-01T11:54:52.321Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/76/25/5b0a33ad3332ee1213068c66f7c14e9e221be90bab434f0cb4defa9d6660/rapidfuzz-3.14.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dea2d113e260a5da0c4003e0a5e9fdf24a9dc2bb9eaa43abd030a1e46ce7837d", size = 1953885 }, - { url = "https://files.pythonhosted.org/packages/2d/ab/f1181f500c32c8fcf7c966f5920c7e56b9b1d03193386d19c956505c312d/rapidfuzz-3.14.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e6c31a4aa68cfa75d7eede8b0ed24b9e458447db604c2db53f358be9843d81d3", size = 1390200 }, - { url = "https://files.pythonhosted.org/packages/14/2a/0f2de974ececad873865c6bb3ea3ad07c976ac293d5025b2d73325aac1d4/rapidfuzz-3.14.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02821366d928e68ddcb567fed8723dad7ea3a979fada6283e6914d5858674850", size = 1389319 }, - { url = "https://files.pythonhosted.org/packages/ed/69/309d8f3a0bb3031fd9b667174cc4af56000645298af7c2931be5c3d14bb4/rapidfuzz-3.14.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cfe8df315ab4e6db4e1be72c5170f8e66021acde22cd2f9d04d2058a9fd8162e", size = 3178495 }, - { url = "https://files.pythonhosted.org/packages/10/b7/f9c44a99269ea5bf6fd6a40b84e858414b6e241288b9f2b74af470d222b1/rapidfuzz-3.14.3-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:769f31c60cd79420188fcdb3c823227fc4a6deb35cafec9d14045c7f6743acae", size = 1228443 }, - { url = "https://files.pythonhosted.org/packages/f2/0a/3b3137abac7f19c9220e14cd7ce993e35071a7655e7ef697785a3edfea1a/rapidfuzz-3.14.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54fa03062124e73086dae66a3451c553c1e20a39c077fd704dc7154092c34c63", size = 2411998 }, - { url = "https://files.pythonhosted.org/packages/f3/b6/983805a844d44670eaae63831024cdc97ada4e9c62abc6b20703e81e7f9b/rapidfuzz-3.14.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:834d1e818005ed0d4ae38f6b87b86fad9b0a74085467ece0727d20e15077c094", size = 2530120 }, - { url = "https://files.pythonhosted.org/packages/b4/cc/2c97beb2b1be2d7595d805682472f1b1b844111027d5ad89b65e16bdbaaa/rapidfuzz-3.14.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:948b00e8476a91f510dd1ec07272efc7d78c275d83b630455559671d4e33b678", size = 4283129 }, - { url = "https://files.pythonhosted.org/packages/4d/03/2f0e5e94941045aefe7eafab72320e61285c07b752df9884ce88d6b8b835/rapidfuzz-3.14.3-cp311-cp311-win32.whl", hash = "sha256:43d0305c36f504232f18ea04e55f2059bb89f169d3119c4ea96a0e15b59e2a91", size = 1724224 }, - { url = "https://files.pythonhosted.org/packages/cf/99/5fa23e204435803875daefda73fd61baeabc3c36b8fc0e34c1705aab8c7b/rapidfuzz-3.14.3-cp311-cp311-win_amd64.whl", hash = "sha256:ef6bf930b947bd0735c550683939a032090f1d688dfd8861d6b45307b96fd5c5", size = 1544259 }, - { url = "https://files.pythonhosted.org/packages/48/35/d657b85fcc615a42661b98ac90ce8e95bd32af474603a105643963749886/rapidfuzz-3.14.3-cp311-cp311-win_arm64.whl", hash = "sha256:f3eb0ff3b75d6fdccd40b55e7414bb859a1cda77c52762c9c82b85569f5088e7", size = 814734 }, - { url = "https://files.pythonhosted.org/packages/fa/8e/3c215e860b458cfbedb3ed73bc72e98eb7e0ed72f6b48099604a7a3260c2/rapidfuzz-3.14.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:685c93ea961d135893b5984a5a9851637d23767feabe414ec974f43babbd8226", size = 1945306 }, - { url = "https://files.pythonhosted.org/packages/36/d9/31b33512015c899f4a6e6af64df8dfe8acddf4c8b40a4b3e0e6e1bcd00e5/rapidfuzz-3.14.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fa7c8f26f009f8c673fbfb443792f0cf8cf50c4e18121ff1e285b5e08a94fbdb", size = 1390788 }, 
- { url = "https://files.pythonhosted.org/packages/a9/67/2ee6f8de6e2081ccd560a571d9c9063184fe467f484a17fa90311a7f4a2e/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57f878330c8d361b2ce76cebb8e3e1dc827293b6abf404e67d53260d27b5d941", size = 1374580 }, - { url = "https://files.pythonhosted.org/packages/30/83/80d22997acd928eda7deadc19ccd15883904622396d6571e935993e0453a/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c5f545f454871e6af05753a0172849c82feaf0f521c5ca62ba09e1b382d6382", size = 3154947 }, - { url = "https://files.pythonhosted.org/packages/5b/cf/9f49831085a16384695f9fb096b99662f589e30b89b4a589a1ebc1a19d34/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:07aa0b5d8863e3151e05026a28e0d924accf0a7a3b605da978f0359bb804df43", size = 1223872 }, - { url = "https://files.pythonhosted.org/packages/c8/0f/41ee8034e744b871c2e071ef0d360686f5ccfe5659f4fd96c3ec406b3c8b/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73b07566bc7e010e7b5bd490fb04bb312e820970180df6b5655e9e6224c137db", size = 2392512 }, - { url = "https://files.pythonhosted.org/packages/da/86/280038b6b0c2ccec54fb957c732ad6b41cc1fd03b288d76545b9cf98343f/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6de00eb84c71476af7d3110cf25d8fe7c792d7f5fa86764ef0b4ca97e78ca3ed", size = 2521398 }, - { url = "https://files.pythonhosted.org/packages/fa/7b/05c26f939607dca0006505e3216248ae2de631e39ef94dd63dbbf0860021/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7843a1abf0091773a530636fdd2a49a41bcae22f9910b86b4f903e76ddc82dc", size = 4259416 }, - { url = "https://files.pythonhosted.org/packages/40/eb/9e3af4103d91788f81111af1b54a28de347cdbed8eaa6c91d5e98a889aab/rapidfuzz-3.14.3-cp312-cp312-win32.whl", hash = "sha256:dea97ac3ca18cd3ba8f3d04b5c1fe4aa60e58e8d9b7793d3bd595fdb04128d7a", size = 1709527 }, - { url = "https://files.pythonhosted.org/packages/b8/63/d06ecce90e2cf1747e29aeab9f823d21e5877a4c51b79720b2d3be7848f8/rapidfuzz-3.14.3-cp312-cp312-win_amd64.whl", hash = "sha256:b5100fd6bcee4d27f28f4e0a1c6b5127bc8ba7c2a9959cad9eab0bf4a7ab3329", size = 1538989 }, - { url = "https://files.pythonhosted.org/packages/fc/6d/beee32dcda64af8128aab3ace2ccb33d797ed58c434c6419eea015fec779/rapidfuzz-3.14.3-cp312-cp312-win_arm64.whl", hash = "sha256:4e49c9e992bc5fc873bd0fff7ef16a4405130ec42f2ce3d2b735ba5d3d4eb70f", size = 811161 }, - { url = "https://files.pythonhosted.org/packages/c9/33/b5bd6475c7c27164b5becc9b0e3eb978f1e3640fea590dd3dced6006ee83/rapidfuzz-3.14.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7cf174b52cb3ef5d49e45d0a1133b7e7d0ecf770ed01f97ae9962c5c91d97d23", size = 1888499 }, - { url = "https://files.pythonhosted.org/packages/30/d2/89d65d4db4bb931beade9121bc71ad916b5fa9396e807d11b33731494e8e/rapidfuzz-3.14.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:442cba39957a008dfc5bdef21a9c3f4379e30ffb4e41b8555dbaf4887eca9300", size = 1336747 }, - { url = "https://files.pythonhosted.org/packages/85/33/cd87d92b23f0b06e8914a61cea6850c6d495ca027f669fab7a379041827a/rapidfuzz-3.14.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1faa0f8f76ba75fd7b142c984947c280ef6558b5067af2ae9b8729b0a0f99ede", size = 1352187 }, - { url = "https://files.pythonhosted.org/packages/22/20/9d30b4a1ab26aac22fff17d21dec7e9089ccddfe25151d0a8bb57001dc3d/rapidfuzz-3.14.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:1e6eefec45625c634926a9fd46c9e4f31118ac8f3156fff9494422cee45207e6", size = 3101472 }, - { url = "https://files.pythonhosted.org/packages/b1/ad/fa2d3e5c29a04ead7eaa731c7cd1f30f9ec3c77b3a578fdf90280797cbcb/rapidfuzz-3.14.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56fefb4382bb12250f164250240b9dd7772e41c5c8ae976fd598a32292449cc5", size = 1511361 }, + { url = "https://files.pythonhosted.org/packages/76/25/5b0a33ad3332ee1213068c66f7c14e9e221be90bab434f0cb4defa9d6660/rapidfuzz-3.14.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dea2d113e260a5da0c4003e0a5e9fdf24a9dc2bb9eaa43abd030a1e46ce7837d", size = 1953885, upload-time = "2025-11-01T11:52:47.75Z" }, + { url = "https://files.pythonhosted.org/packages/2d/ab/f1181f500c32c8fcf7c966f5920c7e56b9b1d03193386d19c956505c312d/rapidfuzz-3.14.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e6c31a4aa68cfa75d7eede8b0ed24b9e458447db604c2db53f358be9843d81d3", size = 1390200, upload-time = "2025-11-01T11:52:49.491Z" }, + { url = "https://files.pythonhosted.org/packages/14/2a/0f2de974ececad873865c6bb3ea3ad07c976ac293d5025b2d73325aac1d4/rapidfuzz-3.14.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02821366d928e68ddcb567fed8723dad7ea3a979fada6283e6914d5858674850", size = 1389319, upload-time = "2025-11-01T11:52:51.224Z" }, + { url = "https://files.pythonhosted.org/packages/ed/69/309d8f3a0bb3031fd9b667174cc4af56000645298af7c2931be5c3d14bb4/rapidfuzz-3.14.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cfe8df315ab4e6db4e1be72c5170f8e66021acde22cd2f9d04d2058a9fd8162e", size = 3178495, upload-time = "2025-11-01T11:52:53.005Z" }, + { url = "https://files.pythonhosted.org/packages/10/b7/f9c44a99269ea5bf6fd6a40b84e858414b6e241288b9f2b74af470d222b1/rapidfuzz-3.14.3-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:769f31c60cd79420188fcdb3c823227fc4a6deb35cafec9d14045c7f6743acae", size = 1228443, upload-time = "2025-11-01T11:52:54.991Z" }, + { url = "https://files.pythonhosted.org/packages/f2/0a/3b3137abac7f19c9220e14cd7ce993e35071a7655e7ef697785a3edfea1a/rapidfuzz-3.14.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54fa03062124e73086dae66a3451c553c1e20a39c077fd704dc7154092c34c63", size = 2411998, upload-time = "2025-11-01T11:52:56.629Z" }, + { url = "https://files.pythonhosted.org/packages/f3/b6/983805a844d44670eaae63831024cdc97ada4e9c62abc6b20703e81e7f9b/rapidfuzz-3.14.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:834d1e818005ed0d4ae38f6b87b86fad9b0a74085467ece0727d20e15077c094", size = 2530120, upload-time = "2025-11-01T11:52:58.298Z" }, + { url = "https://files.pythonhosted.org/packages/b4/cc/2c97beb2b1be2d7595d805682472f1b1b844111027d5ad89b65e16bdbaaa/rapidfuzz-3.14.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:948b00e8476a91f510dd1ec07272efc7d78c275d83b630455559671d4e33b678", size = 4283129, upload-time = "2025-11-01T11:53:00.188Z" }, + { url = "https://files.pythonhosted.org/packages/4d/03/2f0e5e94941045aefe7eafab72320e61285c07b752df9884ce88d6b8b835/rapidfuzz-3.14.3-cp311-cp311-win32.whl", hash = "sha256:43d0305c36f504232f18ea04e55f2059bb89f169d3119c4ea96a0e15b59e2a91", size = 1724224, upload-time = "2025-11-01T11:53:02.149Z" }, + { url = "https://files.pythonhosted.org/packages/cf/99/5fa23e204435803875daefda73fd61baeabc3c36b8fc0e34c1705aab8c7b/rapidfuzz-3.14.3-cp311-cp311-win_amd64.whl", hash = "sha256:ef6bf930b947bd0735c550683939a032090f1d688dfd8861d6b45307b96fd5c5", size = 1544259, upload-time = "2025-11-01T11:53:03.66Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/35/d657b85fcc615a42661b98ac90ce8e95bd32af474603a105643963749886/rapidfuzz-3.14.3-cp311-cp311-win_arm64.whl", hash = "sha256:f3eb0ff3b75d6fdccd40b55e7414bb859a1cda77c52762c9c82b85569f5088e7", size = 814734, upload-time = "2025-11-01T11:53:05.008Z" }, + { url = "https://files.pythonhosted.org/packages/fa/8e/3c215e860b458cfbedb3ed73bc72e98eb7e0ed72f6b48099604a7a3260c2/rapidfuzz-3.14.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:685c93ea961d135893b5984a5a9851637d23767feabe414ec974f43babbd8226", size = 1945306, upload-time = "2025-11-01T11:53:06.452Z" }, + { url = "https://files.pythonhosted.org/packages/36/d9/31b33512015c899f4a6e6af64df8dfe8acddf4c8b40a4b3e0e6e1bcd00e5/rapidfuzz-3.14.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fa7c8f26f009f8c673fbfb443792f0cf8cf50c4e18121ff1e285b5e08a94fbdb", size = 1390788, upload-time = "2025-11-01T11:53:08.721Z" }, + { url = "https://files.pythonhosted.org/packages/a9/67/2ee6f8de6e2081ccd560a571d9c9063184fe467f484a17fa90311a7f4a2e/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57f878330c8d361b2ce76cebb8e3e1dc827293b6abf404e67d53260d27b5d941", size = 1374580, upload-time = "2025-11-01T11:53:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/83/80d22997acd928eda7deadc19ccd15883904622396d6571e935993e0453a/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c5f545f454871e6af05753a0172849c82feaf0f521c5ca62ba09e1b382d6382", size = 3154947, upload-time = "2025-11-01T11:53:12.093Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cf/9f49831085a16384695f9fb096b99662f589e30b89b4a589a1ebc1a19d34/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:07aa0b5d8863e3151e05026a28e0d924accf0a7a3b605da978f0359bb804df43", size = 1223872, upload-time = "2025-11-01T11:53:13.664Z" }, + { url = "https://files.pythonhosted.org/packages/c8/0f/41ee8034e744b871c2e071ef0d360686f5ccfe5659f4fd96c3ec406b3c8b/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73b07566bc7e010e7b5bd490fb04bb312e820970180df6b5655e9e6224c137db", size = 2392512, upload-time = "2025-11-01T11:53:15.109Z" }, + { url = "https://files.pythonhosted.org/packages/da/86/280038b6b0c2ccec54fb957c732ad6b41cc1fd03b288d76545b9cf98343f/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6de00eb84c71476af7d3110cf25d8fe7c792d7f5fa86764ef0b4ca97e78ca3ed", size = 2521398, upload-time = "2025-11-01T11:53:17.146Z" }, + { url = "https://files.pythonhosted.org/packages/fa/7b/05c26f939607dca0006505e3216248ae2de631e39ef94dd63dbbf0860021/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7843a1abf0091773a530636fdd2a49a41bcae22f9910b86b4f903e76ddc82dc", size = 4259416, upload-time = "2025-11-01T11:53:19.34Z" }, + { url = "https://files.pythonhosted.org/packages/40/eb/9e3af4103d91788f81111af1b54a28de347cdbed8eaa6c91d5e98a889aab/rapidfuzz-3.14.3-cp312-cp312-win32.whl", hash = "sha256:dea97ac3ca18cd3ba8f3d04b5c1fe4aa60e58e8d9b7793d3bd595fdb04128d7a", size = 1709527, upload-time = "2025-11-01T11:53:20.949Z" }, + { url = "https://files.pythonhosted.org/packages/b8/63/d06ecce90e2cf1747e29aeab9f823d21e5877a4c51b79720b2d3be7848f8/rapidfuzz-3.14.3-cp312-cp312-win_amd64.whl", hash = "sha256:b5100fd6bcee4d27f28f4e0a1c6b5127bc8ba7c2a9959cad9eab0bf4a7ab3329", size = 1538989, upload-time = "2025-11-01T11:53:22.428Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/6d/beee32dcda64af8128aab3ace2ccb33d797ed58c434c6419eea015fec779/rapidfuzz-3.14.3-cp312-cp312-win_arm64.whl", hash = "sha256:4e49c9e992bc5fc873bd0fff7ef16a4405130ec42f2ce3d2b735ba5d3d4eb70f", size = 811161, upload-time = "2025-11-01T11:53:23.811Z" }, + { url = "https://files.pythonhosted.org/packages/c9/33/b5bd6475c7c27164b5becc9b0e3eb978f1e3640fea590dd3dced6006ee83/rapidfuzz-3.14.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7cf174b52cb3ef5d49e45d0a1133b7e7d0ecf770ed01f97ae9962c5c91d97d23", size = 1888499, upload-time = "2025-11-01T11:54:42.094Z" }, + { url = "https://files.pythonhosted.org/packages/30/d2/89d65d4db4bb931beade9121bc71ad916b5fa9396e807d11b33731494e8e/rapidfuzz-3.14.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:442cba39957a008dfc5bdef21a9c3f4379e30ffb4e41b8555dbaf4887eca9300", size = 1336747, upload-time = "2025-11-01T11:54:43.957Z" }, + { url = "https://files.pythonhosted.org/packages/85/33/cd87d92b23f0b06e8914a61cea6850c6d495ca027f669fab7a379041827a/rapidfuzz-3.14.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1faa0f8f76ba75fd7b142c984947c280ef6558b5067af2ae9b8729b0a0f99ede", size = 1352187, upload-time = "2025-11-01T11:54:45.518Z" }, + { url = "https://files.pythonhosted.org/packages/22/20/9d30b4a1ab26aac22fff17d21dec7e9089ccddfe25151d0a8bb57001dc3d/rapidfuzz-3.14.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e6eefec45625c634926a9fd46c9e4f31118ac8f3156fff9494422cee45207e6", size = 3101472, upload-time = "2025-11-01T11:54:47.255Z" }, + { url = "https://files.pythonhosted.org/packages/b1/ad/fa2d3e5c29a04ead7eaa731c7cd1f30f9ec3c77b3a578fdf90280797cbcb/rapidfuzz-3.14.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56fefb4382bb12250f164250240b9dd7772e41c5c8ae976fd598a32292449cc5", size = 1511361, upload-time = "2025-11-01T11:54:49.057Z" }, ] [[package]] @@ -5326,9 +5376,9 @@ dependencies = [ { name = "lxml" }, { name = "regex" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b8/e4/260a202516886c2e0cc6e6ae96d1f491792d829098886d9529a2439fbe8e/readabilipy-0.3.0.tar.gz", hash = "sha256:e13313771216953935ac031db4234bdb9725413534bfb3c19dbd6caab0887ae0", size = 35491 } +sdist = { url = "https://files.pythonhosted.org/packages/b8/e4/260a202516886c2e0cc6e6ae96d1f491792d829098886d9529a2439fbe8e/readabilipy-0.3.0.tar.gz", hash = "sha256:e13313771216953935ac031db4234bdb9725413534bfb3c19dbd6caab0887ae0", size = 35491, upload-time = "2024-12-02T23:03:02.311Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/dd/46/8a640c6de1a6c6af971f858b2fb178ca5e1db91f223d8ba5f40efe1491e5/readabilipy-0.3.0-py3-none-any.whl", hash = "sha256:d106da0fad11d5fdfcde21f5c5385556bfa8ff0258483037d39ea6b1d6db3943", size = 22158 }, + { url = "https://files.pythonhosted.org/packages/dd/46/8a640c6de1a6c6af971f858b2fb178ca5e1db91f223d8ba5f40efe1491e5/readabilipy-0.3.0-py3-none-any.whl", hash = "sha256:d106da0fad11d5fdfcde21f5c5385556bfa8ff0258483037d39ea6b1d6db3943", size = 22158, upload-time = "2024-12-02T23:03:00.438Z" }, ] [[package]] @@ -5340,9 +5390,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d3/ca/e408fbdb6b344bf529c7e8bf020372d21114fe538392c72089462edd26e5/realtime-2.7.0.tar.gz", hash = "sha256:6b9434eeba8d756c8faf94fc0a32081d09f250d14d82b90341170602adbb019f", size = 18860 } +sdist = { url = 
"https://files.pythonhosted.org/packages/d3/ca/e408fbdb6b344bf529c7e8bf020372d21114fe538392c72089462edd26e5/realtime-2.7.0.tar.gz", hash = "sha256:6b9434eeba8d756c8faf94fc0a32081d09f250d14d82b90341170602adbb019f", size = 18860, upload-time = "2025-07-28T18:54:22.949Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/07/a5c7aef12f9a3497f5ad77157a37915645861e8b23b89b2ad4b0f11b48ad/realtime-2.7.0-py3-none-any.whl", hash = "sha256:d55a278803529a69d61c7174f16563a9cfa5bacc1664f656959694481903d99c", size = 22409 }, + { url = "https://files.pythonhosted.org/packages/d2/07/a5c7aef12f9a3497f5ad77157a37915645861e8b23b89b2ad4b0f11b48ad/realtime-2.7.0-py3-none-any.whl", hash = "sha256:d55a278803529a69d61c7174f16563a9cfa5bacc1664f656959694481903d99c", size = 22409, upload-time = "2025-07-28T18:54:21.383Z" }, ] [[package]] @@ -5352,9 +5402,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/07/8b/14ef373ffe71c0d2fde93c204eab78472ea13c021d9aee63b0e11bd65896/redis-6.1.1.tar.gz", hash = "sha256:88c689325b5b41cedcbdbdfd4d937ea86cf6dab2222a83e86d8a466e4b3d2600", size = 4629515 } +sdist = { url = "https://files.pythonhosted.org/packages/07/8b/14ef373ffe71c0d2fde93c204eab78472ea13c021d9aee63b0e11bd65896/redis-6.1.1.tar.gz", hash = "sha256:88c689325b5b41cedcbdbdfd4d937ea86cf6dab2222a83e86d8a466e4b3d2600", size = 4629515, upload-time = "2025-06-02T11:44:04.137Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/cd/29503c609186104c363ef1f38d6e752e7d91ef387fc90aa165e96d69f446/redis-6.1.1-py3-none-any.whl", hash = "sha256:ed44d53d065bbe04ac6d76864e331cfe5c5353f86f6deccc095f8794fd15bb2e", size = 273930 }, + { url = "https://files.pythonhosted.org/packages/c2/cd/29503c609186104c363ef1f38d6e752e7d91ef387fc90aa165e96d69f446/redis-6.1.1-py3-none-any.whl", hash = "sha256:ed44d53d065bbe04ac6d76864e331cfe5c5353f86f6deccc095f8794fd15bb2e", size = 273930, upload-time = "2025-06-02T11:44:02.705Z" }, ] [package.optional-dependencies] @@ -5371,45 +5421,45 @@ dependencies = [ { name = "rpds-py" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036 } +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766 }, + { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, ] [[package]] name = "regex" version = "2025.11.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669 } +sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/90/4fb5056e5f03a7048abd2b11f598d464f0c167de4f2a51aa868c376b8c70/regex-2025.11.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eadade04221641516fa25139273505a1c19f9bf97589a05bc4cfcd8b4a618031", size = 488081 }, - { url = "https://files.pythonhosted.org/packages/85/23/63e481293fac8b069d84fba0299b6666df720d875110efd0338406b5d360/regex-2025.11.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:feff9e54ec0dd3833d659257f5c3f5322a12eee58ffa360984b716f8b92983f4", size = 290554 }, - { url = "https://files.pythonhosted.org/packages/2b/9d/b101d0262ea293a0066b4522dfb722eb6a8785a8c3e084396a5f2c431a46/regex-2025.11.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3b30bc921d50365775c09a7ed446359e5c0179e9e2512beec4a60cbcef6ddd50", size = 288407 }, - { url = "https://files.pythonhosted.org/packages/0c/64/79241c8209d5b7e00577ec9dca35cd493cc6be35b7d147eda367d6179f6d/regex-2025.11.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f99be08cfead2020c7ca6e396c13543baea32343b7a9a5780c462e323bd8872f", size = 793418 }, - { url = "https://files.pythonhosted.org/packages/3d/e2/23cd5d3573901ce8f9757c92ca4db4d09600b865919b6d3e7f69f03b1afd/regex-2025.11.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6dd329a1b61c0ee95ba95385fb0c07ea0d3fe1a21e1349fa2bec272636217118", size = 860448 }, - { url = "https://files.pythonhosted.org/packages/2a/4c/aecf31beeaa416d0ae4ecb852148d38db35391aac19c687b5d56aedf3a8b/regex-2025.11.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c5238d32f3c5269d9e87be0cf096437b7622b6920f5eac4fd202468aaeb34d2", size = 907139 }, - { url = "https://files.pythonhosted.org/packages/61/22/b8cb00df7d2b5e0875f60628594d44dba283e951b1ae17c12f99e332cc0a/regex-2025.11.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10483eefbfb0adb18ee9474498c9a32fcf4e594fbca0543bb94c48bac6183e2e", size = 800439 }, - { url = "https://files.pythonhosted.org/packages/02/a8/c4b20330a5cdc7a8eb265f9ce593f389a6a88a0c5f280cf4d978f33966bc/regex-2025.11.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:78c2d02bb6e1da0720eedc0bad578049cad3f71050ef8cd065ecc87691bed2b0", size = 782965 }, - { url = "https://files.pythonhosted.org/packages/b4/4c/ae3e52988ae74af4b04d2af32fee4e8077f26e51b62ec2d12d246876bea2/regex-2025.11.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e6b49cd2aad93a1790ce9cffb18964f6d3a4b0b3dbdbd5de094b65296fce6e58", size = 854398 }, - { url = "https://files.pythonhosted.org/packages/06/d1/a8b9cf45874eda14b2e275157ce3b304c87e10fb38d9fc26a6e14eb18227/regex-2025.11.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:885b26aa3ee56433b630502dc3d36ba78d186a00cc535d3806e6bfd9ed3c70ab", size = 845897 }, - { url = 
"https://files.pythonhosted.org/packages/ea/fe/1830eb0236be93d9b145e0bd8ab499f31602fe0999b1f19e99955aa8fe20/regex-2025.11.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ddd76a9f58e6a00f8772e72cff8ebcff78e022be95edf018766707c730593e1e", size = 788906 }, - { url = "https://files.pythonhosted.org/packages/66/47/dc2577c1f95f188c1e13e2e69d8825a5ac582ac709942f8a03af42ed6e93/regex-2025.11.3-cp311-cp311-win32.whl", hash = "sha256:3e816cc9aac1cd3cc9a4ec4d860f06d40f994b5c7b4d03b93345f44e08cc68bf", size = 265812 }, - { url = "https://files.pythonhosted.org/packages/50/1e/15f08b2f82a9bbb510621ec9042547b54d11e83cb620643ebb54e4eb7d71/regex-2025.11.3-cp311-cp311-win_amd64.whl", hash = "sha256:087511f5c8b7dfbe3a03f5d5ad0c2a33861b1fc387f21f6f60825a44865a385a", size = 277737 }, - { url = "https://files.pythonhosted.org/packages/f4/fc/6500eb39f5f76c5e47a398df82e6b535a5e345f839581012a418b16f9cc3/regex-2025.11.3-cp311-cp311-win_arm64.whl", hash = "sha256:1ff0d190c7f68ae7769cd0313fe45820ba07ffebfddfaa89cc1eb70827ba0ddc", size = 270290 }, - { url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312 }, - { url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256 }, - { url = "https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921 }, - { url = "https://files.pythonhosted.org/packages/ea/98/6a8dff667d1af907150432cf5abc05a17ccd32c72a3615410d5365ac167a/regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7", size = 798568 }, - { url = "https://files.pythonhosted.org/packages/64/15/92c1db4fa4e12733dd5a526c2dd2b6edcbfe13257e135fc0f6c57f34c173/regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69", size = 864165 }, - { url = "https://files.pythonhosted.org/packages/f9/e7/3ad7da8cdee1ce66c7cd37ab5ab05c463a86ffeb52b1a25fe7bd9293b36c/regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48", size = 912182 }, - { url = "https://files.pythonhosted.org/packages/84/bd/9ce9f629fcb714ffc2c3faf62b6766ecb7a585e1e885eb699bcf130a5209/regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c", size = 803501 }, - { url = "https://files.pythonhosted.org/packages/7c/0f/8dc2e4349d8e877283e6edd6c12bdcebc20f03744e86f197ab6e4492bf08/regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695", size = 787842 }, - { url = 
"https://files.pythonhosted.org/packages/f9/73/cff02702960bc185164d5619c0c62a2f598a6abff6695d391b096237d4ab/regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98", size = 858519 }, - { url = "https://files.pythonhosted.org/packages/61/83/0e8d1ae71e15bc1dc36231c90b46ee35f9d52fab2e226b0e039e7ea9c10a/regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74", size = 850611 }, - { url = "https://files.pythonhosted.org/packages/c8/f5/70a5cdd781dcfaa12556f2955bf170cd603cb1c96a1827479f8faea2df97/regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0", size = 789759 }, - { url = "https://files.pythonhosted.org/packages/59/9b/7c29be7903c318488983e7d97abcf8ebd3830e4c956c4c540005fcfb0462/regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204", size = 266194 }, - { url = "https://files.pythonhosted.org/packages/1a/67/3b92df89f179d7c367be654ab5626ae311cb28f7d5c237b6bb976cd5fbbb/regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9", size = 277069 }, - { url = "https://files.pythonhosted.org/packages/d7/55/85ba4c066fe5094d35b249c3ce8df0ba623cfd35afb22d6764f23a52a1c5/regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26", size = 270330 }, + { url = "https://files.pythonhosted.org/packages/f7/90/4fb5056e5f03a7048abd2b11f598d464f0c167de4f2a51aa868c376b8c70/regex-2025.11.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eadade04221641516fa25139273505a1c19f9bf97589a05bc4cfcd8b4a618031", size = 488081, upload-time = "2025-11-03T21:31:11.946Z" }, + { url = "https://files.pythonhosted.org/packages/85/23/63e481293fac8b069d84fba0299b6666df720d875110efd0338406b5d360/regex-2025.11.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:feff9e54ec0dd3833d659257f5c3f5322a12eee58ffa360984b716f8b92983f4", size = 290554, upload-time = "2025-11-03T21:31:13.387Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9d/b101d0262ea293a0066b4522dfb722eb6a8785a8c3e084396a5f2c431a46/regex-2025.11.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3b30bc921d50365775c09a7ed446359e5c0179e9e2512beec4a60cbcef6ddd50", size = 288407, upload-time = "2025-11-03T21:31:14.809Z" }, + { url = "https://files.pythonhosted.org/packages/0c/64/79241c8209d5b7e00577ec9dca35cd493cc6be35b7d147eda367d6179f6d/regex-2025.11.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f99be08cfead2020c7ca6e396c13543baea32343b7a9a5780c462e323bd8872f", size = 793418, upload-time = "2025-11-03T21:31:16.556Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e2/23cd5d3573901ce8f9757c92ca4db4d09600b865919b6d3e7f69f03b1afd/regex-2025.11.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6dd329a1b61c0ee95ba95385fb0c07ea0d3fe1a21e1349fa2bec272636217118", size = 860448, upload-time = "2025-11-03T21:31:18.12Z" }, + { url = "https://files.pythonhosted.org/packages/2a/4c/aecf31beeaa416d0ae4ecb852148d38db35391aac19c687b5d56aedf3a8b/regex-2025.11.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c5238d32f3c5269d9e87be0cf096437b7622b6920f5eac4fd202468aaeb34d2", size = 
907139, upload-time = "2025-11-03T21:31:20.753Z" }, + { url = "https://files.pythonhosted.org/packages/61/22/b8cb00df7d2b5e0875f60628594d44dba283e951b1ae17c12f99e332cc0a/regex-2025.11.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10483eefbfb0adb18ee9474498c9a32fcf4e594fbca0543bb94c48bac6183e2e", size = 800439, upload-time = "2025-11-03T21:31:22.069Z" }, + { url = "https://files.pythonhosted.org/packages/02/a8/c4b20330a5cdc7a8eb265f9ce593f389a6a88a0c5f280cf4d978f33966bc/regex-2025.11.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:78c2d02bb6e1da0720eedc0bad578049cad3f71050ef8cd065ecc87691bed2b0", size = 782965, upload-time = "2025-11-03T21:31:23.598Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4c/ae3e52988ae74af4b04d2af32fee4e8077f26e51b62ec2d12d246876bea2/regex-2025.11.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e6b49cd2aad93a1790ce9cffb18964f6d3a4b0b3dbdbd5de094b65296fce6e58", size = 854398, upload-time = "2025-11-03T21:31:25.008Z" }, + { url = "https://files.pythonhosted.org/packages/06/d1/a8b9cf45874eda14b2e275157ce3b304c87e10fb38d9fc26a6e14eb18227/regex-2025.11.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:885b26aa3ee56433b630502dc3d36ba78d186a00cc535d3806e6bfd9ed3c70ab", size = 845897, upload-time = "2025-11-03T21:31:26.427Z" }, + { url = "https://files.pythonhosted.org/packages/ea/fe/1830eb0236be93d9b145e0bd8ab499f31602fe0999b1f19e99955aa8fe20/regex-2025.11.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ddd76a9f58e6a00f8772e72cff8ebcff78e022be95edf018766707c730593e1e", size = 788906, upload-time = "2025-11-03T21:31:28.078Z" }, + { url = "https://files.pythonhosted.org/packages/66/47/dc2577c1f95f188c1e13e2e69d8825a5ac582ac709942f8a03af42ed6e93/regex-2025.11.3-cp311-cp311-win32.whl", hash = "sha256:3e816cc9aac1cd3cc9a4ec4d860f06d40f994b5c7b4d03b93345f44e08cc68bf", size = 265812, upload-time = "2025-11-03T21:31:29.72Z" }, + { url = "https://files.pythonhosted.org/packages/50/1e/15f08b2f82a9bbb510621ec9042547b54d11e83cb620643ebb54e4eb7d71/regex-2025.11.3-cp311-cp311-win_amd64.whl", hash = "sha256:087511f5c8b7dfbe3a03f5d5ad0c2a33861b1fc387f21f6f60825a44865a385a", size = 277737, upload-time = "2025-11-03T21:31:31.422Z" }, + { url = "https://files.pythonhosted.org/packages/f4/fc/6500eb39f5f76c5e47a398df82e6b535a5e345f839581012a418b16f9cc3/regex-2025.11.3-cp311-cp311-win_arm64.whl", hash = "sha256:1ff0d190c7f68ae7769cd0313fe45820ba07ffebfddfaa89cc1eb70827ba0ddc", size = 270290, upload-time = "2025-11-03T21:31:33.041Z" }, + { url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312, upload-time = "2025-11-03T21:31:34.343Z" }, + { url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256, upload-time = "2025-11-03T21:31:35.675Z" }, + { url = "https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921, upload-time = "2025-11-03T21:31:37.07Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/98/6a8dff667d1af907150432cf5abc05a17ccd32c72a3615410d5365ac167a/regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7", size = 798568, upload-time = "2025-11-03T21:31:38.784Z" }, + { url = "https://files.pythonhosted.org/packages/64/15/92c1db4fa4e12733dd5a526c2dd2b6edcbfe13257e135fc0f6c57f34c173/regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69", size = 864165, upload-time = "2025-11-03T21:31:40.559Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e7/3ad7da8cdee1ce66c7cd37ab5ab05c463a86ffeb52b1a25fe7bd9293b36c/regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48", size = 912182, upload-time = "2025-11-03T21:31:42.002Z" }, + { url = "https://files.pythonhosted.org/packages/84/bd/9ce9f629fcb714ffc2c3faf62b6766ecb7a585e1e885eb699bcf130a5209/regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c", size = 803501, upload-time = "2025-11-03T21:31:43.815Z" }, + { url = "https://files.pythonhosted.org/packages/7c/0f/8dc2e4349d8e877283e6edd6c12bdcebc20f03744e86f197ab6e4492bf08/regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695", size = 787842, upload-time = "2025-11-03T21:31:45.353Z" }, + { url = "https://files.pythonhosted.org/packages/f9/73/cff02702960bc185164d5619c0c62a2f598a6abff6695d391b096237d4ab/regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98", size = 858519, upload-time = "2025-11-03T21:31:46.814Z" }, + { url = "https://files.pythonhosted.org/packages/61/83/0e8d1ae71e15bc1dc36231c90b46ee35f9d52fab2e226b0e039e7ea9c10a/regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74", size = 850611, upload-time = "2025-11-03T21:31:48.289Z" }, + { url = "https://files.pythonhosted.org/packages/c8/f5/70a5cdd781dcfaa12556f2955bf170cd603cb1c96a1827479f8faea2df97/regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0", size = 789759, upload-time = "2025-11-03T21:31:49.759Z" }, + { url = "https://files.pythonhosted.org/packages/59/9b/7c29be7903c318488983e7d97abcf8ebd3830e4c956c4c540005fcfb0462/regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204", size = 266194, upload-time = "2025-11-03T21:31:51.53Z" }, + { url = "https://files.pythonhosted.org/packages/1a/67/3b92df89f179d7c367be654ab5626ae311cb28f7d5c237b6bb976cd5fbbb/regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9", size = 277069, upload-time = "2025-11-03T21:31:53.151Z" }, + { url = "https://files.pythonhosted.org/packages/d7/55/85ba4c066fe5094d35b249c3ce8df0ba623cfd35afb22d6764f23a52a1c5/regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26", size = 270330, upload-time 
= "2025-11-03T21:31:54.514Z" }, ] [[package]] @@ -5422,9 +5472,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517 } +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738 }, + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] [[package]] @@ -5435,9 +5485,9 @@ dependencies = [ { name = "oauthlib" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650 } +sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650, upload-time = "2024-03-22T20:32:29.939Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179 }, + { url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179, upload-time = "2024-03-22T20:32:28.055Z" }, ] [[package]] @@ -5447,9 +5497,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888 } +sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888, upload-time = "2023-05-01T04:11:33.229Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481 }, + { url = 
"https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" }, ] [[package]] @@ -5460,9 +5510,9 @@ dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/2a/535a794e5b64f6ef4abc1342ef1a43465af2111c5185e98b4cca2a6b6b7a/resend-2.9.0.tar.gz", hash = "sha256:e8d4c909a7fe7701119789f848a6befb0a4a668e2182d7bbfe764742f1952bd3", size = 13600 } +sdist = { url = "https://files.pythonhosted.org/packages/1f/2a/535a794e5b64f6ef4abc1342ef1a43465af2111c5185e98b4cca2a6b6b7a/resend-2.9.0.tar.gz", hash = "sha256:e8d4c909a7fe7701119789f848a6befb0a4a668e2182d7bbfe764742f1952bd3", size = 13600, upload-time = "2025-05-06T00:35:20.363Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/81/ba1feb9959bafbcde6466b78d4628405d69cd14613f6eba12b928a77b86a/resend-2.9.0-py2.py3-none-any.whl", hash = "sha256:6607f75e3a9257a219c0640f935b8d1211338190d553eb043c25732affb92949", size = 20173 }, + { url = "https://files.pythonhosted.org/packages/96/81/ba1feb9959bafbcde6466b78d4628405d69cd14613f6eba12b928a77b86a/resend-2.9.0-py2.py3-none-any.whl", hash = "sha256:6607f75e3a9257a219c0640f935b8d1211338190d553eb043c25732affb92949", size = 20173, upload-time = "2025-05-06T00:35:18.963Z" }, ] [[package]] @@ -5473,9 +5523,9 @@ dependencies = [ { name = "decorator" }, { name = "py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9d/72/75d0b85443fbc8d9f38d08d2b1b67cc184ce35280e4a3813cda2f445f3a4/retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4", size = 6448 } +sdist = { url = "https://files.pythonhosted.org/packages/9d/72/75d0b85443fbc8d9f38d08d2b1b67cc184ce35280e4a3813cda2f445f3a4/retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4", size = 6448, upload-time = "2016-05-11T13:58:51.541Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4b/0d/53aea75710af4528a25ed6837d71d117602b01946b307a3912cb3cfcbcba/retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606", size = 7986 }, + { url = "https://files.pythonhosted.org/packages/4b/0d/53aea75710af4528a25ed6837d71d117602b01946b307a3912cb3cfcbcba/retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606", size = 7986, upload-time = "2016-05-11T13:58:39.925Z" }, ] [[package]] @@ -5486,59 +5536,59 @@ dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990 } +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 
243393 }, + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, ] [[package]] name = "rpds-py" version = "0.29.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/33/23b3b3419b6a3e0f559c7c0d2ca8fc1b9448382b25245033788785921332/rpds_py-0.29.0.tar.gz", hash = "sha256:fe55fe686908f50154d1dc599232016e50c243b438c3b7432f24e2895b0e5359", size = 69359 } +sdist = { url = "https://files.pythonhosted.org/packages/98/33/23b3b3419b6a3e0f559c7c0d2ca8fc1b9448382b25245033788785921332/rpds_py-0.29.0.tar.gz", hash = "sha256:fe55fe686908f50154d1dc599232016e50c243b438c3b7432f24e2895b0e5359", size = 69359, upload-time = "2025-11-16T14:50:39.532Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/ab/7fb95163a53ab122c74a7c42d2d2f012819af2cf3deb43fb0d5acf45cc1a/rpds_py-0.29.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9b9c764a11fd637e0322a488560533112837f5334ffeb48b1be20f6d98a7b437", size = 372344 }, - { url = "https://files.pythonhosted.org/packages/b3/45/f3c30084c03b0d0f918cb4c5ae2c20b0a148b51ba2b3f6456765b629bedd/rpds_py-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fd2164d73812026ce970d44c3ebd51e019d2a26a4425a5dcbdfa93a34abc383", size = 363041 }, - { url = "https://files.pythonhosted.org/packages/e3/e9/4d044a1662608c47a87cbb37b999d4d5af54c6d6ebdda93a4d8bbf8b2a10/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a097b7f7f7274164566ae90a221fd725363c0e9d243e2e9ed43d195ccc5495c", size = 391775 }, - { url = "https://files.pythonhosted.org/packages/50/c9/7616d3ace4e6731aeb6e3cd85123e03aec58e439044e214b9c5c60fd8eb1/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cdc0490374e31cedefefaa1520d5fe38e82fde8748cbc926e7284574c714d6b", size = 405624 }, - { url = "https://files.pythonhosted.org/packages/c2/e2/6d7d6941ca0843609fd2d72c966a438d6f22617baf22d46c3d2156c31350/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89ca2e673ddd5bde9b386da9a0aac0cab0e76f40c8f0aaf0d6311b6bbf2aa311", size = 527894 }, - { url = "https://files.pythonhosted.org/packages/8d/f7/aee14dc2db61bb2ae1e3068f134ca9da5f28c586120889a70ff504bb026f/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5d9da3ff5af1ca1249b1adb8ef0573b94c76e6ae880ba1852f033bf429d4588", size = 412720 }, - { url = "https://files.pythonhosted.org/packages/2f/e2/2293f236e887c0360c2723d90c00d48dee296406994d6271faf1712e94ec/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8238d1d310283e87376c12f658b61e1ee23a14c0e54c7c0ce953efdbdc72deed", size = 392945 }, - { url = "https://files.pythonhosted.org/packages/14/cd/ceea6147acd3bd1fd028d1975228f08ff19d62098078d5ec3eed49703797/rpds_py-0.29.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:2d6fb2ad1c36f91c4646989811e84b1ea5e0c3cf9690b826b6e32b7965853a63", size = 406385 }, - { url = "https://files.pythonhosted.org/packages/52/36/fe4dead19e45eb77a0524acfdbf51e6cda597b26fc5b6dddbff55fbbb1a5/rpds_py-0.29.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:534dc9df211387547267ccdb42253aa30527482acb38dd9b21c5c115d66a96d2", size = 423943 }, - { url = 
"https://files.pythonhosted.org/packages/a1/7b/4551510803b582fa4abbc8645441a2d15aa0c962c3b21ebb380b7e74f6a1/rpds_py-0.29.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d456e64724a075441e4ed648d7f154dc62e9aabff29bcdf723d0c00e9e1d352f", size = 574204 }, - { url = "https://files.pythonhosted.org/packages/64/ba/071ccdd7b171e727a6ae079f02c26f75790b41555f12ca8f1151336d2124/rpds_py-0.29.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a738f2da2f565989401bd6fd0b15990a4d1523c6d7fe83f300b7e7d17212feca", size = 600587 }, - { url = "https://files.pythonhosted.org/packages/03/09/96983d48c8cf5a1e03c7d9cc1f4b48266adfb858ae48c7c2ce978dbba349/rpds_py-0.29.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a110e14508fd26fd2e472bb541f37c209409876ba601cf57e739e87d8a53cf95", size = 562287 }, - { url = "https://files.pythonhosted.org/packages/40/f0/8c01aaedc0fa92156f0391f39ea93b5952bc0ec56b897763858f95da8168/rpds_py-0.29.0-cp311-cp311-win32.whl", hash = "sha256:923248a56dd8d158389a28934f6f69ebf89f218ef96a6b216a9be6861804d3f4", size = 221394 }, - { url = "https://files.pythonhosted.org/packages/7e/a5/a8b21c54c7d234efdc83dc034a4d7cd9668e3613b6316876a29b49dece71/rpds_py-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:539eb77eb043afcc45314d1be09ea6d6cafb3addc73e0547c171c6d636957f60", size = 235713 }, - { url = "https://files.pythonhosted.org/packages/a7/1f/df3c56219523947b1be402fa12e6323fe6d61d883cf35d6cb5d5bb6db9d9/rpds_py-0.29.0-cp311-cp311-win_arm64.whl", hash = "sha256:bdb67151ea81fcf02d8f494703fb728d4d34d24556cbff5f417d74f6f5792e7c", size = 229157 }, - { url = "https://files.pythonhosted.org/packages/3c/50/bc0e6e736d94e420df79be4deb5c9476b63165c87bb8f19ef75d100d21b3/rpds_py-0.29.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a0891cfd8db43e085c0ab93ab7e9b0c8fee84780d436d3b266b113e51e79f954", size = 376000 }, - { url = "https://files.pythonhosted.org/packages/3e/3a/46676277160f014ae95f24de53bed0e3b7ea66c235e7de0b9df7bd5d68ba/rpds_py-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3897924d3f9a0361472d884051f9a2460358f9a45b1d85a39a158d2f8f1ad71c", size = 360575 }, - { url = "https://files.pythonhosted.org/packages/75/ba/411d414ed99ea1afdd185bbabeeaac00624bd1e4b22840b5e9967ade6337/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21deb8e0d1571508c6491ce5ea5e25669b1dd4adf1c9d64b6314842f708b5d", size = 392159 }, - { url = "https://files.pythonhosted.org/packages/8f/b1/e18aa3a331f705467a48d0296778dc1fea9d7f6cf675bd261f9a846c7e90/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9efe71687d6427737a0a2de9ca1c0a216510e6cd08925c44162be23ed7bed2d5", size = 410602 }, - { url = "https://files.pythonhosted.org/packages/2f/6c/04f27f0c9f2299274c76612ac9d2c36c5048bb2c6c2e52c38c60bf3868d9/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40f65470919dc189c833e86b2c4bd21bd355f98436a2cef9e0a9a92aebc8e57e", size = 515808 }, - { url = "https://files.pythonhosted.org/packages/83/56/a8412aa464fb151f8bc0d91fb0bb888adc9039bd41c1c6ba8d94990d8cf8/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:def48ff59f181130f1a2cb7c517d16328efac3ec03951cca40c1dc2049747e83", size = 416015 }, - { url = "https://files.pythonhosted.org/packages/04/4c/f9b8a05faca3d9e0a6397c90d13acb9307c9792b2bff621430c58b1d6e76/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ad7bd570be92695d89285a4b373006930715b78d96449f686af422debb4d3949", size = 395325 }, - { url = "https://files.pythonhosted.org/packages/34/60/869f3bfbf8ed7b54f1ad9a5543e0fdffdd40b5a8f587fe300ee7b4f19340/rpds_py-0.29.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:5a572911cd053137bbff8e3a52d31c5d2dba51d3a67ad902629c70185f3f2181", size = 410160 }, - { url = "https://files.pythonhosted.org/packages/91/aa/e5b496334e3aba4fe4c8a80187b89f3c1294c5c36f2a926da74338fa5a73/rpds_py-0.29.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d583d4403bcbf10cffc3ab5cee23d7643fcc960dff85973fd3c2d6c86e8dbb0c", size = 425309 }, - { url = "https://files.pythonhosted.org/packages/85/68/4e24a34189751ceb6d66b28f18159922828dd84155876551f7ca5b25f14f/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:070befbb868f257d24c3bb350dbd6e2f645e83731f31264b19d7231dd5c396c7", size = 574644 }, - { url = "https://files.pythonhosted.org/packages/8c/cf/474a005ea4ea9c3b4f17b6108b6b13cebfc98ebaff11d6e1b193204b3a93/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fc935f6b20b0c9f919a8ff024739174522abd331978f750a74bb68abd117bd19", size = 601605 }, - { url = "https://files.pythonhosted.org/packages/f4/b1/c56f6a9ab8c5f6bb5c65c4b5f8229167a3a525245b0773f2c0896686b64e/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c5a8ecaa44ce2d8d9d20a68a2483a74c07f05d72e94a4dff88906c8807e77b0", size = 564593 }, - { url = "https://files.pythonhosted.org/packages/b3/13/0494cecce4848f68501e0a229432620b4b57022388b071eeff95f3e1e75b/rpds_py-0.29.0-cp312-cp312-win32.whl", hash = "sha256:ba5e1aeaf8dd6d8f6caba1f5539cddda87d511331714b7b5fc908b6cfc3636b7", size = 223853 }, - { url = "https://files.pythonhosted.org/packages/1f/6a/51e9aeb444a00cdc520b032a28b07e5f8dc7bc328b57760c53e7f96997b4/rpds_py-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:b5f6134faf54b3cb83375db0f113506f8b7770785be1f95a631e7e2892101977", size = 239895 }, - { url = "https://files.pythonhosted.org/packages/d1/d4/8bce56cdad1ab873e3f27cb31c6a51d8f384d66b022b820525b879f8bed1/rpds_py-0.29.0-cp312-cp312-win_arm64.whl", hash = "sha256:b016eddf00dca7944721bf0cd85b6af7f6c4efaf83ee0b37c4133bd39757a8c7", size = 230321 }, - { url = "https://files.pythonhosted.org/packages/f2/ac/b97e80bf107159e5b9ba9c91df1ab95f69e5e41b435f27bdd737f0d583ac/rpds_py-0.29.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:acd82a9e39082dc5f4492d15a6b6c8599aa21db5c35aaf7d6889aea16502c07d", size = 373963 }, - { url = "https://files.pythonhosted.org/packages/40/5a/55e72962d5d29bd912f40c594e68880d3c7a52774b0f75542775f9250712/rpds_py-0.29.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:715b67eac317bf1c7657508170a3e011a1ea6ccb1c9d5f296e20ba14196be6b3", size = 364644 }, - { url = "https://files.pythonhosted.org/packages/99/2a/6b6524d0191b7fc1351c3c0840baac42250515afb48ae40c7ed15499a6a2/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3b1b87a237cb2dba4db18bcfaaa44ba4cd5936b91121b62292ff21df577fc43", size = 393847 }, - { url = "https://files.pythonhosted.org/packages/1c/b8/c5692a7df577b3c0c7faed7ac01ee3c608b81750fc5d89f84529229b6873/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c3c3e8101bb06e337c88eb0c0ede3187131f19d97d43ea0e1c5407ea74c0cbf", size = 407281 }, - { url = 
"https://files.pythonhosted.org/packages/f0/57/0546c6f84031b7ea08b76646a8e33e45607cc6bd879ff1917dc077bb881e/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8e54d6e61f3ecd3abe032065ce83ea63417a24f437e4a3d73d2f85ce7b7cfe", size = 529213 }, - { url = "https://files.pythonhosted.org/packages/fa/c1/01dd5f444233605555bc11fe5fed6a5c18f379f02013870c176c8e630a23/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fbd4e9aebf110473a420dea85a238b254cf8a15acb04b22a5a6b5ce8925b760", size = 413808 }, - { url = "https://files.pythonhosted.org/packages/aa/0a/60f98b06156ea2a7af849fb148e00fbcfdb540909a5174a5ed10c93745c7/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80fdf53d36e6c72819993e35d1ebeeb8e8fc688d0c6c2b391b55e335b3afba5a", size = 394600 }, - { url = "https://files.pythonhosted.org/packages/37/f1/dc9312fc9bec040ece08396429f2bd9e0977924ba7a11c5ad7056428465e/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:ea7173df5d86f625f8dde6d5929629ad811ed8decda3b60ae603903839ac9ac0", size = 408634 }, - { url = "https://files.pythonhosted.org/packages/ed/41/65024c9fd40c89bb7d604cf73beda4cbdbcebe92d8765345dd65855b6449/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:76054d540061eda273274f3d13a21a4abdde90e13eaefdc205db37c05230efce", size = 426064 }, - { url = "https://files.pythonhosted.org/packages/a2/e0/cf95478881fc88ca2fdbf56381d7df36567cccc39a05394beac72182cd62/rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:9f84c549746a5be3bc7415830747a3a0312573afc9f95785eb35228bb17742ec", size = 575871 }, - { url = "https://files.pythonhosted.org/packages/ea/c0/df88097e64339a0218b57bd5f9ca49898e4c394db756c67fccc64add850a/rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:0ea962671af5cb9a260489e311fa22b2e97103e3f9f0caaea6f81390af96a9ed", size = 601702 }, - { url = "https://files.pythonhosted.org/packages/87/f4/09ffb3ebd0cbb9e2c7c9b84d252557ecf434cd71584ee1e32f66013824df/rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f7728653900035fb7b8d06e1e5900545d8088efc9d5d4545782da7df03ec803f", size = 564054 }, + { url = "https://files.pythonhosted.org/packages/36/ab/7fb95163a53ab122c74a7c42d2d2f012819af2cf3deb43fb0d5acf45cc1a/rpds_py-0.29.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9b9c764a11fd637e0322a488560533112837f5334ffeb48b1be20f6d98a7b437", size = 372344, upload-time = "2025-11-16T14:47:57.279Z" }, + { url = "https://files.pythonhosted.org/packages/b3/45/f3c30084c03b0d0f918cb4c5ae2c20b0a148b51ba2b3f6456765b629bedd/rpds_py-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fd2164d73812026ce970d44c3ebd51e019d2a26a4425a5dcbdfa93a34abc383", size = 363041, upload-time = "2025-11-16T14:47:58.908Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e9/4d044a1662608c47a87cbb37b999d4d5af54c6d6ebdda93a4d8bbf8b2a10/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a097b7f7f7274164566ae90a221fd725363c0e9d243e2e9ed43d195ccc5495c", size = 391775, upload-time = "2025-11-16T14:48:00.197Z" }, + { url = "https://files.pythonhosted.org/packages/50/c9/7616d3ace4e6731aeb6e3cd85123e03aec58e439044e214b9c5c60fd8eb1/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cdc0490374e31cedefefaa1520d5fe38e82fde8748cbc926e7284574c714d6b", size = 405624, 
upload-time = "2025-11-16T14:48:01.496Z" }, + { url = "https://files.pythonhosted.org/packages/c2/e2/6d7d6941ca0843609fd2d72c966a438d6f22617baf22d46c3d2156c31350/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89ca2e673ddd5bde9b386da9a0aac0cab0e76f40c8f0aaf0d6311b6bbf2aa311", size = 527894, upload-time = "2025-11-16T14:48:03.167Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f7/aee14dc2db61bb2ae1e3068f134ca9da5f28c586120889a70ff504bb026f/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5d9da3ff5af1ca1249b1adb8ef0573b94c76e6ae880ba1852f033bf429d4588", size = 412720, upload-time = "2025-11-16T14:48:04.413Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e2/2293f236e887c0360c2723d90c00d48dee296406994d6271faf1712e94ec/rpds_py-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8238d1d310283e87376c12f658b61e1ee23a14c0e54c7c0ce953efdbdc72deed", size = 392945, upload-time = "2025-11-16T14:48:06.252Z" }, + { url = "https://files.pythonhosted.org/packages/14/cd/ceea6147acd3bd1fd028d1975228f08ff19d62098078d5ec3eed49703797/rpds_py-0.29.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:2d6fb2ad1c36f91c4646989811e84b1ea5e0c3cf9690b826b6e32b7965853a63", size = 406385, upload-time = "2025-11-16T14:48:07.575Z" }, + { url = "https://files.pythonhosted.org/packages/52/36/fe4dead19e45eb77a0524acfdbf51e6cda597b26fc5b6dddbff55fbbb1a5/rpds_py-0.29.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:534dc9df211387547267ccdb42253aa30527482acb38dd9b21c5c115d66a96d2", size = 423943, upload-time = "2025-11-16T14:48:10.175Z" }, + { url = "https://files.pythonhosted.org/packages/a1/7b/4551510803b582fa4abbc8645441a2d15aa0c962c3b21ebb380b7e74f6a1/rpds_py-0.29.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d456e64724a075441e4ed648d7f154dc62e9aabff29bcdf723d0c00e9e1d352f", size = 574204, upload-time = "2025-11-16T14:48:11.499Z" }, + { url = "https://files.pythonhosted.org/packages/64/ba/071ccdd7b171e727a6ae079f02c26f75790b41555f12ca8f1151336d2124/rpds_py-0.29.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a738f2da2f565989401bd6fd0b15990a4d1523c6d7fe83f300b7e7d17212feca", size = 600587, upload-time = "2025-11-16T14:48:12.822Z" }, + { url = "https://files.pythonhosted.org/packages/03/09/96983d48c8cf5a1e03c7d9cc1f4b48266adfb858ae48c7c2ce978dbba349/rpds_py-0.29.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a110e14508fd26fd2e472bb541f37c209409876ba601cf57e739e87d8a53cf95", size = 562287, upload-time = "2025-11-16T14:48:14.108Z" }, + { url = "https://files.pythonhosted.org/packages/40/f0/8c01aaedc0fa92156f0391f39ea93b5952bc0ec56b897763858f95da8168/rpds_py-0.29.0-cp311-cp311-win32.whl", hash = "sha256:923248a56dd8d158389a28934f6f69ebf89f218ef96a6b216a9be6861804d3f4", size = 221394, upload-time = "2025-11-16T14:48:15.374Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a5/a8b21c54c7d234efdc83dc034a4d7cd9668e3613b6316876a29b49dece71/rpds_py-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:539eb77eb043afcc45314d1be09ea6d6cafb3addc73e0547c171c6d636957f60", size = 235713, upload-time = "2025-11-16T14:48:16.636Z" }, + { url = "https://files.pythonhosted.org/packages/a7/1f/df3c56219523947b1be402fa12e6323fe6d61d883cf35d6cb5d5bb6db9d9/rpds_py-0.29.0-cp311-cp311-win_arm64.whl", hash = "sha256:bdb67151ea81fcf02d8f494703fb728d4d34d24556cbff5f417d74f6f5792e7c", size = 229157, upload-time = "2025-11-16T14:48:17.891Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/50/bc0e6e736d94e420df79be4deb5c9476b63165c87bb8f19ef75d100d21b3/rpds_py-0.29.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a0891cfd8db43e085c0ab93ab7e9b0c8fee84780d436d3b266b113e51e79f954", size = 376000, upload-time = "2025-11-16T14:48:19.141Z" }, + { url = "https://files.pythonhosted.org/packages/3e/3a/46676277160f014ae95f24de53bed0e3b7ea66c235e7de0b9df7bd5d68ba/rpds_py-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3897924d3f9a0361472d884051f9a2460358f9a45b1d85a39a158d2f8f1ad71c", size = 360575, upload-time = "2025-11-16T14:48:20.443Z" }, + { url = "https://files.pythonhosted.org/packages/75/ba/411d414ed99ea1afdd185bbabeeaac00624bd1e4b22840b5e9967ade6337/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21deb8e0d1571508c6491ce5ea5e25669b1dd4adf1c9d64b6314842f708b5d", size = 392159, upload-time = "2025-11-16T14:48:22.12Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b1/e18aa3a331f705467a48d0296778dc1fea9d7f6cf675bd261f9a846c7e90/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9efe71687d6427737a0a2de9ca1c0a216510e6cd08925c44162be23ed7bed2d5", size = 410602, upload-time = "2025-11-16T14:48:23.563Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6c/04f27f0c9f2299274c76612ac9d2c36c5048bb2c6c2e52c38c60bf3868d9/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40f65470919dc189c833e86b2c4bd21bd355f98436a2cef9e0a9a92aebc8e57e", size = 515808, upload-time = "2025-11-16T14:48:24.949Z" }, + { url = "https://files.pythonhosted.org/packages/83/56/a8412aa464fb151f8bc0d91fb0bb888adc9039bd41c1c6ba8d94990d8cf8/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:def48ff59f181130f1a2cb7c517d16328efac3ec03951cca40c1dc2049747e83", size = 416015, upload-time = "2025-11-16T14:48:26.782Z" }, + { url = "https://files.pythonhosted.org/packages/04/4c/f9b8a05faca3d9e0a6397c90d13acb9307c9792b2bff621430c58b1d6e76/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7bd570be92695d89285a4b373006930715b78d96449f686af422debb4d3949", size = 395325, upload-time = "2025-11-16T14:48:28.055Z" }, + { url = "https://files.pythonhosted.org/packages/34/60/869f3bfbf8ed7b54f1ad9a5543e0fdffdd40b5a8f587fe300ee7b4f19340/rpds_py-0.29.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:5a572911cd053137bbff8e3a52d31c5d2dba51d3a67ad902629c70185f3f2181", size = 410160, upload-time = "2025-11-16T14:48:29.338Z" }, + { url = "https://files.pythonhosted.org/packages/91/aa/e5b496334e3aba4fe4c8a80187b89f3c1294c5c36f2a926da74338fa5a73/rpds_py-0.29.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d583d4403bcbf10cffc3ab5cee23d7643fcc960dff85973fd3c2d6c86e8dbb0c", size = 425309, upload-time = "2025-11-16T14:48:30.691Z" }, + { url = "https://files.pythonhosted.org/packages/85/68/4e24a34189751ceb6d66b28f18159922828dd84155876551f7ca5b25f14f/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:070befbb868f257d24c3bb350dbd6e2f645e83731f31264b19d7231dd5c396c7", size = 574644, upload-time = "2025-11-16T14:48:31.964Z" }, + { url = "https://files.pythonhosted.org/packages/8c/cf/474a005ea4ea9c3b4f17b6108b6b13cebfc98ebaff11d6e1b193204b3a93/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fc935f6b20b0c9f919a8ff024739174522abd331978f750a74bb68abd117bd19", size = 601605, upload-time = "2025-11-16T14:48:33.252Z" }, 
+ { url = "https://files.pythonhosted.org/packages/f4/b1/c56f6a9ab8c5f6bb5c65c4b5f8229167a3a525245b0773f2c0896686b64e/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c5a8ecaa44ce2d8d9d20a68a2483a74c07f05d72e94a4dff88906c8807e77b0", size = 564593, upload-time = "2025-11-16T14:48:34.643Z" }, + { url = "https://files.pythonhosted.org/packages/b3/13/0494cecce4848f68501e0a229432620b4b57022388b071eeff95f3e1e75b/rpds_py-0.29.0-cp312-cp312-win32.whl", hash = "sha256:ba5e1aeaf8dd6d8f6caba1f5539cddda87d511331714b7b5fc908b6cfc3636b7", size = 223853, upload-time = "2025-11-16T14:48:36.419Z" }, + { url = "https://files.pythonhosted.org/packages/1f/6a/51e9aeb444a00cdc520b032a28b07e5f8dc7bc328b57760c53e7f96997b4/rpds_py-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:b5f6134faf54b3cb83375db0f113506f8b7770785be1f95a631e7e2892101977", size = 239895, upload-time = "2025-11-16T14:48:37.956Z" }, + { url = "https://files.pythonhosted.org/packages/d1/d4/8bce56cdad1ab873e3f27cb31c6a51d8f384d66b022b820525b879f8bed1/rpds_py-0.29.0-cp312-cp312-win_arm64.whl", hash = "sha256:b016eddf00dca7944721bf0cd85b6af7f6c4efaf83ee0b37c4133bd39757a8c7", size = 230321, upload-time = "2025-11-16T14:48:39.71Z" }, + { url = "https://files.pythonhosted.org/packages/f2/ac/b97e80bf107159e5b9ba9c91df1ab95f69e5e41b435f27bdd737f0d583ac/rpds_py-0.29.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:acd82a9e39082dc5f4492d15a6b6c8599aa21db5c35aaf7d6889aea16502c07d", size = 373963, upload-time = "2025-11-16T14:50:16.205Z" }, + { url = "https://files.pythonhosted.org/packages/40/5a/55e72962d5d29bd912f40c594e68880d3c7a52774b0f75542775f9250712/rpds_py-0.29.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:715b67eac317bf1c7657508170a3e011a1ea6ccb1c9d5f296e20ba14196be6b3", size = 364644, upload-time = "2025-11-16T14:50:18.22Z" }, + { url = "https://files.pythonhosted.org/packages/99/2a/6b6524d0191b7fc1351c3c0840baac42250515afb48ae40c7ed15499a6a2/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3b1b87a237cb2dba4db18bcfaaa44ba4cd5936b91121b62292ff21df577fc43", size = 393847, upload-time = "2025-11-16T14:50:20.012Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b8/c5692a7df577b3c0c7faed7ac01ee3c608b81750fc5d89f84529229b6873/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c3c3e8101bb06e337c88eb0c0ede3187131f19d97d43ea0e1c5407ea74c0cbf", size = 407281, upload-time = "2025-11-16T14:50:21.64Z" }, + { url = "https://files.pythonhosted.org/packages/f0/57/0546c6f84031b7ea08b76646a8e33e45607cc6bd879ff1917dc077bb881e/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8e54d6e61f3ecd3abe032065ce83ea63417a24f437e4a3d73d2f85ce7b7cfe", size = 529213, upload-time = "2025-11-16T14:50:23.219Z" }, + { url = "https://files.pythonhosted.org/packages/fa/c1/01dd5f444233605555bc11fe5fed6a5c18f379f02013870c176c8e630a23/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fbd4e9aebf110473a420dea85a238b254cf8a15acb04b22a5a6b5ce8925b760", size = 413808, upload-time = "2025-11-16T14:50:25.262Z" }, + { url = "https://files.pythonhosted.org/packages/aa/0a/60f98b06156ea2a7af849fb148e00fbcfdb540909a5174a5ed10c93745c7/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80fdf53d36e6c72819993e35d1ebeeb8e8fc688d0c6c2b391b55e335b3afba5a", size = 394600, upload-time = 
"2025-11-16T14:50:26.956Z" }, + { url = "https://files.pythonhosted.org/packages/37/f1/dc9312fc9bec040ece08396429f2bd9e0977924ba7a11c5ad7056428465e/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:ea7173df5d86f625f8dde6d5929629ad811ed8decda3b60ae603903839ac9ac0", size = 408634, upload-time = "2025-11-16T14:50:28.989Z" }, + { url = "https://files.pythonhosted.org/packages/ed/41/65024c9fd40c89bb7d604cf73beda4cbdbcebe92d8765345dd65855b6449/rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:76054d540061eda273274f3d13a21a4abdde90e13eaefdc205db37c05230efce", size = 426064, upload-time = "2025-11-16T14:50:30.674Z" }, + { url = "https://files.pythonhosted.org/packages/a2/e0/cf95478881fc88ca2fdbf56381d7df36567cccc39a05394beac72182cd62/rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:9f84c549746a5be3bc7415830747a3a0312573afc9f95785eb35228bb17742ec", size = 575871, upload-time = "2025-11-16T14:50:33.428Z" }, + { url = "https://files.pythonhosted.org/packages/ea/c0/df88097e64339a0218b57bd5f9ca49898e4c394db756c67fccc64add850a/rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:0ea962671af5cb9a260489e311fa22b2e97103e3f9f0caaea6f81390af96a9ed", size = 601702, upload-time = "2025-11-16T14:50:36.051Z" }, + { url = "https://files.pythonhosted.org/packages/87/f4/09ffb3ebd0cbb9e2c7c9b84d252557ecf434cd71584ee1e32f66013824df/rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f7728653900035fb7b8d06e1e5900545d8088efc9d5d4545782da7df03ec803f", size = 564054, upload-time = "2025-11-16T14:50:37.733Z" }, ] [[package]] @@ -5548,35 +5598,35 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyasn1" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034 } +sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696 }, + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, ] [[package]] name = "ruff" version = "0.14.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/f0/62b5a1a723fe183650109407fa56abb433b00aa1c0b9ba555f9c4efec2c6/ruff-0.14.6.tar.gz", hash = "sha256:6f0c742ca6a7783a736b867a263b9a7a80a45ce9bee391eeda296895f1b4e1cc", size = 5669501 } +sdist = { url = "https://files.pythonhosted.org/packages/52/f0/62b5a1a723fe183650109407fa56abb433b00aa1c0b9ba555f9c4efec2c6/ruff-0.14.6.tar.gz", hash = "sha256:6f0c742ca6a7783a736b867a263b9a7a80a45ce9bee391eeda296895f1b4e1cc", size = 5669501, upload-time = "2025-11-21T14:26:17.903Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/67/d2/7dd544116d107fffb24a0064d41a5d2ed1c9d6372d142f9ba108c8e39207/ruff-0.14.6-py3-none-linux_armv6l.whl", hash = "sha256:d724ac2f1c240dbd01a2ae98db5d1d9a5e1d9e96eba999d1c48e30062df578a3", size = 13326119 }, - { url = "https://files.pythonhosted.org/packages/36/6a/ad66d0a3315d6327ed6b01f759d83df3c4d5f86c30462121024361137b6a/ruff-0.14.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9f7539ea257aa4d07b7ce87aed580e485c40143f2473ff2f2b75aee003186004", size = 13526007 }, - { url = "https://files.pythonhosted.org/packages/a3/9d/dae6db96df28e0a15dea8e986ee393af70fc97fd57669808728080529c37/ruff-0.14.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7f6007e55b90a2a7e93083ba48a9f23c3158c433591c33ee2e99a49b889c6332", size = 12676572 }, - { url = "https://files.pythonhosted.org/packages/76/a4/f319e87759949062cfee1b26245048e92e2acce900ad3a909285f9db1859/ruff-0.14.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8e7b9d73d8728b68f632aa8e824ef041d068d231d8dbc7808532d3629a6bef", size = 13140745 }, - { url = "https://files.pythonhosted.org/packages/95/d3/248c1efc71a0a8ed4e8e10b4b2266845d7dfc7a0ab64354afe049eaa1310/ruff-0.14.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d50d45d4553a3ebcbd33e7c5e0fe6ca4aafd9a9122492de357205c2c48f00775", size = 13076486 }, - { url = "https://files.pythonhosted.org/packages/a5/19/b68d4563fe50eba4b8c92aa842149bb56dd24d198389c0ed12e7faff4f7d/ruff-0.14.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:118548dd121f8a21bfa8ab2c5b80e5b4aed67ead4b7567790962554f38e598ce", size = 13727563 }, - { url = "https://files.pythonhosted.org/packages/47/ac/943169436832d4b0e867235abbdb57ce3a82367b47e0280fa7b4eabb7593/ruff-0.14.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:57256efafbfefcb8748df9d1d766062f62b20150691021f8ab79e2d919f7c11f", size = 15199755 }, - { url = "https://files.pythonhosted.org/packages/c9/b9/288bb2399860a36d4bb0541cb66cce3c0f4156aaff009dc8499be0c24bf2/ruff-0.14.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff18134841e5c68f8e5df1999a64429a02d5549036b394fafbe410f886e1989d", size = 14850608 }, - { url = "https://files.pythonhosted.org/packages/ee/b1/a0d549dd4364e240f37e7d2907e97ee80587480d98c7799d2d8dc7a2f605/ruff-0.14.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29c4b7ec1e66a105d5c27bd57fa93203637d66a26d10ca9809dc7fc18ec58440", size = 14118754 }, - { url = "https://files.pythonhosted.org/packages/13/ac/9b9fe63716af8bdfddfacd0882bc1586f29985d3b988b3c62ddce2e202c3/ruff-0.14.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167843a6f78680746d7e226f255d920aeed5e4ad9c03258094a2d49d3028b105", size = 13949214 }, - { url = "https://files.pythonhosted.org/packages/12/27/4dad6c6a77fede9560b7df6802b1b697e97e49ceabe1f12baf3ea20862e9/ruff-0.14.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:16a33af621c9c523b1ae006b1b99b159bf5ac7e4b1f20b85b2572455018e0821", size = 14106112 }, - { url = "https://files.pythonhosted.org/packages/6a/db/23e322d7177873eaedea59a7932ca5084ec5b7e20cb30f341ab594130a71/ruff-0.14.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1432ab6e1ae2dc565a7eea707d3b03a0c234ef401482a6f1621bc1f427c2ff55", size = 13035010 }, - { url = "https://files.pythonhosted.org/packages/a8/9c/20e21d4d69dbb35e6a1df7691e02f363423658a20a2afacf2a2c011800dc/ruff-0.14.6-py3-none-musllinux_1_2_armv7l.whl", hash = 
"sha256:4c55cfbbe7abb61eb914bfd20683d14cdfb38a6d56c6c66efa55ec6570ee4e71", size = 13054082 }, - { url = "https://files.pythonhosted.org/packages/66/25/906ee6a0464c3125c8d673c589771a974965c2be1a1e28b5c3b96cb6ef88/ruff-0.14.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:efea3c0f21901a685fff4befda6d61a1bf4cb43de16da87e8226a281d614350b", size = 13303354 }, - { url = "https://files.pythonhosted.org/packages/4c/58/60577569e198d56922b7ead07b465f559002b7b11d53f40937e95067ca1c/ruff-0.14.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:344d97172576d75dc6afc0e9243376dbe1668559c72de1864439c4fc95f78185", size = 14054487 }, - { url = "https://files.pythonhosted.org/packages/67/0b/8e4e0639e4cc12547f41cb771b0b44ec8225b6b6a93393176d75fe6f7d40/ruff-0.14.6-py3-none-win32.whl", hash = "sha256:00169c0c8b85396516fdd9ce3446c7ca20c2a8f90a77aa945ba6b8f2bfe99e85", size = 13013361 }, - { url = "https://files.pythonhosted.org/packages/fb/02/82240553b77fd1341f80ebb3eaae43ba011c7a91b4224a9f317d8e6591af/ruff-0.14.6-py3-none-win_amd64.whl", hash = "sha256:390e6480c5e3659f8a4c8d6a0373027820419ac14fa0d2713bd8e6c3e125b8b9", size = 14432087 }, - { url = "https://files.pythonhosted.org/packages/a5/1f/93f9b0fad9470e4c829a5bb678da4012f0c710d09331b860ee555216f4ea/ruff-0.14.6-py3-none-win_arm64.whl", hash = "sha256:d43c81fbeae52cfa8728d8766bbf46ee4298c888072105815b392da70ca836b2", size = 13520930 }, + { url = "https://files.pythonhosted.org/packages/67/d2/7dd544116d107fffb24a0064d41a5d2ed1c9d6372d142f9ba108c8e39207/ruff-0.14.6-py3-none-linux_armv6l.whl", hash = "sha256:d724ac2f1c240dbd01a2ae98db5d1d9a5e1d9e96eba999d1c48e30062df578a3", size = 13326119, upload-time = "2025-11-21T14:25:24.2Z" }, + { url = "https://files.pythonhosted.org/packages/36/6a/ad66d0a3315d6327ed6b01f759d83df3c4d5f86c30462121024361137b6a/ruff-0.14.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9f7539ea257aa4d07b7ce87aed580e485c40143f2473ff2f2b75aee003186004", size = 13526007, upload-time = "2025-11-21T14:25:26.906Z" }, + { url = "https://files.pythonhosted.org/packages/a3/9d/dae6db96df28e0a15dea8e986ee393af70fc97fd57669808728080529c37/ruff-0.14.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7f6007e55b90a2a7e93083ba48a9f23c3158c433591c33ee2e99a49b889c6332", size = 12676572, upload-time = "2025-11-21T14:25:29.826Z" }, + { url = "https://files.pythonhosted.org/packages/76/a4/f319e87759949062cfee1b26245048e92e2acce900ad3a909285f9db1859/ruff-0.14.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8e7b9d73d8728b68f632aa8e824ef041d068d231d8dbc7808532d3629a6bef", size = 13140745, upload-time = "2025-11-21T14:25:32.788Z" }, + { url = "https://files.pythonhosted.org/packages/95/d3/248c1efc71a0a8ed4e8e10b4b2266845d7dfc7a0ab64354afe049eaa1310/ruff-0.14.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d50d45d4553a3ebcbd33e7c5e0fe6ca4aafd9a9122492de357205c2c48f00775", size = 13076486, upload-time = "2025-11-21T14:25:35.601Z" }, + { url = "https://files.pythonhosted.org/packages/a5/19/b68d4563fe50eba4b8c92aa842149bb56dd24d198389c0ed12e7faff4f7d/ruff-0.14.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:118548dd121f8a21bfa8ab2c5b80e5b4aed67ead4b7567790962554f38e598ce", size = 13727563, upload-time = "2025-11-21T14:25:38.514Z" }, + { url = "https://files.pythonhosted.org/packages/47/ac/943169436832d4b0e867235abbdb57ce3a82367b47e0280fa7b4eabb7593/ruff-0.14.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:57256efafbfefcb8748df9d1d766062f62b20150691021f8ab79e2d919f7c11f", size = 15199755, upload-time = "2025-11-21T14:25:41.516Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b9/288bb2399860a36d4bb0541cb66cce3c0f4156aaff009dc8499be0c24bf2/ruff-0.14.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff18134841e5c68f8e5df1999a64429a02d5549036b394fafbe410f886e1989d", size = 14850608, upload-time = "2025-11-21T14:25:44.428Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b1/a0d549dd4364e240f37e7d2907e97ee80587480d98c7799d2d8dc7a2f605/ruff-0.14.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29c4b7ec1e66a105d5c27bd57fa93203637d66a26d10ca9809dc7fc18ec58440", size = 14118754, upload-time = "2025-11-21T14:25:47.214Z" }, + { url = "https://files.pythonhosted.org/packages/13/ac/9b9fe63716af8bdfddfacd0882bc1586f29985d3b988b3c62ddce2e202c3/ruff-0.14.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167843a6f78680746d7e226f255d920aeed5e4ad9c03258094a2d49d3028b105", size = 13949214, upload-time = "2025-11-21T14:25:50.002Z" }, + { url = "https://files.pythonhosted.org/packages/12/27/4dad6c6a77fede9560b7df6802b1b697e97e49ceabe1f12baf3ea20862e9/ruff-0.14.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:16a33af621c9c523b1ae006b1b99b159bf5ac7e4b1f20b85b2572455018e0821", size = 14106112, upload-time = "2025-11-21T14:25:52.841Z" }, + { url = "https://files.pythonhosted.org/packages/6a/db/23e322d7177873eaedea59a7932ca5084ec5b7e20cb30f341ab594130a71/ruff-0.14.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1432ab6e1ae2dc565a7eea707d3b03a0c234ef401482a6f1621bc1f427c2ff55", size = 13035010, upload-time = "2025-11-21T14:25:55.536Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9c/20e21d4d69dbb35e6a1df7691e02f363423658a20a2afacf2a2c011800dc/ruff-0.14.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c55cfbbe7abb61eb914bfd20683d14cdfb38a6d56c6c66efa55ec6570ee4e71", size = 13054082, upload-time = "2025-11-21T14:25:58.625Z" }, + { url = "https://files.pythonhosted.org/packages/66/25/906ee6a0464c3125c8d673c589771a974965c2be1a1e28b5c3b96cb6ef88/ruff-0.14.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:efea3c0f21901a685fff4befda6d61a1bf4cb43de16da87e8226a281d614350b", size = 13303354, upload-time = "2025-11-21T14:26:01.816Z" }, + { url = "https://files.pythonhosted.org/packages/4c/58/60577569e198d56922b7ead07b465f559002b7b11d53f40937e95067ca1c/ruff-0.14.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:344d97172576d75dc6afc0e9243376dbe1668559c72de1864439c4fc95f78185", size = 14054487, upload-time = "2025-11-21T14:26:05.058Z" }, + { url = "https://files.pythonhosted.org/packages/67/0b/8e4e0639e4cc12547f41cb771b0b44ec8225b6b6a93393176d75fe6f7d40/ruff-0.14.6-py3-none-win32.whl", hash = "sha256:00169c0c8b85396516fdd9ce3446c7ca20c2a8f90a77aa945ba6b8f2bfe99e85", size = 13013361, upload-time = "2025-11-21T14:26:08.152Z" }, + { url = "https://files.pythonhosted.org/packages/fb/02/82240553b77fd1341f80ebb3eaae43ba011c7a91b4224a9f317d8e6591af/ruff-0.14.6-py3-none-win_amd64.whl", hash = "sha256:390e6480c5e3659f8a4c8d6a0373027820419ac14fa0d2713bd8e6c3e125b8b9", size = 14432087, upload-time = "2025-11-21T14:26:10.891Z" }, + { url = "https://files.pythonhosted.org/packages/a5/1f/93f9b0fad9470e4c829a5bb678da4012f0c710d09331b860ee555216f4ea/ruff-0.14.6-py3-none-win_arm64.whl", hash = "sha256:d43c81fbeae52cfa8728d8766bbf46ee4298c888072105815b392da70ca836b2", size = 13520930, upload-time = 
"2025-11-21T14:26:13.951Z" }, ] [[package]] @@ -5586,31 +5636,31 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/0a/1cdbabf9edd0ea7747efdf6c9ab4e7061b085aa7f9bfc36bb1601563b069/s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7", size = 145287 } +sdist = { url = "https://files.pythonhosted.org/packages/c0/0a/1cdbabf9edd0ea7747efdf6c9ab4e7061b085aa7f9bfc36bb1601563b069/s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7", size = 145287, upload-time = "2024-11-20T21:06:05.981Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/05/7957af15543b8c9799209506df4660cba7afc4cf94bfb60513827e96bed6/s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e", size = 83175 }, + { url = "https://files.pythonhosted.org/packages/66/05/7957af15543b8c9799209506df4660cba7afc4cf94bfb60513827e96bed6/s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e", size = 83175, upload-time = "2024-11-20T21:06:03.961Z" }, ] [[package]] name = "safetensors" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/29/9c/6e74567782559a63bd040a236edca26fd71bc7ba88de2ef35d75df3bca5e/safetensors-0.7.0.tar.gz", hash = "sha256:07663963b67e8bd9f0b8ad15bb9163606cd27cc5a1b96235a50d8369803b96b0", size = 200878 } +sdist = { url = "https://files.pythonhosted.org/packages/29/9c/6e74567782559a63bd040a236edca26fd71bc7ba88de2ef35d75df3bca5e/safetensors-0.7.0.tar.gz", hash = "sha256:07663963b67e8bd9f0b8ad15bb9163606cd27cc5a1b96235a50d8369803b96b0", size = 200878, upload-time = "2025-11-19T15:18:43.199Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/47/aef6c06649039accf914afef490268e1067ed82be62bcfa5b7e886ad15e8/safetensors-0.7.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c82f4d474cf725255d9e6acf17252991c3c8aac038d6ef363a4bf8be2f6db517", size = 467781 }, - { url = "https://files.pythonhosted.org/packages/e8/00/374c0c068e30cd31f1e1b46b4b5738168ec79e7689ca82ee93ddfea05109/safetensors-0.7.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:94fd4858284736bb67a897a41608b5b0c2496c9bdb3bf2af1fa3409127f20d57", size = 447058 }, - { url = "https://files.pythonhosted.org/packages/f1/06/578ffed52c2296f93d7fd2d844cabfa92be51a587c38c8afbb8ae449ca89/safetensors-0.7.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e07d91d0c92a31200f25351f4acb2bc6aff7f48094e13ebb1d0fb995b54b6542", size = 491748 }, - { url = "https://files.pythonhosted.org/packages/ae/33/1debbbb70e4791dde185edb9413d1fe01619255abb64b300157d7f15dddd/safetensors-0.7.0-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8469155f4cb518bafb4acf4865e8bb9d6804110d2d9bdcaa78564b9fd841e104", size = 503881 }, - { url = "https://files.pythonhosted.org/packages/8e/1c/40c2ca924d60792c3be509833df711b553c60effbd91da6f5284a83f7122/safetensors-0.7.0-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54bef08bf00a2bff599982f6b08e8770e09cc012d7bba00783fc7ea38f1fb37d", size = 623463 }, - { url = "https://files.pythonhosted.org/packages/9b/3a/13784a9364bd43b0d61eef4bea2845039bc2030458b16594a1bd787ae26e/safetensors-0.7.0-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:42cb091236206bb2016d245c377ed383aa7f78691748f3bb6ee1bfa51ae2ce6a", size = 532855 }, - { url = "https://files.pythonhosted.org/packages/a0/60/429e9b1cb3fc651937727befe258ea24122d9663e4d5709a48c9cbfceecb/safetensors-0.7.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac7252938f0696ddea46f5e855dd3138444e82236e3be475f54929f0c510d48", size = 507152 }, - { url = "https://files.pythonhosted.org/packages/3c/a8/4b45e4e059270d17af60359713ffd83f97900d45a6afa73aaa0d737d48b6/safetensors-0.7.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1d060c70284127fa805085d8f10fbd0962792aed71879d00864acda69dbab981", size = 541856 }, - { url = "https://files.pythonhosted.org/packages/06/87/d26d8407c44175d8ae164a95b5a62707fcc445f3c0c56108e37d98070a3d/safetensors-0.7.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cdab83a366799fa730f90a4ebb563e494f28e9e92c4819e556152ad55e43591b", size = 674060 }, - { url = "https://files.pythonhosted.org/packages/11/f5/57644a2ff08dc6325816ba7217e5095f17269dada2554b658442c66aed51/safetensors-0.7.0-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:672132907fcad9f2aedcb705b2d7b3b93354a2aec1b2f706c4db852abe338f85", size = 771715 }, - { url = "https://files.pythonhosted.org/packages/86/31/17883e13a814bd278ae6e266b13282a01049b0c81341da7fd0e3e71a80a3/safetensors-0.7.0-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:5d72abdb8a4d56d4020713724ba81dac065fedb7f3667151c4a637f1d3fb26c0", size = 714377 }, - { url = "https://files.pythonhosted.org/packages/4a/d8/0c8a7dc9b41dcac53c4cbf9df2b9c83e0e0097203de8b37a712b345c0be5/safetensors-0.7.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0f6d66c1c538d5a94a73aa9ddca8ccc4227e6c9ff555322ea40bdd142391dd4", size = 677368 }, - { url = "https://files.pythonhosted.org/packages/05/e5/cb4b713c8a93469e3c5be7c3f8d77d307e65fe89673e731f5c2bfd0a9237/safetensors-0.7.0-cp38-abi3-win32.whl", hash = "sha256:c74af94bf3ac15ac4d0f2a7c7b4663a15f8c2ab15ed0fc7531ca61d0835eccba", size = 326423 }, - { url = "https://files.pythonhosted.org/packages/5d/e6/ec8471c8072382cb91233ba7267fd931219753bb43814cbc71757bfd4dab/safetensors-0.7.0-cp38-abi3-win_amd64.whl", hash = "sha256:d1239932053f56f3456f32eb9625590cc7582e905021f94636202a864d470755", size = 341380 }, + { url = "https://files.pythonhosted.org/packages/fa/47/aef6c06649039accf914afef490268e1067ed82be62bcfa5b7e886ad15e8/safetensors-0.7.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c82f4d474cf725255d9e6acf17252991c3c8aac038d6ef363a4bf8be2f6db517", size = 467781, upload-time = "2025-11-19T15:18:35.84Z" }, + { url = "https://files.pythonhosted.org/packages/e8/00/374c0c068e30cd31f1e1b46b4b5738168ec79e7689ca82ee93ddfea05109/safetensors-0.7.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:94fd4858284736bb67a897a41608b5b0c2496c9bdb3bf2af1fa3409127f20d57", size = 447058, upload-time = "2025-11-19T15:18:34.416Z" }, + { url = "https://files.pythonhosted.org/packages/f1/06/578ffed52c2296f93d7fd2d844cabfa92be51a587c38c8afbb8ae449ca89/safetensors-0.7.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e07d91d0c92a31200f25351f4acb2bc6aff7f48094e13ebb1d0fb995b54b6542", size = 491748, upload-time = "2025-11-19T15:18:09.79Z" }, + { url = "https://files.pythonhosted.org/packages/ae/33/1debbbb70e4791dde185edb9413d1fe01619255abb64b300157d7f15dddd/safetensors-0.7.0-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8469155f4cb518bafb4acf4865e8bb9d6804110d2d9bdcaa78564b9fd841e104", size = 503881, upload-time = 
"2025-11-19T15:18:16.145Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1c/40c2ca924d60792c3be509833df711b553c60effbd91da6f5284a83f7122/safetensors-0.7.0-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54bef08bf00a2bff599982f6b08e8770e09cc012d7bba00783fc7ea38f1fb37d", size = 623463, upload-time = "2025-11-19T15:18:21.11Z" }, + { url = "https://files.pythonhosted.org/packages/9b/3a/13784a9364bd43b0d61eef4bea2845039bc2030458b16594a1bd787ae26e/safetensors-0.7.0-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42cb091236206bb2016d245c377ed383aa7f78691748f3bb6ee1bfa51ae2ce6a", size = 532855, upload-time = "2025-11-19T15:18:25.719Z" }, + { url = "https://files.pythonhosted.org/packages/a0/60/429e9b1cb3fc651937727befe258ea24122d9663e4d5709a48c9cbfceecb/safetensors-0.7.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac7252938f0696ddea46f5e855dd3138444e82236e3be475f54929f0c510d48", size = 507152, upload-time = "2025-11-19T15:18:33.023Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a8/4b45e4e059270d17af60359713ffd83f97900d45a6afa73aaa0d737d48b6/safetensors-0.7.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1d060c70284127fa805085d8f10fbd0962792aed71879d00864acda69dbab981", size = 541856, upload-time = "2025-11-19T15:18:31.075Z" }, + { url = "https://files.pythonhosted.org/packages/06/87/d26d8407c44175d8ae164a95b5a62707fcc445f3c0c56108e37d98070a3d/safetensors-0.7.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cdab83a366799fa730f90a4ebb563e494f28e9e92c4819e556152ad55e43591b", size = 674060, upload-time = "2025-11-19T15:18:37.211Z" }, + { url = "https://files.pythonhosted.org/packages/11/f5/57644a2ff08dc6325816ba7217e5095f17269dada2554b658442c66aed51/safetensors-0.7.0-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:672132907fcad9f2aedcb705b2d7b3b93354a2aec1b2f706c4db852abe338f85", size = 771715, upload-time = "2025-11-19T15:18:38.689Z" }, + { url = "https://files.pythonhosted.org/packages/86/31/17883e13a814bd278ae6e266b13282a01049b0c81341da7fd0e3e71a80a3/safetensors-0.7.0-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:5d72abdb8a4d56d4020713724ba81dac065fedb7f3667151c4a637f1d3fb26c0", size = 714377, upload-time = "2025-11-19T15:18:40.162Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d8/0c8a7dc9b41dcac53c4cbf9df2b9c83e0e0097203de8b37a712b345c0be5/safetensors-0.7.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0f6d66c1c538d5a94a73aa9ddca8ccc4227e6c9ff555322ea40bdd142391dd4", size = 677368, upload-time = "2025-11-19T15:18:41.627Z" }, + { url = "https://files.pythonhosted.org/packages/05/e5/cb4b713c8a93469e3c5be7c3f8d77d307e65fe89673e731f5c2bfd0a9237/safetensors-0.7.0-cp38-abi3-win32.whl", hash = "sha256:c74af94bf3ac15ac4d0f2a7c7b4663a15f8c2ab15ed0fc7531ca61d0835eccba", size = 326423, upload-time = "2025-11-19T15:18:45.74Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e6/ec8471c8072382cb91233ba7267fd931219753bb43814cbc71757bfd4dab/safetensors-0.7.0-cp38-abi3-win_amd64.whl", hash = "sha256:d1239932053f56f3456f32eb9625590cc7582e905021f94636202a864d470755", size = 341380, upload-time = "2025-11-19T15:18:44.427Z" }, ] [[package]] @@ -5620,9 +5670,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "optype", extra = ["numpy"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/3e/8baf960c68f012b8297930d4686b235813974833a417db8d0af798b0b93d/scipy_stubs-1.16.3.1.tar.gz", hash = 
"sha256:0738d55a7f8b0c94cdb8063f711d53330ebefe166f7d48dec9ffd932a337226d", size = 359990 } +sdist = { url = "https://files.pythonhosted.org/packages/0b/3e/8baf960c68f012b8297930d4686b235813974833a417db8d0af798b0b93d/scipy_stubs-1.16.3.1.tar.gz", hash = "sha256:0738d55a7f8b0c94cdb8063f711d53330ebefe166f7d48dec9ffd932a337226d", size = 359990, upload-time = "2025-11-23T23:05:21.274Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/39/e2a69866518f88dc01940c9b9b044db97c3387f2826bd2a173e49a5c0469/scipy_stubs-1.16.3.1-py3-none-any.whl", hash = "sha256:69bc52ef6c3f8e09208abdfaf32291eb51e9ddf8fa4389401ccd9473bdd2a26d", size = 560397 }, + { url = "https://files.pythonhosted.org/packages/0c/39/e2a69866518f88dc01940c9b9b044db97c3387f2826bd2a173e49a5c0469/scipy_stubs-1.16.3.1-py3-none-any.whl", hash = "sha256:69bc52ef6c3f8e09208abdfaf32291eb51e9ddf8fa4389401ccd9473bdd2a26d", size = 560397, upload-time = "2025-11-23T23:05:19.432Z" }, ] [[package]] @@ -5634,9 +5684,9 @@ dependencies = [ { name = "python-http-client" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/fa/f718b2b953f99c1f0085811598ac7e31ccbd4229a81ec2a5290be868187a/sendgrid-6.12.5.tar.gz", hash = "sha256:ea9aae30cd55c332e266bccd11185159482edfc07c149b6cd15cf08869fabdb7", size = 50310 } +sdist = { url = "https://files.pythonhosted.org/packages/da/fa/f718b2b953f99c1f0085811598ac7e31ccbd4229a81ec2a5290be868187a/sendgrid-6.12.5.tar.gz", hash = "sha256:ea9aae30cd55c332e266bccd11185159482edfc07c149b6cd15cf08869fabdb7", size = 50310, upload-time = "2025-09-19T06:23:09.229Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/55/b3c3880a77082e8f7374954e0074aafafaa9bc78bdf9c8f5a92c2e7afc6a/sendgrid-6.12.5-py3-none-any.whl", hash = "sha256:96f92cc91634bf552fdb766b904bbb53968018da7ae41fdac4d1090dc0311ca8", size = 102173 }, + { url = "https://files.pythonhosted.org/packages/bd/55/b3c3880a77082e8f7374954e0074aafafaa9bc78bdf9c8f5a92c2e7afc6a/sendgrid-6.12.5-py3-none-any.whl", hash = "sha256:96f92cc91634bf552fdb766b904bbb53968018da7ae41fdac4d1090dc0311ca8", size = 102173, upload-time = "2025-09-19T06:23:07.93Z" }, ] [[package]] @@ -5647,9 +5697,9 @@ dependencies = [ { name = "certifi" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/bb/6a41b2e0e9121bed4d2ec68d50568ab95c49f4744156a9bbb789c866c66d/sentry_sdk-2.28.0.tar.gz", hash = "sha256:14d2b73bc93afaf2a9412490329099e6217761cbab13b6ee8bc0e82927e1504e", size = 325052 } +sdist = { url = "https://files.pythonhosted.org/packages/5e/bb/6a41b2e0e9121bed4d2ec68d50568ab95c49f4744156a9bbb789c866c66d/sentry_sdk-2.28.0.tar.gz", hash = "sha256:14d2b73bc93afaf2a9412490329099e6217761cbab13b6ee8bc0e82927e1504e", size = 325052, upload-time = "2025-05-12T07:53:12.785Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/4e/b1575833094c088dfdef63fbca794518860fcbc8002aadf51ebe8b6a387f/sentry_sdk-2.28.0-py2.py3-none-any.whl", hash = "sha256:51496e6cb3cb625b99c8e08907c67a9112360259b0ef08470e532c3ab184a232", size = 341693 }, + { url = "https://files.pythonhosted.org/packages/9b/4e/b1575833094c088dfdef63fbca794518860fcbc8002aadf51ebe8b6a387f/sentry_sdk-2.28.0-py2.py3-none-any.whl", hash = "sha256:51496e6cb3cb625b99c8e08907c67a9112360259b0ef08470e532c3ab184a232", size = 341693, upload-time = "2025-05-12T07:53:10.882Z" }, ] [package.optional-dependencies] @@ -5663,9 +5713,9 @@ flask = [ name = "setuptools" version = "80.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958 } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486 }, + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, ] [[package]] @@ -5675,87 +5725,87 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489 } +sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/8d/1ff672dea9ec6a7b5d422eb6d095ed886e2e523733329f75fdcb14ee1149/shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618", size = 1820038 }, - { url = "https://files.pythonhosted.org/packages/4f/ce/28fab8c772ce5db23a0d86bf0adaee0c4c79d5ad1db766055fa3dab442e2/shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d", size = 1626039 }, - { url = "https://files.pythonhosted.org/packages/70/8b/868b7e3f4982f5006e9395c1e12343c66a8155c0374fdc07c0e6a1ab547d/shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09", size = 3001519 }, - { url = "https://files.pythonhosted.org/packages/13/02/58b0b8d9c17c93ab6340edd8b7308c0c5a5b81f94ce65705819b7416dba5/shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26", size = 3110842 }, - { url = "https://files.pythonhosted.org/packages/af/61/8e389c97994d5f331dcffb25e2fa761aeedfb52b3ad9bcdd7b8671f4810a/shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7", size = 4021316 }, - { url = "https://files.pythonhosted.org/packages/d3/d4/9b2a9fe6039f9e42ccf2cb3e84f219fd8364b0c3b8e7bbc857b5fbe9c14c/shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2", size = 4178586 }, - { url = 
"https://files.pythonhosted.org/packages/16/f6/9840f6963ed4decf76b08fd6d7fed14f8779fb7a62cb45c5617fa8ac6eab/shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6", size = 1543961 }, - { url = "https://files.pythonhosted.org/packages/38/1e/3f8ea46353c2a33c1669eb7327f9665103aa3a8dfe7f2e4ef714c210b2c2/shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc", size = 1722856 }, - { url = "https://files.pythonhosted.org/packages/24/c0/f3b6453cf2dfa99adc0ba6675f9aaff9e526d2224cbd7ff9c1a879238693/shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94", size = 1833550 }, - { url = "https://files.pythonhosted.org/packages/86/07/59dee0bc4b913b7ab59ab1086225baca5b8f19865e6101db9ebb7243e132/shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359", size = 1643556 }, - { url = "https://files.pythonhosted.org/packages/26/29/a5397e75b435b9895cd53e165083faed5d12fd9626eadec15a83a2411f0f/shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3", size = 2988308 }, - { url = "https://files.pythonhosted.org/packages/b9/37/e781683abac55dde9771e086b790e554811a71ed0b2b8a1e789b7430dd44/shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b", size = 3099844 }, - { url = "https://files.pythonhosted.org/packages/d8/f3/9876b64d4a5a321b9dc482c92bb6f061f2fa42131cba643c699f39317cb9/shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc", size = 3988842 }, - { url = "https://files.pythonhosted.org/packages/d1/a0/704c7292f7014c7e74ec84eddb7b109e1fbae74a16deae9c1504b1d15565/shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d", size = 4152714 }, - { url = "https://files.pythonhosted.org/packages/53/46/319c9dc788884ad0785242543cdffac0e6530e4d0deb6c4862bc4143dcf3/shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454", size = 1542745 }, - { url = "https://files.pythonhosted.org/packages/ec/bf/cb6c1c505cb31e818e900b9312d514f381fbfa5c4363edfce0fcc4f8c1a4/shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179", size = 1722861 }, + { url = "https://files.pythonhosted.org/packages/8f/8d/1ff672dea9ec6a7b5d422eb6d095ed886e2e523733329f75fdcb14ee1149/shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618", size = 1820038, upload-time = "2025-09-24T13:50:15.628Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ce/28fab8c772ce5db23a0d86bf0adaee0c4c79d5ad1db766055fa3dab442e2/shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d", size = 1626039, upload-time = "2025-09-24T13:50:16.881Z" }, + { url = "https://files.pythonhosted.org/packages/70/8b/868b7e3f4982f5006e9395c1e12343c66a8155c0374fdc07c0e6a1ab547d/shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09", size = 3001519, upload-time = "2025-09-24T13:50:18.606Z" }, + { url = "https://files.pythonhosted.org/packages/13/02/58b0b8d9c17c93ab6340edd8b7308c0c5a5b81f94ce65705819b7416dba5/shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26", size = 3110842, upload-time = "2025-09-24T13:50:21.77Z" }, + { url = "https://files.pythonhosted.org/packages/af/61/8e389c97994d5f331dcffb25e2fa761aeedfb52b3ad9bcdd7b8671f4810a/shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7", size = 4021316, upload-time = "2025-09-24T13:50:23.626Z" }, + { url = "https://files.pythonhosted.org/packages/d3/d4/9b2a9fe6039f9e42ccf2cb3e84f219fd8364b0c3b8e7bbc857b5fbe9c14c/shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2", size = 4178586, upload-time = "2025-09-24T13:50:25.443Z" }, + { url = "https://files.pythonhosted.org/packages/16/f6/9840f6963ed4decf76b08fd6d7fed14f8779fb7a62cb45c5617fa8ac6eab/shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6", size = 1543961, upload-time = "2025-09-24T13:50:26.968Z" }, + { url = "https://files.pythonhosted.org/packages/38/1e/3f8ea46353c2a33c1669eb7327f9665103aa3a8dfe7f2e4ef714c210b2c2/shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc", size = 1722856, upload-time = "2025-09-24T13:50:28.497Z" }, + { url = "https://files.pythonhosted.org/packages/24/c0/f3b6453cf2dfa99adc0ba6675f9aaff9e526d2224cbd7ff9c1a879238693/shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94", size = 1833550, upload-time = "2025-09-24T13:50:30.019Z" }, + { url = "https://files.pythonhosted.org/packages/86/07/59dee0bc4b913b7ab59ab1086225baca5b8f19865e6101db9ebb7243e132/shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359", size = 1643556, upload-time = "2025-09-24T13:50:32.291Z" }, + { url = "https://files.pythonhosted.org/packages/26/29/a5397e75b435b9895cd53e165083faed5d12fd9626eadec15a83a2411f0f/shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3", size = 2988308, upload-time = "2025-09-24T13:50:33.862Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/e781683abac55dde9771e086b790e554811a71ed0b2b8a1e789b7430dd44/shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b", size = 3099844, upload-time = "2025-09-24T13:50:35.459Z" }, + { url = "https://files.pythonhosted.org/packages/d8/f3/9876b64d4a5a321b9dc482c92bb6f061f2fa42131cba643c699f39317cb9/shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc", size = 3988842, upload-time = "2025-09-24T13:50:37.478Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/704c7292f7014c7e74ec84eddb7b109e1fbae74a16deae9c1504b1d15565/shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d", size = 4152714, upload-time = "2025-09-24T13:50:39.9Z" }, + { url = "https://files.pythonhosted.org/packages/53/46/319c9dc788884ad0785242543cdffac0e6530e4d0deb6c4862bc4143dcf3/shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454", size = 1542745, upload-time = "2025-09-24T13:50:41.414Z" }, + { url = "https://files.pythonhosted.org/packages/ec/bf/cb6c1c505cb31e818e900b9312d514f381fbfa5c4363edfce0fcc4f8c1a4/shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179", size = 1722861, upload-time = "2025-09-24T13:50:43.35Z" }, ] [[package]] name = "shellingham" version = "1.5.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, ] [[package]] name = "six" version = "1.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, ] [[package]] name = "smmap" version = "5.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329 } +sdist = { url = 
"https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329, upload-time = "2025-01-02T07:14:40.909Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303 }, + { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303, upload-time = "2025-01-02T07:14:38.724Z" }, ] [[package]] name = "sniffio" version = "1.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] [[package]] name = "socksio" version = "1.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/5c/48a7d9495be3d1c651198fd99dbb6ce190e2274d0f28b9051307bdec6b85/socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac", size = 19055 } +sdist = { url = "https://files.pythonhosted.org/packages/f8/5c/48a7d9495be3d1c651198fd99dbb6ce190e2274d0f28b9051307bdec6b85/socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac", size = 19055, upload-time = "2020-04-17T15:50:34.664Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/37/c3/6eeb6034408dac0fa653d126c9204ade96b819c936e136c5e8a6897eee9c/socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3", size = 12763 }, + { url = "https://files.pythonhosted.org/packages/37/c3/6eeb6034408dac0fa653d126c9204ade96b819c936e136c5e8a6897eee9c/socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3", size = 12763, upload-time = "2020-04-17T15:50:31.878Z" }, ] [[package]] name = "sortedcontainers" version = "2.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594 } +sdist = { 
url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 }, + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, ] [[package]] name = "soupsieve" version = "2.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472 } +sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679 }, + { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" }, ] [[package]] @@ -5766,52 +5816,52 @@ dependencies = [ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830 } +sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830, upload-time = "2025-10-10T14:39:12.935Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/81/15d7c161c9ddf0900b076b55345872ed04ff1ed6a0666e5e94ab44b0163c/sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd", size = 2140517 }, - { url = "https://files.pythonhosted.org/packages/d4/d5/4abd13b245c7d91bdf131d4916fd9e96a584dac74215f8b5bc945206a974/sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa", size = 2130738 }, - { url = 
"https://files.pythonhosted.org/packages/cb/3c/8418969879c26522019c1025171cefbb2a8586b6789ea13254ac602986c0/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e", size = 3304145 }, - { url = "https://files.pythonhosted.org/packages/94/2d/fdb9246d9d32518bda5d90f4b65030b9bf403a935cfe4c36a474846517cb/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e", size = 3304511 }, - { url = "https://files.pythonhosted.org/packages/7d/fb/40f2ad1da97d5c83f6c1269664678293d3fe28e90ad17a1093b735420549/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399", size = 3235161 }, - { url = "https://files.pythonhosted.org/packages/95/cb/7cf4078b46752dca917d18cf31910d4eff6076e5b513c2d66100c4293d83/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b", size = 3261426 }, - { url = "https://files.pythonhosted.org/packages/f8/3b/55c09b285cb2d55bdfa711e778bdffdd0dc3ffa052b0af41f1c5d6e582fa/sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = "sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3", size = 2105392 }, - { url = "https://files.pythonhosted.org/packages/c7/23/907193c2f4d680aedbfbdf7bf24c13925e3c7c292e813326c1b84a0b878e/sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = "sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5", size = 2130293 }, - { url = "https://files.pythonhosted.org/packages/62/c4/59c7c9b068e6813c898b771204aad36683c96318ed12d4233e1b18762164/sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250", size = 2139675 }, - { url = "https://files.pythonhosted.org/packages/d6/ae/eeb0920537a6f9c5a3708e4a5fc55af25900216bdb4847ec29cfddf3bf3a/sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29", size = 2127726 }, - { url = "https://files.pythonhosted.org/packages/d8/d5/2ebbabe0379418eda8041c06b0b551f213576bfe4c2f09d77c06c07c8cc5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44", size = 3327603 }, - { url = "https://files.pythonhosted.org/packages/45/e5/5aa65852dadc24b7d8ae75b7efb8d19303ed6ac93482e60c44a585930ea5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1", size = 3337842 }, - { url = "https://files.pythonhosted.org/packages/41/92/648f1afd3f20b71e880ca797a960f638d39d243e233a7082c93093c22378/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7", size = 3264558 }, - { url = "https://files.pythonhosted.org/packages/40/cf/e27d7ee61a10f74b17740918e23cbc5bc62011b48282170dc4c66da8ec0f/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d", size = 3301570 }, - { url = "https://files.pythonhosted.org/packages/3b/3d/3116a9a7b63e780fb402799b6da227435be878b6846b192f076d2f838654/sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = 
"sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4", size = 2103447 }, - { url = "https://files.pythonhosted.org/packages/25/83/24690e9dfc241e6ab062df82cc0df7f4231c79ba98b273fa496fb3dd78ed/sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e", size = 2130912 }, - { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718 }, + { url = "https://files.pythonhosted.org/packages/e3/81/15d7c161c9ddf0900b076b55345872ed04ff1ed6a0666e5e94ab44b0163c/sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd", size = 2140517, upload-time = "2025-10-10T15:36:15.64Z" }, + { url = "https://files.pythonhosted.org/packages/d4/d5/4abd13b245c7d91bdf131d4916fd9e96a584dac74215f8b5bc945206a974/sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa", size = 2130738, upload-time = "2025-10-10T15:36:16.91Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3c/8418969879c26522019c1025171cefbb2a8586b6789ea13254ac602986c0/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e", size = 3304145, upload-time = "2025-10-10T15:34:19.569Z" }, + { url = "https://files.pythonhosted.org/packages/94/2d/fdb9246d9d32518bda5d90f4b65030b9bf403a935cfe4c36a474846517cb/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e", size = 3304511, upload-time = "2025-10-10T15:47:05.088Z" }, + { url = "https://files.pythonhosted.org/packages/7d/fb/40f2ad1da97d5c83f6c1269664678293d3fe28e90ad17a1093b735420549/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399", size = 3235161, upload-time = "2025-10-10T15:34:21.193Z" }, + { url = "https://files.pythonhosted.org/packages/95/cb/7cf4078b46752dca917d18cf31910d4eff6076e5b513c2d66100c4293d83/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b", size = 3261426, upload-time = "2025-10-10T15:47:07.196Z" }, + { url = "https://files.pythonhosted.org/packages/f8/3b/55c09b285cb2d55bdfa711e778bdffdd0dc3ffa052b0af41f1c5d6e582fa/sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = "sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3", size = 2105392, upload-time = "2025-10-10T15:38:20.051Z" }, + { url = "https://files.pythonhosted.org/packages/c7/23/907193c2f4d680aedbfbdf7bf24c13925e3c7c292e813326c1b84a0b878e/sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = "sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5", size = 2130293, upload-time = "2025-10-10T15:38:21.601Z" }, + { url = "https://files.pythonhosted.org/packages/62/c4/59c7c9b068e6813c898b771204aad36683c96318ed12d4233e1b18762164/sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250", size = 2139675, upload-time = "2025-10-10T16:03:31.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/ae/eeb0920537a6f9c5a3708e4a5fc55af25900216bdb4847ec29cfddf3bf3a/sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29", size = 2127726, upload-time = "2025-10-10T16:03:35.934Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d5/2ebbabe0379418eda8041c06b0b551f213576bfe4c2f09d77c06c07c8cc5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44", size = 3327603, upload-time = "2025-10-10T15:35:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/5aa65852dadc24b7d8ae75b7efb8d19303ed6ac93482e60c44a585930ea5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1", size = 3337842, upload-time = "2025-10-10T15:43:45.431Z" }, + { url = "https://files.pythonhosted.org/packages/41/92/648f1afd3f20b71e880ca797a960f638d39d243e233a7082c93093c22378/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7", size = 3264558, upload-time = "2025-10-10T15:35:29.93Z" }, + { url = "https://files.pythonhosted.org/packages/40/cf/e27d7ee61a10f74b17740918e23cbc5bc62011b48282170dc4c66da8ec0f/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d", size = 3301570, upload-time = "2025-10-10T15:43:48.407Z" }, + { url = "https://files.pythonhosted.org/packages/3b/3d/3116a9a7b63e780fb402799b6da227435be878b6846b192f076d2f838654/sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4", size = 2103447, upload-time = "2025-10-10T15:03:21.678Z" }, + { url = "https://files.pythonhosted.org/packages/25/83/24690e9dfc241e6ab062df82cc0df7f4231c79ba98b273fa496fb3dd78ed/sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e", size = 2130912, upload-time = "2025-10-10T15:03:24.656Z" }, + { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718, upload-time = "2025-10-10T15:29:45.32Z" }, ] [[package]] name = "sqlglot" version = "28.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/8d/9ce5904aca760b81adf821c77a1dcf07c98f9caaa7e3b5c991c541ff89d2/sqlglot-28.0.0.tar.gz", hash = "sha256:cc9a651ef4182e61dac58aa955e5fb21845a5865c6a4d7d7b5a7857450285ad4", size = 5520798 } +sdist = { url = "https://files.pythonhosted.org/packages/52/8d/9ce5904aca760b81adf821c77a1dcf07c98f9caaa7e3b5c991c541ff89d2/sqlglot-28.0.0.tar.gz", hash = "sha256:cc9a651ef4182e61dac58aa955e5fb21845a5865c6a4d7d7b5a7857450285ad4", size = 5520798, upload-time = "2025-11-17T10:34:57.016Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/6d/86de134f40199105d2fee1b066741aa870b3ce75ee74018d9c8508bbb182/sqlglot-28.0.0-py3-none-any.whl", hash = "sha256:ac1778e7fa4812f4f7e5881b260632fc167b00ca4c1226868891fb15467122e4", size = 536127 }, + { url = 
"https://files.pythonhosted.org/packages/56/6d/86de134f40199105d2fee1b066741aa870b3ce75ee74018d9c8508bbb182/sqlglot-28.0.0-py3-none-any.whl", hash = "sha256:ac1778e7fa4812f4f7e5881b260632fc167b00ca4c1226868891fb15467122e4", size = 536127, upload-time = "2025-11-17T10:34:55.192Z" }, ] [[package]] name = "sqlparse" version = "0.5.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999 } +sdist = { url = "https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999, upload-time = "2024-12-10T12:05:30.728Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415 }, + { url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415, upload-time = "2024-12-10T12:05:27.824Z" }, ] [[package]] name = "sseclient-py" version = "1.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/ed/3df5ab8bb0c12f86c28d0cadb11ed1de44a92ed35ce7ff4fd5518a809325/sseclient-py-1.8.0.tar.gz", hash = "sha256:c547c5c1a7633230a38dc599a21a2dc638f9b5c297286b48b46b935c71fac3e8", size = 7791 } +sdist = { url = "https://files.pythonhosted.org/packages/e8/ed/3df5ab8bb0c12f86c28d0cadb11ed1de44a92ed35ce7ff4fd5518a809325/sseclient-py-1.8.0.tar.gz", hash = "sha256:c547c5c1a7633230a38dc599a21a2dc638f9b5c297286b48b46b935c71fac3e8", size = 7791, upload-time = "2023-09-01T19:39:20.45Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/49/58/97655efdfeb5b4eeab85b1fc5d3fa1023661246c2ab2a26ea8e47402d4f2/sseclient_py-1.8.0-py2.py3-none-any.whl", hash = "sha256:4ecca6dc0b9f963f8384e9d7fd529bf93dd7d708144c4fb5da0e0a1a926fee83", size = 8828 }, + { url = "https://files.pythonhosted.org/packages/49/58/97655efdfeb5b4eeab85b1fc5d3fa1023661246c2ab2a26ea8e47402d4f2/sseclient_py-1.8.0-py2.py3-none-any.whl", hash = "sha256:4ecca6dc0b9f963f8384e9d7fd529bf93dd7d708144c4fb5da0e0a1a926fee83", size = 8828, upload-time = "2023-09-01T19:39:17.627Z" }, ] [[package]] @@ -5822,18 +5872,18 @@ dependencies = [ { name = "anyio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1b/3f/507c21db33b66fb027a332f2cb3abbbe924cc3a79ced12f01ed8645955c9/starlette-0.49.1.tar.gz", hash = "sha256:481a43b71e24ed8c43b11ea02f5353d77840e01480881b8cb5a26b8cae64a8cb", size = 2654703 } +sdist = { url = "https://files.pythonhosted.org/packages/1b/3f/507c21db33b66fb027a332f2cb3abbbe924cc3a79ced12f01ed8645955c9/starlette-0.49.1.tar.gz", hash = "sha256:481a43b71e24ed8c43b11ea02f5353d77840e01480881b8cb5a26b8cae64a8cb", size = 2654703, upload-time = "2025-10-28T17:34:10.928Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/da/545b75d420bb23b5d494b0517757b351963e974e79933f01e05c929f20a6/starlette-0.49.1-py3-none-any.whl", hash = 
"sha256:d92ce9f07e4a3caa3ac13a79523bd18e3bc0042bb8ff2d759a8e7dd0e1859875", size = 74175 }, + { url = "https://files.pythonhosted.org/packages/51/da/545b75d420bb23b5d494b0517757b351963e974e79933f01e05c929f20a6/starlette-0.49.1-py3-none-any.whl", hash = "sha256:d92ce9f07e4a3caa3ac13a79523bd18e3bc0042bb8ff2d759a8e7dd0e1859875", size = 74175, upload-time = "2025-10-28T17:34:09.13Z" }, ] [[package]] name = "stdlib-list" version = "0.11.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5d/09/8d5c564931ae23bef17420a6c72618463a59222ca4291a7dd88de8a0d490/stdlib_list-0.11.1.tar.gz", hash = "sha256:95ebd1d73da9333bba03ccc097f5bac05e3aa03e6822a0c0290f87e1047f1857", size = 60442 } +sdist = { url = "https://files.pythonhosted.org/packages/5d/09/8d5c564931ae23bef17420a6c72618463a59222ca4291a7dd88de8a0d490/stdlib_list-0.11.1.tar.gz", hash = "sha256:95ebd1d73da9333bba03ccc097f5bac05e3aa03e6822a0c0290f87e1047f1857", size = 60442, upload-time = "2025-02-18T15:39:38.769Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/c7/4102536de33c19d090ed2b04e90e7452e2e3dc653cf3323208034eaaca27/stdlib_list-0.11.1-py3-none-any.whl", hash = "sha256:9029ea5e3dfde8cd4294cfd4d1797be56a67fc4693c606181730148c3fd1da29", size = 83620 }, + { url = "https://files.pythonhosted.org/packages/88/c7/4102536de33c19d090ed2b04e90e7452e2e3dc653cf3323208034eaaca27/stdlib_list-0.11.1-py3-none-any.whl", hash = "sha256:9029ea5e3dfde8cd4294cfd4d1797be56a67fc4693c606181730148c3fd1da29", size = 83620, upload-time = "2025-02-18T15:39:37.02Z" }, ] [[package]] @@ -5845,18 +5895,18 @@ dependencies = [ { name = "httpx", extra = ["http2"] }, { name = "python-dateutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/e2/280fe75f65e7a3ca680b7843acfc572a63aa41230e3d3c54c66568809c85/storage3-0.12.1.tar.gz", hash = "sha256:32ea8f5eb2f7185c2114a4f6ae66d577722e32503f0a30b56e7ed5c7f13e6b48", size = 10198 } +sdist = { url = "https://files.pythonhosted.org/packages/b9/e2/280fe75f65e7a3ca680b7843acfc572a63aa41230e3d3c54c66568809c85/storage3-0.12.1.tar.gz", hash = "sha256:32ea8f5eb2f7185c2114a4f6ae66d577722e32503f0a30b56e7ed5c7f13e6b48", size = 10198, upload-time = "2025-08-05T18:09:11.989Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/3b/c5f8709fc5349928e591fee47592eeff78d29a7d75b097f96a4e01de028d/storage3-0.12.1-py3-none-any.whl", hash = "sha256:9da77fd4f406b019fdcba201e9916aefbf615ef87f551253ce427d8136459a34", size = 18420 }, + { url = "https://files.pythonhosted.org/packages/7f/3b/c5f8709fc5349928e591fee47592eeff78d29a7d75b097f96a4e01de028d/storage3-0.12.1-py3-none-any.whl", hash = "sha256:9da77fd4f406b019fdcba201e9916aefbf615ef87f551253ce427d8136459a34", size = 18420, upload-time = "2025-08-05T18:09:10.365Z" }, ] [[package]] name = "strenum" version = "0.4.15" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/85/ad/430fb60d90e1d112a62ff57bdd1f286ec73a2a0331272febfddd21f330e1/StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff", size = 23384 } +sdist = { url = "https://files.pythonhosted.org/packages/85/ad/430fb60d90e1d112a62ff57bdd1f286ec73a2a0331272febfddd21f330e1/StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff", size = 23384, upload-time = "2023-06-29T22:02:58.399Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/81/69/297302c5f5f59c862faa31e6cb9a4cd74721cd1e052b38e464c5b402df8b/StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659", size = 8851 }, + { url = "https://files.pythonhosted.org/packages/81/69/297302c5f5f59c862faa31e6cb9a4cd74721cd1e052b38e464c5b402df8b/StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659", size = 8851, upload-time = "2023-06-29T22:02:56.947Z" }, ] [[package]] @@ -5871,9 +5921,9 @@ dependencies = [ { name = "supabase-auth" }, { name = "supabase-functions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/99/d2/3b135af55dd5788bd47875bb81f99c870054b990c030e51fd641a61b10b5/supabase-2.18.1.tar.gz", hash = "sha256:205787b1fbb43d6bc997c06fe3a56137336d885a1b56ec10f0012f2a2905285d", size = 11549 } +sdist = { url = "https://files.pythonhosted.org/packages/99/d2/3b135af55dd5788bd47875bb81f99c870054b990c030e51fd641a61b10b5/supabase-2.18.1.tar.gz", hash = "sha256:205787b1fbb43d6bc997c06fe3a56137336d885a1b56ec10f0012f2a2905285d", size = 11549, upload-time = "2025-08-12T19:02:27.852Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/33/0e0062fea22cfe01d466dee83f56b3ed40c89bdcbca671bafeba3fe86b92/supabase-2.18.1-py3-none-any.whl", hash = "sha256:4fdd7b7247178a847f97ecd34f018dcb4775e487c8ff46b1208a01c933691fe9", size = 18683 }, + { url = "https://files.pythonhosted.org/packages/a8/33/0e0062fea22cfe01d466dee83f56b3ed40c89bdcbca671bafeba3fe86b92/supabase-2.18.1-py3-none-any.whl", hash = "sha256:4fdd7b7247178a847f97ecd34f018dcb4775e487c8ff46b1208a01c933691fe9", size = 18683, upload-time = "2025-08-12T19:02:26.68Z" }, ] [[package]] @@ -5885,9 +5935,9 @@ dependencies = [ { name = "pydantic" }, { name = "pyjwt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/e9/3d6f696a604752803b9e389b04d454f4b26a29b5d155b257fea4af8dc543/supabase_auth-2.12.3.tar.gz", hash = "sha256:8d3b67543f3b27f5adbfe46b66990424c8504c6b08c1141ec572a9802761edc2", size = 38430 } +sdist = { url = "https://files.pythonhosted.org/packages/e9/e9/3d6f696a604752803b9e389b04d454f4b26a29b5d155b257fea4af8dc543/supabase_auth-2.12.3.tar.gz", hash = "sha256:8d3b67543f3b27f5adbfe46b66990424c8504c6b08c1141ec572a9802761edc2", size = 38430, upload-time = "2025-07-04T06:49:22.906Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/a6/4102d5fa08a8521d9432b4d10bb58fedbd1f92b211d1b45d5394f5cb9021/supabase_auth-2.12.3-py3-none-any.whl", hash = "sha256:15c7580e1313d30ffddeb3221cb3cdb87c2a80fd220bf85d67db19cd1668435b", size = 44417 }, + { url = "https://files.pythonhosted.org/packages/96/a6/4102d5fa08a8521d9432b4d10bb58fedbd1f92b211d1b45d5394f5cb9021/supabase_auth-2.12.3-py3-none-any.whl", hash = "sha256:15c7580e1313d30ffddeb3221cb3cdb87c2a80fd220bf85d67db19cd1668435b", size = 44417, upload-time = "2025-07-04T06:49:21.351Z" }, ] [[package]] @@ -5898,9 +5948,9 @@ dependencies = [ { name = "httpx", extra = ["http2"] }, { name = "strenum" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6c/e4/6df7cd4366396553449e9907c745862ebf010305835b2bac99933dd7db9d/supabase_functions-0.10.1.tar.gz", hash = "sha256:4779d33a1cc3d4aea567f586b16d8efdb7cddcd6b40ce367c5fb24288af3a4f1", size = 5025 } +sdist = { url = "https://files.pythonhosted.org/packages/6c/e4/6df7cd4366396553449e9907c745862ebf010305835b2bac99933dd7db9d/supabase_functions-0.10.1.tar.gz", hash = 
"sha256:4779d33a1cc3d4aea567f586b16d8efdb7cddcd6b40ce367c5fb24288af3a4f1", size = 5025, upload-time = "2025-06-23T18:26:12.239Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/06/060118a1e602c9bda8e4bf950bd1c8b5e1542349f2940ec57541266fabe1/supabase_functions-0.10.1-py3-none-any.whl", hash = "sha256:1db85e20210b465075aacee4e171332424f7305f9903c5918096be1423d6fcc5", size = 8275 }, + { url = "https://files.pythonhosted.org/packages/bc/06/060118a1e602c9bda8e4bf950bd1c8b5e1542349f2940ec57541266fabe1/supabase_functions-0.10.1-py3-none-any.whl", hash = "sha256:1db85e20210b465075aacee4e171332424f7305f9903c5918096be1423d6fcc5", size = 8275, upload-time = "2025-06-23T18:26:10.387Z" }, ] [[package]] @@ -5910,9 +5960,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mpmath" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921 } +sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353 }, + { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, ] [[package]] @@ -5930,18 +5980,18 @@ dependencies = [ { name = "six" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/39/47a3ec8e42fe74dd05af1dfed9c3b02b8f8adfdd8656b2c5d4f95f975c9f/tablestore-6.3.7.tar.gz", hash = "sha256:990682dbf6b602f317a2d359b4281dcd054b4326081e7a67b73dbbe95407be51", size = 117440 } +sdist = { url = "https://files.pythonhosted.org/packages/f1/39/47a3ec8e42fe74dd05af1dfed9c3b02b8f8adfdd8656b2c5d4f95f975c9f/tablestore-6.3.7.tar.gz", hash = "sha256:990682dbf6b602f317a2d359b4281dcd054b4326081e7a67b73dbbe95407be51", size = 117440, upload-time = "2025-10-29T02:57:57.415Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/55/1b24d8c369204a855ac652712f815e88a4909802094e613fe3742a2d80e3/tablestore-6.3.7-py3-none-any.whl", hash = "sha256:38dcc55085912ab2515e183afd4532a58bb628a763590a99fc1bd2a4aba6855c", size = 139041 }, + { url = "https://files.pythonhosted.org/packages/fe/55/1b24d8c369204a855ac652712f815e88a4909802094e613fe3742a2d80e3/tablestore-6.3.7-py3-none-any.whl", hash = "sha256:38dcc55085912ab2515e183afd4532a58bb628a763590a99fc1bd2a4aba6855c", size = 139041, upload-time = "2025-10-29T02:57:55.727Z" }, ] [[package]] name = "tabulate" version = "0.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090 } +sdist = { url = 
"https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252 }, + { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" }, ] [[package]] @@ -5954,7 +6004,7 @@ dependencies = [ { name = "numpy" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/81/be13f41706520018208bb674f314eec0f29ef63c919959d60e55dfcc4912/tcvdb_text-1.1.2.tar.gz", hash = "sha256:d47c37c95a81f379b12e3b00b8f37200c7e7339afa9a35d24fc7b683917985ec", size = 57859909 } +sdist = { url = "https://files.pythonhosted.org/packages/20/81/be13f41706520018208bb674f314eec0f29ef63c919959d60e55dfcc4912/tcvdb_text-1.1.2.tar.gz", hash = "sha256:d47c37c95a81f379b12e3b00b8f37200c7e7339afa9a35d24fc7b683917985ec", size = 57859909, upload-time = "2025-07-11T08:20:19.569Z" } [[package]] name = "tcvectordb" @@ -5971,18 +6021,18 @@ dependencies = [ { name = "ujson" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/19/ec/c80579aff1539257aafcf8dc3f3c13630171f299d65b33b68440e166f27c/tcvectordb-1.6.4.tar.gz", hash = "sha256:6fb18e15ccc6744d5147e9bbd781f84df3d66112de7d9cc615878b3f72d3a29a", size = 75188 } +sdist = { url = "https://files.pythonhosted.org/packages/19/ec/c80579aff1539257aafcf8dc3f3c13630171f299d65b33b68440e166f27c/tcvectordb-1.6.4.tar.gz", hash = "sha256:6fb18e15ccc6744d5147e9bbd781f84df3d66112de7d9cc615878b3f72d3a29a", size = 75188, upload-time = "2025-03-05T09:14:19.925Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/bf/f38d9f629324ecffca8fe934e8df47e1233a9021b0739447e59e9fb248f9/tcvectordb-1.6.4-py3-none-any.whl", hash = "sha256:06ef13e7edb4575b04615065fc90e1a28374e318ada305f3786629aec5c9318a", size = 88917 }, + { url = "https://files.pythonhosted.org/packages/68/bf/f38d9f629324ecffca8fe934e8df47e1233a9021b0739447e59e9fb248f9/tcvectordb-1.6.4-py3-none-any.whl", hash = "sha256:06ef13e7edb4575b04615065fc90e1a28374e318ada305f3786629aec5c9318a", size = 88917, upload-time = "2025-03-05T09:14:17.494Z" }, ] [[package]] name = "tenacity" version = "9.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036 } +sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", 
size = 28248 }, + { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, ] [[package]] @@ -5996,9 +6046,9 @@ dependencies = [ { name = "urllib3" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/b3/c272537f3ea2f312555efeb86398cc382cd07b740d5f3c730918c36e64e1/testcontainers-4.13.3.tar.gz", hash = "sha256:9d82a7052c9a53c58b69e1dc31da8e7a715e8b3ec1c4df5027561b47e2efe646", size = 79064 } +sdist = { url = "https://files.pythonhosted.org/packages/fc/b3/c272537f3ea2f312555efeb86398cc382cd07b740d5f3c730918c36e64e1/testcontainers-4.13.3.tar.gz", hash = "sha256:9d82a7052c9a53c58b69e1dc31da8e7a715e8b3ec1c4df5027561b47e2efe646", size = 79064, upload-time = "2025-11-14T05:08:47.584Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/27/c2f24b19dafa197c514abe70eda69bc031c5152c6b1f1e5b20099e2ceedd/testcontainers-4.13.3-py3-none-any.whl", hash = "sha256:063278c4805ffa6dd85e56648a9da3036939e6c0ac1001e851c9276b19b05970", size = 124784 }, + { url = "https://files.pythonhosted.org/packages/73/27/c2f24b19dafa197c514abe70eda69bc031c5152c6b1f1e5b20099e2ceedd/testcontainers-4.13.3-py3-none-any.whl", hash = "sha256:063278c4805ffa6dd85e56648a9da3036939e6c0ac1001e851c9276b19b05970", size = 124784, upload-time = "2025-11-14T05:08:46.053Z" }, ] [[package]] @@ -6008,9 +6058,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1a/98/ab324fdfbbf064186ca621e21aa3871ddf886ecb78358a9864509241e802/tidb_vector-0.0.9.tar.gz", hash = "sha256:e10680872532808e1bcffa7a92dd2b05bb65d63982f833edb3c6cd590dec7709", size = 16948 } +sdist = { url = "https://files.pythonhosted.org/packages/1a/98/ab324fdfbbf064186ca621e21aa3871ddf886ecb78358a9864509241e802/tidb_vector-0.0.9.tar.gz", hash = "sha256:e10680872532808e1bcffa7a92dd2b05bb65d63982f833edb3c6cd590dec7709", size = 16948, upload-time = "2024-05-08T07:54:36.955Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/bb/0f3b7b4d31537e90f4dd01f50fa58daef48807c789c1c1bdd610204ff103/tidb_vector-0.0.9-py3-none-any.whl", hash = "sha256:db060ee1c981326d3882d0810e0b8b57811f278668f9381168997b360c4296c2", size = 17026 }, + { url = "https://files.pythonhosted.org/packages/5d/bb/0f3b7b4d31537e90f4dd01f50fa58daef48807c789c1c1bdd610204ff103/tidb_vector-0.0.9-py3-none-any.whl", hash = "sha256:db060ee1c981326d3882d0810e0b8b57811f278668f9381168997b360c4296c2", size = 17026, upload-time = "2024-05-08T07:54:34.849Z" }, ] [[package]] @@ -6021,20 +6071,20 @@ dependencies = [ { name = "regex" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991 } +sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991, upload-time = "2025-02-14T06:03:01.003Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/4d/ae/4613a59a2a48e761c5161237fc850eb470b4bb93696db89da51b79a871f1/tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e", size = 1065987 }, - { url = "https://files.pythonhosted.org/packages/3f/86/55d9d1f5b5a7e1164d0f1538a85529b5fcba2b105f92db3622e5d7de6522/tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348", size = 1009155 }, - { url = "https://files.pythonhosted.org/packages/03/58/01fb6240df083b7c1916d1dcb024e2b761213c95d576e9f780dfb5625a76/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33", size = 1142898 }, - { url = "https://files.pythonhosted.org/packages/b1/73/41591c525680cd460a6becf56c9b17468d3711b1df242c53d2c7b2183d16/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136", size = 1197535 }, - { url = "https://files.pythonhosted.org/packages/7d/7c/1069f25521c8f01a1a182f362e5c8e0337907fae91b368b7da9c3e39b810/tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336", size = 1259548 }, - { url = "https://files.pythonhosted.org/packages/6f/07/c67ad1724b8e14e2b4c8cca04b15da158733ac60136879131db05dda7c30/tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb", size = 893895 }, - { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073 }, - { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075 }, - { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754 }, - { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678 }, - { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283 }, - { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897 }, + { url = "https://files.pythonhosted.org/packages/4d/ae/4613a59a2a48e761c5161237fc850eb470b4bb93696db89da51b79a871f1/tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e", size = 1065987, upload-time = "2025-02-14T06:02:14.174Z" }, + { url = "https://files.pythonhosted.org/packages/3f/86/55d9d1f5b5a7e1164d0f1538a85529b5fcba2b105f92db3622e5d7de6522/tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348", size = 1009155, upload-time = "2025-02-14T06:02:15.384Z" }, + { url = "https://files.pythonhosted.org/packages/03/58/01fb6240df083b7c1916d1dcb024e2b761213c95d576e9f780dfb5625a76/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33", size = 1142898, upload-time = "2025-02-14T06:02:16.666Z" }, + { url = "https://files.pythonhosted.org/packages/b1/73/41591c525680cd460a6becf56c9b17468d3711b1df242c53d2c7b2183d16/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136", size = 1197535, upload-time = "2025-02-14T06:02:18.595Z" }, + { url = "https://files.pythonhosted.org/packages/7d/7c/1069f25521c8f01a1a182f362e5c8e0337907fae91b368b7da9c3e39b810/tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336", size = 1259548, upload-time = "2025-02-14T06:02:20.729Z" }, + { url = "https://files.pythonhosted.org/packages/6f/07/c67ad1724b8e14e2b4c8cca04b15da158733ac60136879131db05dda7c30/tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb", size = 893895, upload-time = "2025-02-14T06:02:22.67Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073, upload-time = "2025-02-14T06:02:24.768Z" }, + { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075, upload-time = "2025-02-14T06:02:26.92Z" }, + { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754, upload-time = "2025-02-14T06:02:28.124Z" }, + { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678, upload-time = "2025-02-14T06:02:29.845Z" }, + { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283, upload-time = "2025-02-14T06:02:33.838Z" }, + { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897, upload-time = "2025-02-14T06:02:36.265Z" }, ] [[package]] @@ -6044,61 +6094,61 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123 } +sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318 }, - { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478 }, - { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994 }, - { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141 }, - { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049 }, - { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730 }, - { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560 }, - { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221 }, - { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569 }, - { url = 
"https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599 }, - { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862 }, - { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250 }, - { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003 }, - { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684 }, + { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318, upload-time = "2025-09-19T09:49:11.848Z" }, + { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478, upload-time = "2025-09-19T09:49:09.759Z" }, + { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994, upload-time = "2025-09-19T09:48:56.701Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141, upload-time = "2025-09-19T09:48:59.749Z" }, + { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049, upload-time = "2025-09-19T09:49:05.868Z" }, + { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730, upload-time = "2025-09-19T09:49:01.832Z" }, + { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560, upload-time = "2025-09-19T09:49:03.867Z" }, + { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221, upload-time = "2025-09-19T09:49:07.664Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569, upload-time = "2025-09-19T09:49:14.214Z" }, + { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599, upload-time = "2025-09-19T09:49:16.639Z" }, + { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862, upload-time = "2025-09-19T09:49:19.146Z" }, + { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250, upload-time = "2025-09-19T09:49:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003, upload-time = "2025-09-19T09:49:27.089Z" }, + { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" }, ] [[package]] name = "toml" version = "0.10.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253, upload-time = "2020-11-01T01:40:22.204Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 }, + { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" }, ] [[package]] name = "tomli" version = 
"2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392 } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236 }, - { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084 }, - { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832 }, - { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052 }, - { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555 }, - { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128 }, - { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445 }, - { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165 }, - { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891 }, - { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796 }, - { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash 
= "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121 }, - { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070 }, - { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859 }, - { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296 }, - { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124 }, - { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698 }, - { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408 }, + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, 
] [[package]] name = "tos" -version = "2.7.2" +version = "2.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "crcmod" }, @@ -6106,8 +6156,9 @@ dependencies = [ { name = "pytz" }, { name = "requests" }, { name = "six" }, + { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/01/f811af86f1f80d5f289be075c3b281e74bf3fe081cfbe5cfce44954d2c3a/tos-2.7.2.tar.gz", hash = "sha256:3c31257716785bca7b2cac51474ff32543cda94075a7b7aff70d769c15c7b7ed", size = 123407 } +sdist = { url = "https://files.pythonhosted.org/packages/9a/b3/13451226f564f88d9db2323e9b7eabcced792a0ad5ee1e333751a7634257/tos-2.9.0.tar.gz", hash = "sha256:861cfc348e770f099f911cb96b2c41774ada6c9c51b7a89d97e0c426074dd99e", size = 157071, upload-time = "2026-01-06T04:13:08.921Z" } [[package]] name = "tqdm" @@ -6116,9 +6167,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, ] [[package]] @@ -6137,34 +6188,34 @@ dependencies = [ { name = "tokenizers" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e5/82/0bcfddd134cdf53440becb5e738257cc3cf34cf229d63b57bfd288e6579f/transformers-4.56.2.tar.gz", hash = "sha256:5e7c623e2d7494105c726dd10f6f90c2c99a55ebe86eef7233765abd0cb1c529", size = 9844296 } +sdist = { url = "https://files.pythonhosted.org/packages/e5/82/0bcfddd134cdf53440becb5e738257cc3cf34cf229d63b57bfd288e6579f/transformers-4.56.2.tar.gz", hash = "sha256:5e7c623e2d7494105c726dd10f6f90c2c99a55ebe86eef7233765abd0cb1c529", size = 9844296, upload-time = "2025-09-19T15:16:26.778Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/26/2591b48412bde75e33bfd292034103ffe41743cacd03120e3242516cd143/transformers-4.56.2-py3-none-any.whl", hash = "sha256:79c03d0e85b26cb573c109ff9eafa96f3c8d4febfd8a0774e8bba32702dd6dde", size = 11608055 }, + { url = "https://files.pythonhosted.org/packages/70/26/2591b48412bde75e33bfd292034103ffe41743cacd03120e3242516cd143/transformers-4.56.2-py3-none-any.whl", hash = "sha256:79c03d0e85b26cb573c109ff9eafa96f3c8d4febfd8a0774e8bba32702dd6dde", size = 11608055, upload-time = "2025-09-19T15:16:23.736Z" }, ] [[package]] name = "ty" version = "0.0.1a27" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8f/65/3592d7c73d80664378fc90d0a00c33449a99cbf13b984433c883815245f3/ty-0.0.1a27.tar.gz", hash = 
"sha256:d34fe04979f2c912700cbf0919e8f9b4eeaa10c4a2aff7450e5e4c90f998bc28", size = 4516059 } +sdist = { url = "https://files.pythonhosted.org/packages/8f/65/3592d7c73d80664378fc90d0a00c33449a99cbf13b984433c883815245f3/ty-0.0.1a27.tar.gz", hash = "sha256:d34fe04979f2c912700cbf0919e8f9b4eeaa10c4a2aff7450e5e4c90f998bc28", size = 4516059, upload-time = "2025-11-18T21:55:18.381Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/05/7945aa97356446fd53ed3ddc7ee02a88d8ad394217acd9428f472d6b109d/ty-0.0.1a27-py3-none-linux_armv6l.whl", hash = "sha256:3cbb735f5ecb3a7a5f5b82fb24da17912788c109086df4e97d454c8fb236fbc5", size = 9375047 }, - { url = "https://files.pythonhosted.org/packages/69/4e/89b167a03de0e9ec329dc89bc02e8694768e4576337ef6c0699987681342/ty-0.0.1a27-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4a6367236dc456ba2416563301d498aef8c6f8959be88777ef7ba5ac1bf15f0b", size = 9169540 }, - { url = "https://files.pythonhosted.org/packages/38/07/e62009ab9cc242e1becb2bd992097c80a133fce0d4f055fba6576150d08a/ty-0.0.1a27-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8e93e231a1bcde964cdb062d2d5e549c24493fb1638eecae8fcc42b81e9463a4", size = 8711942 }, - { url = "https://files.pythonhosted.org/packages/b5/43/f35716ec15406f13085db52e762a3cc663c651531a8124481d0ba602eca0/ty-0.0.1a27-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5b6a8166b60117da1179851a3d719cc798bf7e61f91b35d76242f0059e9ae1d", size = 8984208 }, - { url = "https://files.pythonhosted.org/packages/2d/79/486a3374809523172379768de882c7a369861165802990177fe81489b85f/ty-0.0.1a27-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfbe8b0e831c072b79a078d6c126d7f4d48ca17f64a103de1b93aeda32265dc5", size = 9157209 }, - { url = "https://files.pythonhosted.org/packages/ff/08/9a7c8efcb327197d7d347c548850ef4b54de1c254981b65e8cd0672dc327/ty-0.0.1a27-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90e09678331552e7c25d7eb47868b0910dc5b9b212ae22c8ce71a52d6576ddbb", size = 9519207 }, - { url = "https://files.pythonhosted.org/packages/e0/9d/7b4680683e83204b9edec551bb91c21c789ebc586b949c5218157ee474b7/ty-0.0.1a27-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:88c03e4beeca79d85a5618921e44b3a6ea957e0453e08b1cdd418b51da645939", size = 10148794 }, - { url = "https://files.pythonhosted.org/packages/89/21/8b961b0ab00c28223f06b33222427a8e31aa04f39d1b236acc93021c626c/ty-0.0.1a27-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ece5811322789fefe22fc088ed36c5879489cd39e913f9c1ff2a7678f089c61", size = 9900563 }, - { url = "https://files.pythonhosted.org/packages/85/eb/95e1f0b426c2ea8d443aa923fcab509059c467bbe64a15baaf573fea1203/ty-0.0.1a27-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f2ccb4f0fddcd6e2017c268dfce2489e9a36cb82a5900afe6425835248b1086", size = 9926355 }, - { url = "https://files.pythonhosted.org/packages/f5/78/40e7f072049e63c414f2845df780be3a494d92198c87c2ffa65e63aecf3f/ty-0.0.1a27-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33450528312e41d003e96a1647780b2783ab7569bbc29c04fc76f2d1908061e3", size = 9480580 }, - { url = "https://files.pythonhosted.org/packages/18/da/f4a2dfedab39096808ddf7475f35ceb750d9a9da840bee4afd47b871742f/ty-0.0.1a27-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0a9ac635deaa2b15947701197ede40cdecd13f89f19351872d16f9ccd773fa1", size = 8957524 }, - { url = 
"https://files.pythonhosted.org/packages/21/ea/26fee9a20cf77a157316fd3ab9c6db8ad5a0b20b2d38a43f3452622587ac/ty-0.0.1a27-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:797fb2cd49b6b9b3ac9f2f0e401fb02d3aa155badc05a8591d048d38d28f1e0c", size = 9201098 }, - { url = "https://files.pythonhosted.org/packages/b0/53/e14591d1275108c9ae28f97ac5d4b93adcc2c8a4b1b9a880dfa9d07c15f8/ty-0.0.1a27-py3-none-musllinux_1_2_i686.whl", hash = "sha256:7fe81679a0941f85e98187d444604e24b15bde0a85874957c945751756314d03", size = 9275470 }, - { url = "https://files.pythonhosted.org/packages/37/44/e2c9acecac70bf06fb41de285e7be2433c2c9828f71e3bf0e886fc85c4fd/ty-0.0.1a27-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:355f651d0cdb85535a82bd9f0583f77b28e3fd7bba7b7da33dcee5a576eff28b", size = 9592394 }, - { url = "https://files.pythonhosted.org/packages/ee/a7/4636369731b24ed07c2b4c7805b8d990283d677180662c532d82e4ef1a36/ty-0.0.1a27-py3-none-win32.whl", hash = "sha256:61782e5f40e6df622093847b34c366634b75d53f839986f1bf4481672ad6cb55", size = 8783816 }, - { url = "https://files.pythonhosted.org/packages/a7/1d/b76487725628d9e81d9047dc0033a5e167e0d10f27893d04de67fe1a9763/ty-0.0.1a27-py3-none-win_amd64.whl", hash = "sha256:c682b238085d3191acddcf66ef22641562946b1bba2a7f316012d5b2a2f4de11", size = 9616833 }, - { url = "https://files.pythonhosted.org/packages/3a/db/c7cd5276c8f336a3cf87992b75ba9d486a7cf54e753fcd42495b3bc56fb7/ty-0.0.1a27-py3-none-win_arm64.whl", hash = "sha256:e146dfa32cbb0ac6afb0cb65659e87e4e313715e68d76fe5ae0a4b3d5b912ce8", size = 9137796 }, + { url = "https://files.pythonhosted.org/packages/e6/05/7945aa97356446fd53ed3ddc7ee02a88d8ad394217acd9428f472d6b109d/ty-0.0.1a27-py3-none-linux_armv6l.whl", hash = "sha256:3cbb735f5ecb3a7a5f5b82fb24da17912788c109086df4e97d454c8fb236fbc5", size = 9375047, upload-time = "2025-11-18T21:54:31.577Z" }, + { url = "https://files.pythonhosted.org/packages/69/4e/89b167a03de0e9ec329dc89bc02e8694768e4576337ef6c0699987681342/ty-0.0.1a27-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4a6367236dc456ba2416563301d498aef8c6f8959be88777ef7ba5ac1bf15f0b", size = 9169540, upload-time = "2025-11-18T21:54:34.036Z" }, + { url = "https://files.pythonhosted.org/packages/38/07/e62009ab9cc242e1becb2bd992097c80a133fce0d4f055fba6576150d08a/ty-0.0.1a27-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8e93e231a1bcde964cdb062d2d5e549c24493fb1638eecae8fcc42b81e9463a4", size = 8711942, upload-time = "2025-11-18T21:54:36.3Z" }, + { url = "https://files.pythonhosted.org/packages/b5/43/f35716ec15406f13085db52e762a3cc663c651531a8124481d0ba602eca0/ty-0.0.1a27-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5b6a8166b60117da1179851a3d719cc798bf7e61f91b35d76242f0059e9ae1d", size = 8984208, upload-time = "2025-11-18T21:54:39.453Z" }, + { url = "https://files.pythonhosted.org/packages/2d/79/486a3374809523172379768de882c7a369861165802990177fe81489b85f/ty-0.0.1a27-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfbe8b0e831c072b79a078d6c126d7f4d48ca17f64a103de1b93aeda32265dc5", size = 9157209, upload-time = "2025-11-18T21:54:42.664Z" }, + { url = "https://files.pythonhosted.org/packages/ff/08/9a7c8efcb327197d7d347c548850ef4b54de1c254981b65e8cd0672dc327/ty-0.0.1a27-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90e09678331552e7c25d7eb47868b0910dc5b9b212ae22c8ce71a52d6576ddbb", size = 9519207, upload-time = "2025-11-18T21:54:45.311Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/9d/7b4680683e83204b9edec551bb91c21c789ebc586b949c5218157ee474b7/ty-0.0.1a27-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:88c03e4beeca79d85a5618921e44b3a6ea957e0453e08b1cdd418b51da645939", size = 10148794, upload-time = "2025-11-18T21:54:48.329Z" }, + { url = "https://files.pythonhosted.org/packages/89/21/8b961b0ab00c28223f06b33222427a8e31aa04f39d1b236acc93021c626c/ty-0.0.1a27-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ece5811322789fefe22fc088ed36c5879489cd39e913f9c1ff2a7678f089c61", size = 9900563, upload-time = "2025-11-18T21:54:51.214Z" }, + { url = "https://files.pythonhosted.org/packages/85/eb/95e1f0b426c2ea8d443aa923fcab509059c467bbe64a15baaf573fea1203/ty-0.0.1a27-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f2ccb4f0fddcd6e2017c268dfce2489e9a36cb82a5900afe6425835248b1086", size = 9926355, upload-time = "2025-11-18T21:54:53.927Z" }, + { url = "https://files.pythonhosted.org/packages/f5/78/40e7f072049e63c414f2845df780be3a494d92198c87c2ffa65e63aecf3f/ty-0.0.1a27-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33450528312e41d003e96a1647780b2783ab7569bbc29c04fc76f2d1908061e3", size = 9480580, upload-time = "2025-11-18T21:54:56.617Z" }, + { url = "https://files.pythonhosted.org/packages/18/da/f4a2dfedab39096808ddf7475f35ceb750d9a9da840bee4afd47b871742f/ty-0.0.1a27-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0a9ac635deaa2b15947701197ede40cdecd13f89f19351872d16f9ccd773fa1", size = 8957524, upload-time = "2025-11-18T21:54:59.085Z" }, + { url = "https://files.pythonhosted.org/packages/21/ea/26fee9a20cf77a157316fd3ab9c6db8ad5a0b20b2d38a43f3452622587ac/ty-0.0.1a27-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:797fb2cd49b6b9b3ac9f2f0e401fb02d3aa155badc05a8591d048d38d28f1e0c", size = 9201098, upload-time = "2025-11-18T21:55:01.845Z" }, + { url = "https://files.pythonhosted.org/packages/b0/53/e14591d1275108c9ae28f97ac5d4b93adcc2c8a4b1b9a880dfa9d07c15f8/ty-0.0.1a27-py3-none-musllinux_1_2_i686.whl", hash = "sha256:7fe81679a0941f85e98187d444604e24b15bde0a85874957c945751756314d03", size = 9275470, upload-time = "2025-11-18T21:55:04.23Z" }, + { url = "https://files.pythonhosted.org/packages/37/44/e2c9acecac70bf06fb41de285e7be2433c2c9828f71e3bf0e886fc85c4fd/ty-0.0.1a27-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:355f651d0cdb85535a82bd9f0583f77b28e3fd7bba7b7da33dcee5a576eff28b", size = 9592394, upload-time = "2025-11-18T21:55:06.542Z" }, + { url = "https://files.pythonhosted.org/packages/ee/a7/4636369731b24ed07c2b4c7805b8d990283d677180662c532d82e4ef1a36/ty-0.0.1a27-py3-none-win32.whl", hash = "sha256:61782e5f40e6df622093847b34c366634b75d53f839986f1bf4481672ad6cb55", size = 8783816, upload-time = "2025-11-18T21:55:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/a7/1d/b76487725628d9e81d9047dc0033a5e167e0d10f27893d04de67fe1a9763/ty-0.0.1a27-py3-none-win_amd64.whl", hash = "sha256:c682b238085d3191acddcf66ef22641562946b1bba2a7f316012d5b2a2f4de11", size = 9616833, upload-time = "2025-11-18T21:55:12.457Z" }, + { url = "https://files.pythonhosted.org/packages/3a/db/c7cd5276c8f336a3cf87992b75ba9d486a7cf54e753fcd42495b3bc56fb7/ty-0.0.1a27-py3-none-win_arm64.whl", hash = "sha256:e146dfa32cbb0ac6afb0cb65659e87e4e313715e68d76fe5ae0a4b3d5b912ce8", size = 9137796, upload-time = "2025-11-18T21:55:15.897Z" }, ] [[package]] @@ -6177,27 +6228,27 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { 
url = "https://files.pythonhosted.org/packages/8f/28/7c85c8032b91dbe79725b6f17d2fffc595dff06a35c7a30a37bef73a1ab4/typer-0.20.0.tar.gz", hash = "sha256:1aaf6494031793e4876fb0bacfa6a912b551cf43c1e63c800df8b1a866720c37", size = 106492 } +sdist = { url = "https://files.pythonhosted.org/packages/8f/28/7c85c8032b91dbe79725b6f17d2fffc595dff06a35c7a30a37bef73a1ab4/typer-0.20.0.tar.gz", hash = "sha256:1aaf6494031793e4876fb0bacfa6a912b551cf43c1e63c800df8b1a866720c37", size = 106492, upload-time = "2025-10-20T17:03:49.445Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/64/7713ffe4b5983314e9d436a90d5bd4f63b6054e2aca783a3cfc44cb95bbf/typer-0.20.0-py3-none-any.whl", hash = "sha256:5b463df6793ec1dca6213a3cf4c0f03bc6e322ac5e16e13ddd622a889489784a", size = 47028 }, + { url = "https://files.pythonhosted.org/packages/78/64/7713ffe4b5983314e9d436a90d5bd4f63b6054e2aca783a3cfc44cb95bbf/typer-0.20.0-py3-none-any.whl", hash = "sha256:5b463df6793ec1dca6213a3cf4c0f03bc6e322ac5e16e13ddd622a889489784a", size = 47028, upload-time = "2025-10-20T17:03:47.617Z" }, ] [[package]] name = "types-aiofiles" version = "24.1.0.20250822" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/19/48/c64471adac9206cc844afb33ed311ac5a65d2f59df3d861e0f2d0cad7414/types_aiofiles-24.1.0.20250822.tar.gz", hash = "sha256:9ab90d8e0c307fe97a7cf09338301e3f01a163e39f3b529ace82466355c84a7b", size = 14484 } +sdist = { url = "https://files.pythonhosted.org/packages/19/48/c64471adac9206cc844afb33ed311ac5a65d2f59df3d861e0f2d0cad7414/types_aiofiles-24.1.0.20250822.tar.gz", hash = "sha256:9ab90d8e0c307fe97a7cf09338301e3f01a163e39f3b529ace82466355c84a7b", size = 14484, upload-time = "2025-08-22T03:02:23.039Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/8e/5e6d2215e1d8f7c2a94c6e9d0059ae8109ce0f5681956d11bb0a228cef04/types_aiofiles-24.1.0.20250822-py3-none-any.whl", hash = "sha256:0ec8f8909e1a85a5a79aed0573af7901f53120dd2a29771dd0b3ef48e12328b0", size = 14322 }, + { url = "https://files.pythonhosted.org/packages/bc/8e/5e6d2215e1d8f7c2a94c6e9d0059ae8109ce0f5681956d11bb0a228cef04/types_aiofiles-24.1.0.20250822-py3-none-any.whl", hash = "sha256:0ec8f8909e1a85a5a79aed0573af7901f53120dd2a29771dd0b3ef48e12328b0", size = 14322, upload-time = "2025-08-22T03:02:21.918Z" }, ] [[package]] name = "types-awscrt" version = "0.29.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6e/77/c25c0fbdd3b269b13139c08180bcd1521957c79bd133309533384125810c/types_awscrt-0.29.0.tar.gz", hash = "sha256:7f81040846095cbaf64e6b79040434750d4f2f487544d7748b778c349d393510", size = 17715 } +sdist = { url = "https://files.pythonhosted.org/packages/6e/77/c25c0fbdd3b269b13139c08180bcd1521957c79bd133309533384125810c/types_awscrt-0.29.0.tar.gz", hash = "sha256:7f81040846095cbaf64e6b79040434750d4f2f487544d7748b778c349d393510", size = 17715, upload-time = "2025-11-21T21:01:24.223Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/37/a9/6b7a0ceb8e6f2396cc290ae2f1520a1598842119f09b943d83d6ff01bc49/types_awscrt-0.29.0-py3-none-any.whl", hash = "sha256:ece1906d5708b51b6603b56607a702ed1e5338a2df9f31950e000f03665ac387", size = 42343 }, + { url = "https://files.pythonhosted.org/packages/37/a9/6b7a0ceb8e6f2396cc290ae2f1520a1598842119f09b943d83d6ff01bc49/types_awscrt-0.29.0-py3-none-any.whl", hash = "sha256:ece1906d5708b51b6603b56607a702ed1e5338a2df9f31950e000f03665ac387", size = 42343, upload-time = "2025-11-21T21:01:22.979Z" }, ] 
[[package]] @@ -6207,18 +6258,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-html5lib" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/d1/32b410f6d65eda94d3dfb0b3d0ca151f12cb1dc4cef731dcf7cbfd8716ff/types_beautifulsoup4-4.12.0.20250516.tar.gz", hash = "sha256:aa19dd73b33b70d6296adf92da8ab8a0c945c507e6fb7d5db553415cc77b417e", size = 16628 } +sdist = { url = "https://files.pythonhosted.org/packages/6d/d1/32b410f6d65eda94d3dfb0b3d0ca151f12cb1dc4cef731dcf7cbfd8716ff/types_beautifulsoup4-4.12.0.20250516.tar.gz", hash = "sha256:aa19dd73b33b70d6296adf92da8ab8a0c945c507e6fb7d5db553415cc77b417e", size = 16628, upload-time = "2025-05-16T03:09:09.93Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/79/d84de200a80085b32f12c5820d4fd0addcbe7ba6dce8c1c9d8605e833c8e/types_beautifulsoup4-4.12.0.20250516-py3-none-any.whl", hash = "sha256:5923399d4a1ba9cc8f0096fe334cc732e130269541d66261bb42ab039c0376ee", size = 16879 }, + { url = "https://files.pythonhosted.org/packages/7c/79/d84de200a80085b32f12c5820d4fd0addcbe7ba6dce8c1c9d8605e833c8e/types_beautifulsoup4-4.12.0.20250516-py3-none-any.whl", hash = "sha256:5923399d4a1ba9cc8f0096fe334cc732e130269541d66261bb42ab039c0376ee", size = 16879, upload-time = "2025-05-16T03:09:09.051Z" }, ] [[package]] name = "types-cachetools" version = "5.5.0.20240820" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c2/7e/ad6ba4a56b2a994e0f0a04a61a50466b60ee88a13d10a18c83ac14a66c61/types-cachetools-5.5.0.20240820.tar.gz", hash = "sha256:b888ab5c1a48116f7799cd5004b18474cd82b5463acb5ffb2db2fc9c7b053bc0", size = 4198 } +sdist = { url = "https://files.pythonhosted.org/packages/c2/7e/ad6ba4a56b2a994e0f0a04a61a50466b60ee88a13d10a18c83ac14a66c61/types-cachetools-5.5.0.20240820.tar.gz", hash = "sha256:b888ab5c1a48116f7799cd5004b18474cd82b5463acb5ffb2db2fc9c7b053bc0", size = 4198, upload-time = "2024-08-20T02:30:07.525Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/4d/fd7cc050e2d236d5570c4d92531c0396573a1e14b31735870e849351c717/types_cachetools-5.5.0.20240820-py3-none-any.whl", hash = "sha256:efb2ed8bf27a4b9d3ed70d33849f536362603a90b8090a328acf0cd42fda82e2", size = 4149 }, + { url = "https://files.pythonhosted.org/packages/27/4d/fd7cc050e2d236d5570c4d92531c0396573a1e14b31735870e849351c717/types_cachetools-5.5.0.20240820-py3-none-any.whl", hash = "sha256:efb2ed8bf27a4b9d3ed70d33849f536362603a90b8090a328acf0cd42fda82e2", size = 4149, upload-time = "2024-08-20T02:30:06.461Z" }, ] [[package]] @@ -6228,45 +6279,45 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2a/98/ea454cea03e5f351323af6a482c65924f3c26c515efd9090dede58f2b4b6/types_cffi-1.17.0.20250915.tar.gz", hash = "sha256:4362e20368f78dabd5c56bca8004752cc890e07a71605d9e0d9e069dbaac8c06", size = 17229 } +sdist = { url = "https://files.pythonhosted.org/packages/2a/98/ea454cea03e5f351323af6a482c65924f3c26c515efd9090dede58f2b4b6/types_cffi-1.17.0.20250915.tar.gz", hash = "sha256:4362e20368f78dabd5c56bca8004752cc890e07a71605d9e0d9e069dbaac8c06", size = 17229, upload-time = "2025-09-15T03:01:25.31Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/ec/092f2b74b49ec4855cdb53050deb9699f7105b8fda6fe034c0781b8687f3/types_cffi-1.17.0.20250915-py3-none-any.whl", hash = "sha256:cef4af1116c83359c11bb4269283c50f0688e9fc1d7f0eeb390f3661546da52c", size = 20112 }, + { url = 
"https://files.pythonhosted.org/packages/aa/ec/092f2b74b49ec4855cdb53050deb9699f7105b8fda6fe034c0781b8687f3/types_cffi-1.17.0.20250915-py3-none-any.whl", hash = "sha256:cef4af1116c83359c11bb4269283c50f0688e9fc1d7f0eeb390f3661546da52c", size = 20112, upload-time = "2025-09-15T03:01:24.187Z" }, ] [[package]] name = "types-colorama" version = "0.4.15.20250801" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/99/37/af713e7d73ca44738c68814cbacf7a655aa40ddd2e8513d431ba78ace7b3/types_colorama-0.4.15.20250801.tar.gz", hash = "sha256:02565d13d68963d12237d3f330f5ecd622a3179f7b5b14ee7f16146270c357f5", size = 10437 } +sdist = { url = "https://files.pythonhosted.org/packages/99/37/af713e7d73ca44738c68814cbacf7a655aa40ddd2e8513d431ba78ace7b3/types_colorama-0.4.15.20250801.tar.gz", hash = "sha256:02565d13d68963d12237d3f330f5ecd622a3179f7b5b14ee7f16146270c357f5", size = 10437, upload-time = "2025-08-01T03:48:22.605Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/95/3a/44ccbbfef6235aeea84c74041dc6dfee6c17ff3ddba782a0250e41687ec7/types_colorama-0.4.15.20250801-py3-none-any.whl", hash = "sha256:b6e89bd3b250fdad13a8b6a465c933f4a5afe485ea2e2f104d739be50b13eea9", size = 10743 }, + { url = "https://files.pythonhosted.org/packages/95/3a/44ccbbfef6235aeea84c74041dc6dfee6c17ff3ddba782a0250e41687ec7/types_colorama-0.4.15.20250801-py3-none-any.whl", hash = "sha256:b6e89bd3b250fdad13a8b6a465c933f4a5afe485ea2e2f104d739be50b13eea9", size = 10743, upload-time = "2025-08-01T03:48:21.774Z" }, ] [[package]] name = "types-defusedxml" version = "0.7.0.20250822" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7d/4a/5b997ae87bf301d1796f72637baa4e0e10d7db17704a8a71878a9f77f0c0/types_defusedxml-0.7.0.20250822.tar.gz", hash = "sha256:ba6c395105f800c973bba8a25e41b215483e55ec79c8ca82b6fe90ba0bc3f8b2", size = 10590 } +sdist = { url = "https://files.pythonhosted.org/packages/7d/4a/5b997ae87bf301d1796f72637baa4e0e10d7db17704a8a71878a9f77f0c0/types_defusedxml-0.7.0.20250822.tar.gz", hash = "sha256:ba6c395105f800c973bba8a25e41b215483e55ec79c8ca82b6fe90ba0bc3f8b2", size = 10590, upload-time = "2025-08-22T03:02:59.547Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/73/8a36998cee9d7c9702ed64a31f0866c7f192ecffc22771d44dbcc7878f18/types_defusedxml-0.7.0.20250822-py3-none-any.whl", hash = "sha256:5ee219f8a9a79c184773599ad216123aedc62a969533ec36737ec98601f20dcf", size = 13430 }, + { url = "https://files.pythonhosted.org/packages/13/73/8a36998cee9d7c9702ed64a31f0866c7f192ecffc22771d44dbcc7878f18/types_defusedxml-0.7.0.20250822-py3-none-any.whl", hash = "sha256:5ee219f8a9a79c184773599ad216123aedc62a969533ec36737ec98601f20dcf", size = 13430, upload-time = "2025-08-22T03:02:58.466Z" }, ] [[package]] name = "types-deprecated" version = "1.2.15.20250304" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0e/67/eeefaaabb03b288aad85483d410452c8bbcbf8b2bd876b0e467ebd97415b/types_deprecated-1.2.15.20250304.tar.gz", hash = "sha256:c329030553029de5cc6cb30f269c11f4e00e598c4241290179f63cda7d33f719", size = 8015 } +sdist = { url = "https://files.pythonhosted.org/packages/0e/67/eeefaaabb03b288aad85483d410452c8bbcbf8b2bd876b0e467ebd97415b/types_deprecated-1.2.15.20250304.tar.gz", hash = "sha256:c329030553029de5cc6cb30f269c11f4e00e598c4241290179f63cda7d33f719", size = 8015, upload-time = "2025-03-04T02:48:17.894Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/4d/e3/c18aa72ab84e0bc127a3a94e93be1a6ac2cb281371d3a45376ab7cfdd31c/types_deprecated-1.2.15.20250304-py3-none-any.whl", hash = "sha256:86a65aa550ea8acf49f27e226b8953288cd851de887970fbbdf2239c116c3107", size = 8553 }, + { url = "https://files.pythonhosted.org/packages/4d/e3/c18aa72ab84e0bc127a3a94e93be1a6ac2cb281371d3a45376ab7cfdd31c/types_deprecated-1.2.15.20250304-py3-none-any.whl", hash = "sha256:86a65aa550ea8acf49f27e226b8953288cd851de887970fbbdf2239c116c3107", size = 8553, upload-time = "2025-03-04T02:48:16.666Z" }, ] [[package]] name = "types-docutils" version = "0.21.0.20250809" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/be/9b/f92917b004e0a30068e024e8925c7d9b10440687b96d91f26d8762f4b68c/types_docutils-0.21.0.20250809.tar.gz", hash = "sha256:cc2453c87dc729b5aae499597496e4f69b44aa5fccb27051ed8bb55b0bd5e31b", size = 54770 } +sdist = { url = "https://files.pythonhosted.org/packages/be/9b/f92917b004e0a30068e024e8925c7d9b10440687b96d91f26d8762f4b68c/types_docutils-0.21.0.20250809.tar.gz", hash = "sha256:cc2453c87dc729b5aae499597496e4f69b44aa5fccb27051ed8bb55b0bd5e31b", size = 54770, upload-time = "2025-08-09T03:15:42.752Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/a9/46bc12e4c918c4109b67401bf87fd450babdffbebd5dbd7833f5096f42a5/types_docutils-0.21.0.20250809-py3-none-any.whl", hash = "sha256:af02c82327e8ded85f57dd85c8ebf93b6a0b643d85a44c32d471e3395604ea50", size = 89598 }, + { url = "https://files.pythonhosted.org/packages/7e/a9/46bc12e4c918c4109b67401bf87fd450babdffbebd5dbd7833f5096f42a5/types_docutils-0.21.0.20250809-py3-none-any.whl", hash = "sha256:af02c82327e8ded85f57dd85c8ebf93b6a0b643d85a44c32d471e3395604ea50", size = 89598, upload-time = "2025-08-09T03:15:41.503Z" }, ] [[package]] @@ -6276,9 +6327,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "flask" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a4/f3/dd2f0d274ecb77772d3ce83735f75ad14713461e8cf7e6d61a7c272037b1/types_flask_cors-5.0.0.20250413.tar.gz", hash = "sha256:b346d052f4ef3b606b73faf13e868e458f1efdbfedcbe1aba739eb2f54a6cf5f", size = 9921 } +sdist = { url = "https://files.pythonhosted.org/packages/a4/f3/dd2f0d274ecb77772d3ce83735f75ad14713461e8cf7e6d61a7c272037b1/types_flask_cors-5.0.0.20250413.tar.gz", hash = "sha256:b346d052f4ef3b606b73faf13e868e458f1efdbfedcbe1aba739eb2f54a6cf5f", size = 9921, upload-time = "2025-04-13T04:04:15.515Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/34/7d64eb72d80bfd5b9e6dd31e7fe351a1c9a735f5c01e85b1d3b903a9d656/types_flask_cors-5.0.0.20250413-py3-none-any.whl", hash = "sha256:8183fdba764d45a5b40214468a1d5daa0e86c4ee6042d13f38cc428308f27a64", size = 9982 }, + { url = "https://files.pythonhosted.org/packages/66/34/7d64eb72d80bfd5b9e6dd31e7fe351a1c9a735f5c01e85b1d3b903a9d656/types_flask_cors-5.0.0.20250413-py3-none-any.whl", hash = "sha256:8183fdba764d45a5b40214468a1d5daa0e86c4ee6042d13f38cc428308f27a64", size = 9982, upload-time = "2025-04-13T04:04:14.27Z" }, ] [[package]] @@ -6289,9 +6340,9 @@ dependencies = [ { name = "flask" }, { name = "flask-sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d5/d1/d11799471725b7db070c4f1caa3161f556230d4fb5dad76d23559da1be4d/types_flask_migrate-4.1.0.20250809.tar.gz", hash = "sha256:fdf97a262c86aca494d75874a2374e84f2d37bef6467d9540fa3b054b67db04e", size = 8636 } +sdist = { url = 
"https://files.pythonhosted.org/packages/d5/d1/d11799471725b7db070c4f1caa3161f556230d4fb5dad76d23559da1be4d/types_flask_migrate-4.1.0.20250809.tar.gz", hash = "sha256:fdf97a262c86aca494d75874a2374e84f2d37bef6467d9540fa3b054b67db04e", size = 8636, upload-time = "2025-08-09T03:17:03.957Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/53/f5fd40fb6c21c1f8e7da8325f3504492d027a7921d5c80061cd434c3a0fc/types_flask_migrate-4.1.0.20250809-py3-none-any.whl", hash = "sha256:92ad2c0d4000a53bf1e2f7813dd067edbbcc4c503961158a763e2b0ae297555d", size = 8648 }, + { url = "https://files.pythonhosted.org/packages/b4/53/f5fd40fb6c21c1f8e7da8325f3504492d027a7921d5c80061cd434c3a0fc/types_flask_migrate-4.1.0.20250809-py3-none-any.whl", hash = "sha256:92ad2c0d4000a53bf1e2f7813dd067edbbcc4c503961158a763e2b0ae297555d", size = 8648, upload-time = "2025-08-09T03:17:02.952Z" }, ] [[package]] @@ -6302,18 +6353,18 @@ dependencies = [ { name = "types-greenlet" }, { name = "types-psutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4c/21/552d818a475e1a31780fb7ae50308feb64211a05eb403491d1a34df95e5f/types_gevent-25.9.0.20251102.tar.gz", hash = "sha256:76f93513af63f4577bb4178c143676dd6c4780abc305f405a4e8ff8f1fa177f8", size = 38096 } +sdist = { url = "https://files.pythonhosted.org/packages/4c/21/552d818a475e1a31780fb7ae50308feb64211a05eb403491d1a34df95e5f/types_gevent-25.9.0.20251102.tar.gz", hash = "sha256:76f93513af63f4577bb4178c143676dd6c4780abc305f405a4e8ff8f1fa177f8", size = 38096, upload-time = "2025-11-02T03:07:42.112Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/a1/776d2de31a02123f225aaa790641113ae47f738f6e8e3091d3012240a88e/types_gevent-25.9.0.20251102-py3-none-any.whl", hash = "sha256:0f14b9977cb04bf3d94444b5ae6ec5d78ac30f74c4df83483e0facec86f19d8b", size = 55592 }, + { url = "https://files.pythonhosted.org/packages/60/a1/776d2de31a02123f225aaa790641113ae47f738f6e8e3091d3012240a88e/types_gevent-25.9.0.20251102-py3-none-any.whl", hash = "sha256:0f14b9977cb04bf3d94444b5ae6ec5d78ac30f74c4df83483e0facec86f19d8b", size = 55592, upload-time = "2025-11-02T03:07:41.003Z" }, ] [[package]] name = "types-greenlet" version = "3.1.0.20250401" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c0/c9/50405ed194a02f02a418311311e6ee4dd73eed446608b679e6df8170d5b7/types_greenlet-3.1.0.20250401.tar.gz", hash = "sha256:949389b64c34ca9472f6335189e9fe0b2e9704436d4f0850e39e9b7145909082", size = 8460 } +sdist = { url = "https://files.pythonhosted.org/packages/c0/c9/50405ed194a02f02a418311311e6ee4dd73eed446608b679e6df8170d5b7/types_greenlet-3.1.0.20250401.tar.gz", hash = "sha256:949389b64c34ca9472f6335189e9fe0b2e9704436d4f0850e39e9b7145909082", size = 8460, upload-time = "2025-04-01T03:06:44.216Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/f3/36c5a6db23761c810d91227146f20b6e501aa50a51a557bd14e021cd9aea/types_greenlet-3.1.0.20250401-py3-none-any.whl", hash = "sha256:77987f3249b0f21415dc0254057e1ae4125a696a9bba28b0bcb67ee9e3dc14f6", size = 8821 }, + { url = "https://files.pythonhosted.org/packages/a5/f3/36c5a6db23761c810d91227146f20b6e501aa50a51a557bd14e021cd9aea/types_greenlet-3.1.0.20250401-py3-none-any.whl", hash = "sha256:77987f3249b0f21415dc0254057e1ae4125a696a9bba28b0bcb67ee9e3dc14f6", size = 8821, upload-time = "2025-04-01T03:06:42.945Z" }, ] [[package]] @@ -6323,18 +6374,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-webencodings" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/c8/f3/d9a1bbba7b42b5558a3f9fe017d967f5338cf8108d35991d9b15fdea3e0d/types_html5lib-1.1.11.20251117.tar.gz", hash = "sha256:1a6a3ac5394aa12bf547fae5d5eff91dceec46b6d07c4367d9b39a37f42f201a", size = 18100 } +sdist = { url = "https://files.pythonhosted.org/packages/c8/f3/d9a1bbba7b42b5558a3f9fe017d967f5338cf8108d35991d9b15fdea3e0d/types_html5lib-1.1.11.20251117.tar.gz", hash = "sha256:1a6a3ac5394aa12bf547fae5d5eff91dceec46b6d07c4367d9b39a37f42f201a", size = 18100, upload-time = "2025-11-17T03:08:00.78Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/ab/f5606db367c1f57f7400d3cb3bead6665ee2509621439af1b29c35ef6f9e/types_html5lib-1.1.11.20251117-py3-none-any.whl", hash = "sha256:2a3fc935de788a4d2659f4535002a421e05bea5e172b649d33232e99d4272d08", size = 24302 }, + { url = "https://files.pythonhosted.org/packages/f0/ab/f5606db367c1f57f7400d3cb3bead6665ee2509621439af1b29c35ef6f9e/types_html5lib-1.1.11.20251117-py3-none-any.whl", hash = "sha256:2a3fc935de788a4d2659f4535002a421e05bea5e172b649d33232e99d4272d08", size = 24302, upload-time = "2025-11-17T03:07:59.996Z" }, ] [[package]] name = "types-jmespath" version = "1.0.2.20250809" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d5/ff/6848b1603ca47fff317b44dfff78cc1fb0828262f840b3ab951b619d5a22/types_jmespath-1.0.2.20250809.tar.gz", hash = "sha256:e194efec21c0aeae789f701ae25f17c57c25908e789b1123a5c6f8d915b4adff", size = 10248 } +sdist = { url = "https://files.pythonhosted.org/packages/d5/ff/6848b1603ca47fff317b44dfff78cc1fb0828262f840b3ab951b619d5a22/types_jmespath-1.0.2.20250809.tar.gz", hash = "sha256:e194efec21c0aeae789f701ae25f17c57c25908e789b1123a5c6f8d915b4adff", size = 10248, upload-time = "2025-08-09T03:14:57.996Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/6a/65c8be6b6555beaf1a654ae1c2308c2e19a610c0b318a9730e691b79ac79/types_jmespath-1.0.2.20250809-py3-none-any.whl", hash = "sha256:4147d17cc33454f0dac7e78b4e18e532a1330c518d85f7f6d19e5818ab83da21", size = 11494 }, + { url = "https://files.pythonhosted.org/packages/0e/6a/65c8be6b6555beaf1a654ae1c2308c2e19a610c0b318a9730e691b79ac79/types_jmespath-1.0.2.20250809-py3-none-any.whl", hash = "sha256:4147d17cc33454f0dac7e78b4e18e532a1330c518d85f7f6d19e5818ab83da21", size = 11494, upload-time = "2025-08-09T03:14:57.292Z" }, ] [[package]] @@ -6344,90 +6395,90 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "referencing" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a0/ec/27ea5bffdb306bf261f6677a98b6993d93893b2c2e30f7ecc1d2c99d32e7/types_jsonschema-4.23.0.20250516.tar.gz", hash = "sha256:9ace09d9d35c4390a7251ccd7d833b92ccc189d24d1b347f26212afce361117e", size = 14911 } +sdist = { url = "https://files.pythonhosted.org/packages/a0/ec/27ea5bffdb306bf261f6677a98b6993d93893b2c2e30f7ecc1d2c99d32e7/types_jsonschema-4.23.0.20250516.tar.gz", hash = "sha256:9ace09d9d35c4390a7251ccd7d833b92ccc189d24d1b347f26212afce361117e", size = 14911, upload-time = "2025-05-16T03:09:33.728Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/48/73ae8b388e19fc4a2a8060d0876325ec7310cfd09b53a2185186fd35959f/types_jsonschema-4.23.0.20250516-py3-none-any.whl", hash = "sha256:e7d0dd7db7e59e63c26e3230e26ffc64c4704cc5170dc21270b366a35ead1618", size = 15027 }, + { url = "https://files.pythonhosted.org/packages/e6/48/73ae8b388e19fc4a2a8060d0876325ec7310cfd09b53a2185186fd35959f/types_jsonschema-4.23.0.20250516-py3-none-any.whl", hash = 
"sha256:e7d0dd7db7e59e63c26e3230e26ffc64c4704cc5170dc21270b366a35ead1618", size = 15027, upload-time = "2025-05-16T03:09:32.499Z" }, ] [[package]] name = "types-markdown" version = "3.7.0.20250322" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/fd/b4bd01b8c46f021c35a07aa31fe1dc45d21adc9fc8d53064bfa577aae73d/types_markdown-3.7.0.20250322.tar.gz", hash = "sha256:a48ed82dfcb6954592a10f104689d2d44df9125ce51b3cee20e0198a5216d55c", size = 18052 } +sdist = { url = "https://files.pythonhosted.org/packages/bd/fd/b4bd01b8c46f021c35a07aa31fe1dc45d21adc9fc8d53064bfa577aae73d/types_markdown-3.7.0.20250322.tar.gz", hash = "sha256:a48ed82dfcb6954592a10f104689d2d44df9125ce51b3cee20e0198a5216d55c", size = 18052, upload-time = "2025-03-22T02:48:46.193Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/59/ee46617bc2b5e43bc06a000fdcd6358a013957e30ad545bed5e3456a4341/types_markdown-3.7.0.20250322-py3-none-any.whl", hash = "sha256:7e855503027b4290355a310fb834871940d9713da7c111f3e98a5e1cbc77acfb", size = 23699 }, + { url = "https://files.pythonhosted.org/packages/56/59/ee46617bc2b5e43bc06a000fdcd6358a013957e30ad545bed5e3456a4341/types_markdown-3.7.0.20250322-py3-none-any.whl", hash = "sha256:7e855503027b4290355a310fb834871940d9713da7c111f3e98a5e1cbc77acfb", size = 23699, upload-time = "2025-03-22T02:48:45.001Z" }, ] [[package]] name = "types-oauthlib" version = "3.2.0.20250516" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b1/2c/dba2c193ccff2d1e2835589d4075b230d5627b9db363e9c8de153261d6ec/types_oauthlib-3.2.0.20250516.tar.gz", hash = "sha256:56bf2cffdb8443ae718d4e83008e3fbd5f861230b4774e6d7799527758119d9a", size = 24683 } +sdist = { url = "https://files.pythonhosted.org/packages/b1/2c/dba2c193ccff2d1e2835589d4075b230d5627b9db363e9c8de153261d6ec/types_oauthlib-3.2.0.20250516.tar.gz", hash = "sha256:56bf2cffdb8443ae718d4e83008e3fbd5f861230b4774e6d7799527758119d9a", size = 24683, upload-time = "2025-05-16T03:07:42.484Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/54/cdd62283338616fd2448f534b29110d79a42aaabffaf5f45e7aed365a366/types_oauthlib-3.2.0.20250516-py3-none-any.whl", hash = "sha256:5799235528bc9bd262827149a1633ff55ae6e5a5f5f151f4dae74359783a31b3", size = 45671 }, + { url = "https://files.pythonhosted.org/packages/b8/54/cdd62283338616fd2448f534b29110d79a42aaabffaf5f45e7aed365a366/types_oauthlib-3.2.0.20250516-py3-none-any.whl", hash = "sha256:5799235528bc9bd262827149a1633ff55ae6e5a5f5f151f4dae74359783a31b3", size = 45671, upload-time = "2025-05-16T03:07:41.268Z" }, ] [[package]] name = "types-objgraph" version = "3.6.0.20240907" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/22/48/ba0ec63d392904eee34ef1cbde2d8798f79a3663950e42fbbc25fd1bd6f7/types-objgraph-3.6.0.20240907.tar.gz", hash = "sha256:2e3dee675843ae387889731550b0ddfed06e9420946cf78a4bca565b5fc53634", size = 2928 } +sdist = { url = "https://files.pythonhosted.org/packages/22/48/ba0ec63d392904eee34ef1cbde2d8798f79a3663950e42fbbc25fd1bd6f7/types-objgraph-3.6.0.20240907.tar.gz", hash = "sha256:2e3dee675843ae387889731550b0ddfed06e9420946cf78a4bca565b5fc53634", size = 2928, upload-time = "2024-09-07T02:35:21.214Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/16/c9/6d647a947f3937b19bcc6d52262921ddad60d90060ff66511a4bd7e990c5/types_objgraph-3.6.0.20240907-py3-none-any.whl", hash = 
"sha256:67207633a9b5789ee1911d740b269c3371081b79c0d8f68b00e7b8539f5c43f5", size = 3314 }, + { url = "https://files.pythonhosted.org/packages/16/c9/6d647a947f3937b19bcc6d52262921ddad60d90060ff66511a4bd7e990c5/types_objgraph-3.6.0.20240907-py3-none-any.whl", hash = "sha256:67207633a9b5789ee1911d740b269c3371081b79c0d8f68b00e7b8539f5c43f5", size = 3314, upload-time = "2024-09-07T02:35:19.865Z" }, ] [[package]] name = "types-olefile" version = "0.47.0.20240806" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/18/9d87a1bc394323ce22690308c751680c4301fc3fbe47cd58e16d760b563a/types-olefile-0.47.0.20240806.tar.gz", hash = "sha256:96490f208cbb449a52283855319d73688ba9167ae58858ef8c506bf7ca2c6b67", size = 4369 } +sdist = { url = "https://files.pythonhosted.org/packages/49/18/9d87a1bc394323ce22690308c751680c4301fc3fbe47cd58e16d760b563a/types-olefile-0.47.0.20240806.tar.gz", hash = "sha256:96490f208cbb449a52283855319d73688ba9167ae58858ef8c506bf7ca2c6b67", size = 4369, upload-time = "2024-08-06T02:30:01.966Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/4d/f8acae53dd95353f8a789a06ea27423ae41f2067eb6ce92946fdc6a1f7a7/types_olefile-0.47.0.20240806-py3-none-any.whl", hash = "sha256:c760a3deab7adb87a80d33b0e4edbbfbab865204a18d5121746022d7f8555118", size = 4758 }, + { url = "https://files.pythonhosted.org/packages/a9/4d/f8acae53dd95353f8a789a06ea27423ae41f2067eb6ce92946fdc6a1f7a7/types_olefile-0.47.0.20240806-py3-none-any.whl", hash = "sha256:c760a3deab7adb87a80d33b0e4edbbfbab865204a18d5121746022d7f8555118", size = 4758, upload-time = "2024-08-06T02:30:01.15Z" }, ] [[package]] name = "types-openpyxl" version = "3.1.5.20250919" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c4/12/8bc4a25d49f1e4b7bbca868daa3ee80b1983d8137b4986867b5b65ab2ecd/types_openpyxl-3.1.5.20250919.tar.gz", hash = "sha256:232b5906773eebace1509b8994cdadda043f692cfdba9bfbb86ca921d54d32d7", size = 100880 } +sdist = { url = "https://files.pythonhosted.org/packages/c4/12/8bc4a25d49f1e4b7bbca868daa3ee80b1983d8137b4986867b5b65ab2ecd/types_openpyxl-3.1.5.20250919.tar.gz", hash = "sha256:232b5906773eebace1509b8994cdadda043f692cfdba9bfbb86ca921d54d32d7", size = 100880, upload-time = "2025-09-19T02:54:39.997Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/3c/d49cf3f4489a10e9ddefde18fd258f120754c5825d06d145d9a0aaac770b/types_openpyxl-3.1.5.20250919-py3-none-any.whl", hash = "sha256:bd06f18b12fd5e1c9f0b666ee6151d8140216afa7496f7ebb9fe9d33a1a3ce99", size = 166078 }, + { url = "https://files.pythonhosted.org/packages/36/3c/d49cf3f4489a10e9ddefde18fd258f120754c5825d06d145d9a0aaac770b/types_openpyxl-3.1.5.20250919-py3-none-any.whl", hash = "sha256:bd06f18b12fd5e1c9f0b666ee6151d8140216afa7496f7ebb9fe9d33a1a3ce99", size = 166078, upload-time = "2025-09-19T02:54:38.657Z" }, ] [[package]] name = "types-pexpect" version = "4.9.0.20250916" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0c/e6/cc43e306dc7de14ec7861c24ac4957f688741ae39ae685049695d796b587/types_pexpect-4.9.0.20250916.tar.gz", hash = "sha256:69e5fed6199687a730a572de780a5749248a4c5df2ff1521e194563475c9928d", size = 13322 } +sdist = { url = "https://files.pythonhosted.org/packages/0c/e6/cc43e306dc7de14ec7861c24ac4957f688741ae39ae685049695d796b587/types_pexpect-4.9.0.20250916.tar.gz", hash = "sha256:69e5fed6199687a730a572de780a5749248a4c5df2ff1521e194563475c9928d", size = 13322, 
upload-time = "2025-09-16T02:49:25.61Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/6d/7740e235a9fb2570968da7d386d7feb511ce68cd23472402ff8cdf7fc78f/types_pexpect-4.9.0.20250916-py3-none-any.whl", hash = "sha256:7fa43cb96042ac58bc74f7c28e5d85782be0ee01344149886849e9d90936fe8a", size = 17057 }, + { url = "https://files.pythonhosted.org/packages/aa/6d/7740e235a9fb2570968da7d386d7feb511ce68cd23472402ff8cdf7fc78f/types_pexpect-4.9.0.20250916-py3-none-any.whl", hash = "sha256:7fa43cb96042ac58bc74f7c28e5d85782be0ee01344149886849e9d90936fe8a", size = 17057, upload-time = "2025-09-16T02:49:24.546Z" }, ] [[package]] name = "types-protobuf" version = "5.29.1.20250403" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/78/6d/62a2e73b966c77609560800004dd49a926920dd4976a9fdd86cf998e7048/types_protobuf-5.29.1.20250403.tar.gz", hash = "sha256:7ff44f15022119c9d7558ce16e78b2d485bf7040b4fadced4dd069bb5faf77a2", size = 59413 } +sdist = { url = "https://files.pythonhosted.org/packages/78/6d/62a2e73b966c77609560800004dd49a926920dd4976a9fdd86cf998e7048/types_protobuf-5.29.1.20250403.tar.gz", hash = "sha256:7ff44f15022119c9d7558ce16e78b2d485bf7040b4fadced4dd069bb5faf77a2", size = 59413, upload-time = "2025-04-02T10:07:17.138Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/e3/b74dcc2797b21b39d5a4f08a8b08e20369b4ca250d718df7af41a60dd9f0/types_protobuf-5.29.1.20250403-py3-none-any.whl", hash = "sha256:c71de04106a2d54e5b2173d0a422058fae0ef2d058d70cf369fb797bf61ffa59", size = 73874 }, + { url = "https://files.pythonhosted.org/packages/69/e3/b74dcc2797b21b39d5a4f08a8b08e20369b4ca250d718df7af41a60dd9f0/types_protobuf-5.29.1.20250403-py3-none-any.whl", hash = "sha256:c71de04106a2d54e5b2173d0a422058fae0ef2d058d70cf369fb797bf61ffa59", size = 73874, upload-time = "2025-04-02T10:07:15.755Z" }, ] [[package]] name = "types-psutil" version = "7.0.0.20251116" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/47/ec/c1e9308b91582cad1d7e7d3007fd003ef45a62c2500f8219313df5fc3bba/types_psutil-7.0.0.20251116.tar.gz", hash = "sha256:92b5c78962e55ce1ed7b0189901a4409ece36ab9fd50c3029cca7e681c606c8a", size = 22192 } +sdist = { url = "https://files.pythonhosted.org/packages/47/ec/c1e9308b91582cad1d7e7d3007fd003ef45a62c2500f8219313df5fc3bba/types_psutil-7.0.0.20251116.tar.gz", hash = "sha256:92b5c78962e55ce1ed7b0189901a4409ece36ab9fd50c3029cca7e681c606c8a", size = 22192, upload-time = "2025-11-16T03:10:32.859Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/0e/11ba08a5375c21039ed5f8e6bba41e9452fb69f0e2f7ee05ed5cca2a2cdf/types_psutil-7.0.0.20251116-py3-none-any.whl", hash = "sha256:74c052de077c2024b85cd435e2cba971165fe92a5eace79cbeb821e776dbc047", size = 25376 }, + { url = "https://files.pythonhosted.org/packages/c3/0e/11ba08a5375c21039ed5f8e6bba41e9452fb69f0e2f7ee05ed5cca2a2cdf/types_psutil-7.0.0.20251116-py3-none-any.whl", hash = "sha256:74c052de077c2024b85cd435e2cba971165fe92a5eace79cbeb821e776dbc047", size = 25376, upload-time = "2025-11-16T03:10:31.813Z" }, ] [[package]] name = "types-psycopg2" version = "2.9.21.20251012" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9b/b3/2d09eaf35a084cffd329c584970a3fa07101ca465c13cad1576d7c392587/types_psycopg2-2.9.21.20251012.tar.gz", hash = "sha256:4cdafd38927da0cfde49804f39ab85afd9c6e9c492800e42f1f0c1a1b0312935", size = 26710 } +sdist = { url = 
"https://files.pythonhosted.org/packages/9b/b3/2d09eaf35a084cffd329c584970a3fa07101ca465c13cad1576d7c392587/types_psycopg2-2.9.21.20251012.tar.gz", hash = "sha256:4cdafd38927da0cfde49804f39ab85afd9c6e9c492800e42f1f0c1a1b0312935", size = 26710, upload-time = "2025-10-12T02:55:39.5Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/0c/05feaf8cb51159f2c0af04b871dab7e98a2f83a3622f5f216331d2dd924c/types_psycopg2-2.9.21.20251012-py3-none-any.whl", hash = "sha256:712bad5c423fe979e357edbf40a07ca40ef775d74043de72bd4544ca328cc57e", size = 24883 }, + { url = "https://files.pythonhosted.org/packages/ec/0c/05feaf8cb51159f2c0af04b871dab7e98a2f83a3622f5f216331d2dd924c/types_psycopg2-2.9.21.20251012-py3-none-any.whl", hash = "sha256:712bad5c423fe979e357edbf40a07ca40ef775d74043de72bd4544ca328cc57e", size = 24883, upload-time = "2025-10-12T02:55:38.439Z" }, ] [[package]] @@ -6437,18 +6488,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-docutils" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/90/3b/cd650700ce9e26b56bd1a6aa4af397bbbc1784e22a03971cb633cdb0b601/types_pygments-2.19.0.20251121.tar.gz", hash = "sha256:eef114fde2ef6265365522045eac0f8354978a566852f69e75c531f0553822b1", size = 18590 } +sdist = { url = "https://files.pythonhosted.org/packages/90/3b/cd650700ce9e26b56bd1a6aa4af397bbbc1784e22a03971cb633cdb0b601/types_pygments-2.19.0.20251121.tar.gz", hash = "sha256:eef114fde2ef6265365522045eac0f8354978a566852f69e75c531f0553822b1", size = 18590, upload-time = "2025-11-21T03:03:46.623Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/99/8a/9244b21f1d60dcc62e261435d76b02f1853b4771663d7ec7d287e47a9ba9/types_pygments-2.19.0.20251121-py3-none-any.whl", hash = "sha256:cb3bfde34eb75b984c98fb733ce4f795213bd3378f855c32e75b49318371bb25", size = 25674 }, + { url = "https://files.pythonhosted.org/packages/99/8a/9244b21f1d60dcc62e261435d76b02f1853b4771663d7ec7d287e47a9ba9/types_pygments-2.19.0.20251121-py3-none-any.whl", hash = "sha256:cb3bfde34eb75b984c98fb733ce4f795213bd3378f855c32e75b49318371bb25", size = 25674, upload-time = "2025-11-21T03:03:45.72Z" }, ] [[package]] name = "types-pymysql" version = "1.1.0.20250916" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1f/12/bda1d977c07e0e47502bede1c44a986dd45946494d89e005e04cdeb0f8de/types_pymysql-1.1.0.20250916.tar.gz", hash = "sha256:98d75731795fcc06723a192786662bdfa760e1e00f22809c104fbb47bac5e29b", size = 22131 } +sdist = { url = "https://files.pythonhosted.org/packages/1f/12/bda1d977c07e0e47502bede1c44a986dd45946494d89e005e04cdeb0f8de/types_pymysql-1.1.0.20250916.tar.gz", hash = "sha256:98d75731795fcc06723a192786662bdfa760e1e00f22809c104fbb47bac5e29b", size = 22131, upload-time = "2025-09-16T02:49:22.039Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/21/eb/a225e32a6e7b196af67ab2f1b07363595f63255374cc3b88bfdab53b4ee8/types_pymysql-1.1.0.20250916-py3-none-any.whl", hash = "sha256:873eb9836bb5e3de4368cc7010ca72775f86e9692a5c7810f8c7f48da082e55b", size = 23063 }, + { url = "https://files.pythonhosted.org/packages/21/eb/a225e32a6e7b196af67ab2f1b07363595f63255374cc3b88bfdab53b4ee8/types_pymysql-1.1.0.20250916-py3-none-any.whl", hash = "sha256:873eb9836bb5e3de4368cc7010ca72775f86e9692a5c7810f8c7f48da082e55b", size = 23063, upload-time = "2025-09-16T02:49:20.933Z" }, ] [[package]] @@ -6459,54 +6510,54 @@ dependencies = [ { name = "cryptography" }, { name = "types-cffi" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/93/29/47a346550fd2020dac9a7a6d033ea03fccb92fa47c726056618cc889745e/types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39", size = 8458 } +sdist = { url = "https://files.pythonhosted.org/packages/93/29/47a346550fd2020dac9a7a6d033ea03fccb92fa47c726056618cc889745e/types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39", size = 8458, upload-time = "2024-07-22T02:32:22.558Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/05/c868a850b6fbb79c26f5f299b768ee0adc1f9816d3461dcf4287916f655b/types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54", size = 7499 }, + { url = "https://files.pythonhosted.org/packages/98/05/c868a850b6fbb79c26f5f299b768ee0adc1f9816d3461dcf4287916f655b/types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54", size = 7499, upload-time = "2024-07-22T02:32:21.232Z" }, ] [[package]] name = "types-python-dateutil" version = "2.9.0.20251115" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6a/36/06d01fb52c0d57e9ad0c237654990920fa41195e4b3d640830dabf9eeb2f/types_python_dateutil-2.9.0.20251115.tar.gz", hash = "sha256:8a47f2c3920f52a994056b8786309b43143faa5a64d4cbb2722d6addabdf1a58", size = 16363 } +sdist = { url = "https://files.pythonhosted.org/packages/6a/36/06d01fb52c0d57e9ad0c237654990920fa41195e4b3d640830dabf9eeb2f/types_python_dateutil-2.9.0.20251115.tar.gz", hash = "sha256:8a47f2c3920f52a994056b8786309b43143faa5a64d4cbb2722d6addabdf1a58", size = 16363, upload-time = "2025-11-15T03:00:13.717Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/0b/56961d3ba517ed0df9b3a27bfda6514f3d01b28d499d1bce9068cfe4edd1/types_python_dateutil-2.9.0.20251115-py3-none-any.whl", hash = "sha256:9cf9c1c582019753b8639a081deefd7e044b9fa36bd8217f565c6c4e36ee0624", size = 18251 }, + { url = "https://files.pythonhosted.org/packages/43/0b/56961d3ba517ed0df9b3a27bfda6514f3d01b28d499d1bce9068cfe4edd1/types_python_dateutil-2.9.0.20251115-py3-none-any.whl", hash = "sha256:9cf9c1c582019753b8639a081deefd7e044b9fa36bd8217f565c6c4e36ee0624", size = 18251, upload-time = "2025-11-15T03:00:12.317Z" }, ] [[package]] name = "types-python-http-client" version = "3.3.7.20250708" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/55/a0/0ad93698a3ebc6846ca23aca20ff6f6f8ebe7b4f0c1de7f19e87c03dbe8f/types_python_http_client-3.3.7.20250708.tar.gz", hash = "sha256:5f85b32dc64671a4e5e016142169aa187c5abed0b196680944e4efd3d5ce3322", size = 7707 } +sdist = { url = "https://files.pythonhosted.org/packages/55/a0/0ad93698a3ebc6846ca23aca20ff6f6f8ebe7b4f0c1de7f19e87c03dbe8f/types_python_http_client-3.3.7.20250708.tar.gz", hash = "sha256:5f85b32dc64671a4e5e016142169aa187c5abed0b196680944e4efd3d5ce3322", size = 7707, upload-time = "2025-07-08T03:14:36.197Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/4f/b88274658cf489e35175be8571c970e9a1219713bafd8fc9e166d7351ecb/types_python_http_client-3.3.7.20250708-py3-none-any.whl", hash = "sha256:e2fc253859decab36713d82fc7f205868c3ddeaee79dbb55956ad9ca77abe12b", size = 8890 }, + { url = 
"https://files.pythonhosted.org/packages/85/4f/b88274658cf489e35175be8571c970e9a1219713bafd8fc9e166d7351ecb/types_python_http_client-3.3.7.20250708-py3-none-any.whl", hash = "sha256:e2fc253859decab36713d82fc7f205868c3ddeaee79dbb55956ad9ca77abe12b", size = 8890, upload-time = "2025-07-08T03:14:35.506Z" }, ] [[package]] name = "types-pytz" version = "2025.2.0.20251108" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/40/ff/c047ddc68c803b46470a357454ef76f4acd8c1088f5cc4891cdd909bfcf6/types_pytz-2025.2.0.20251108.tar.gz", hash = "sha256:fca87917836ae843f07129567b74c1929f1870610681b4c92cb86a3df5817bdb", size = 10961 } +sdist = { url = "https://files.pythonhosted.org/packages/40/ff/c047ddc68c803b46470a357454ef76f4acd8c1088f5cc4891cdd909bfcf6/types_pytz-2025.2.0.20251108.tar.gz", hash = "sha256:fca87917836ae843f07129567b74c1929f1870610681b4c92cb86a3df5817bdb", size = 10961, upload-time = "2025-11-08T02:55:57.001Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/c1/56ef16bf5dcd255155cc736d276efa6ae0a5c26fd685e28f0412a4013c01/types_pytz-2025.2.0.20251108-py3-none-any.whl", hash = "sha256:0f1c9792cab4eb0e46c52f8845c8f77cf1e313cb3d68bf826aa867fe4717d91c", size = 10116 }, + { url = "https://files.pythonhosted.org/packages/e7/c1/56ef16bf5dcd255155cc736d276efa6ae0a5c26fd685e28f0412a4013c01/types_pytz-2025.2.0.20251108-py3-none-any.whl", hash = "sha256:0f1c9792cab4eb0e46c52f8845c8f77cf1e313cb3d68bf826aa867fe4717d91c", size = 10116, upload-time = "2025-11-08T02:55:56.194Z" }, ] [[package]] name = "types-pywin32" version = "310.0.0.20250516" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/bc/c7be2934a37cc8c645c945ca88450b541e482c4df3ac51e5556377d34811/types_pywin32-310.0.0.20250516.tar.gz", hash = "sha256:91e5bfc033f65c9efb443722eff8101e31d690dd9a540fa77525590d3da9cc9d", size = 328459 } +sdist = { url = "https://files.pythonhosted.org/packages/6c/bc/c7be2934a37cc8c645c945ca88450b541e482c4df3ac51e5556377d34811/types_pywin32-310.0.0.20250516.tar.gz", hash = "sha256:91e5bfc033f65c9efb443722eff8101e31d690dd9a540fa77525590d3da9cc9d", size = 328459, upload-time = "2025-05-16T03:07:57.411Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/72/469e4cc32399dbe6c843e38fdb6d04fee755e984e137c0da502f74d3ac59/types_pywin32-310.0.0.20250516-py3-none-any.whl", hash = "sha256:f9ef83a1ec3e5aae2b0e24c5f55ab41272b5dfeaabb9a0451d33684c9545e41a", size = 390411 }, + { url = "https://files.pythonhosted.org/packages/9b/72/469e4cc32399dbe6c843e38fdb6d04fee755e984e137c0da502f74d3ac59/types_pywin32-310.0.0.20250516-py3-none-any.whl", hash = "sha256:f9ef83a1ec3e5aae2b0e24c5f55ab41272b5dfeaabb9a0451d33684c9545e41a", size = 390411, upload-time = "2025-05-16T03:07:56.282Z" }, ] [[package]] name = "types-pyyaml" version = "6.0.12.20250915" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522 } +sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522, upload-time = "2025-09-15T03:01:00.728Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338 }, + { url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338, upload-time = "2025-09-15T03:00:59.218Z" }, ] [[package]] @@ -6517,18 +6568,18 @@ dependencies = [ { name = "cryptography" }, { name = "types-pyopenssl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3a/95/c054d3ac940e8bac4ca216470c80c26688a0e79e09f520a942bb27da3386/types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e", size = 49679 } +sdist = { url = "https://files.pythonhosted.org/packages/3a/95/c054d3ac940e8bac4ca216470c80c26688a0e79e09f520a942bb27da3386/types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e", size = 49679, upload-time = "2024-10-04T02:43:59.224Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/55/82/7d25dce10aad92d2226b269bce2f85cfd843b4477cd50245d7d40ecf8f89/types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed", size = 58737 }, + { url = "https://files.pythonhosted.org/packages/55/82/7d25dce10aad92d2226b269bce2f85cfd843b4477cd50245d7d40ecf8f89/types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed", size = 58737, upload-time = "2024-10-04T02:43:57.968Z" }, ] [[package]] name = "types-regex" version = "2024.11.6.20250403" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/75/012b90c8557d3abb3b58a9073a94d211c8f75c9b2e26bf0d8af7ecf7bc78/types_regex-2024.11.6.20250403.tar.gz", hash = "sha256:3fdf2a70bbf830de4b3a28e9649a52d43dabb57cdb18fbfe2252eefb53666665", size = 12394 } +sdist = { url = "https://files.pythonhosted.org/packages/c7/75/012b90c8557d3abb3b58a9073a94d211c8f75c9b2e26bf0d8af7ecf7bc78/types_regex-2024.11.6.20250403.tar.gz", hash = "sha256:3fdf2a70bbf830de4b3a28e9649a52d43dabb57cdb18fbfe2252eefb53666665", size = 12394, upload-time = "2025-04-03T02:54:35.379Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/49/67200c4708f557be6aa4ecdb1fa212d67a10558c5240251efdc799cca22f/types_regex-2024.11.6.20250403-py3-none-any.whl", hash = "sha256:e22c0f67d73f4b4af6086a340f387b6f7d03bed8a0bb306224b75c51a29b0001", size = 10396 }, + { url = "https://files.pythonhosted.org/packages/61/49/67200c4708f557be6aa4ecdb1fa212d67a10558c5240251efdc799cca22f/types_regex-2024.11.6.20250403-py3-none-any.whl", hash = "sha256:e22c0f67d73f4b4af6086a340f387b6f7d03bed8a0bb306224b75c51a29b0001", size = 10396, upload-time = "2025-04-03T02:54:34.555Z" }, ] [[package]] @@ -6538,27 +6589,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/36/27/489922f4505975b11de2b5ad07b4fe1dca0bca9be81a703f26c5f3acfce5/types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d", size = 23113 } +sdist = { url = 
"https://files.pythonhosted.org/packages/36/27/489922f4505975b11de2b5ad07b4fe1dca0bca9be81a703f26c5f3acfce5/types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d", size = 23113, upload-time = "2025-09-13T02:40:02.309Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1", size = 20658 }, + { url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1", size = 20658, upload-time = "2025-09-13T02:40:01.115Z" }, ] [[package]] name = "types-s3transfer" version = "0.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/79/bf/b00dcbecb037c4999b83c8109b8096fe78f87f1266cadc4f95d4af196292/types_s3transfer-0.15.0.tar.gz", hash = "sha256:43a523e0c43a88e447dfda5f4f6b63bf3da85316fdd2625f650817f2b170b5f7", size = 14236 } +sdist = { url = "https://files.pythonhosted.org/packages/79/bf/b00dcbecb037c4999b83c8109b8096fe78f87f1266cadc4f95d4af196292/types_s3transfer-0.15.0.tar.gz", hash = "sha256:43a523e0c43a88e447dfda5f4f6b63bf3da85316fdd2625f650817f2b170b5f7", size = 14236, upload-time = "2025-11-21T21:16:26.553Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/39/39a322d7209cc259e3e27c4d498129e9583a2f3a8aea57eb1a9941cb5e9e/types_s3transfer-0.15.0-py3-none-any.whl", hash = "sha256:1e617b14a9d3ce5be565f4b187fafa1d96075546b52072121f8fda8e0a444aed", size = 19702 }, + { url = "https://files.pythonhosted.org/packages/8a/39/39a322d7209cc259e3e27c4d498129e9583a2f3a8aea57eb1a9941cb5e9e/types_s3transfer-0.15.0-py3-none-any.whl", hash = "sha256:1e617b14a9d3ce5be565f4b187fafa1d96075546b52072121f8fda8e0a444aed", size = 19702, upload-time = "2025-11-21T21:16:25.146Z" }, ] [[package]] name = "types-setuptools" version = "80.9.0.20250822" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/19/bd/1e5f949b7cb740c9f0feaac430e301b8f1c5f11a81e26324299ea671a237/types_setuptools-80.9.0.20250822.tar.gz", hash = "sha256:070ea7716968ec67a84c7f7768d9952ff24d28b65b6594797a464f1b3066f965", size = 41296 } +sdist = { url = "https://files.pythonhosted.org/packages/19/bd/1e5f949b7cb740c9f0feaac430e301b8f1c5f11a81e26324299ea671a237/types_setuptools-80.9.0.20250822.tar.gz", hash = "sha256:070ea7716968ec67a84c7f7768d9952ff24d28b65b6594797a464f1b3066f965", size = 41296, upload-time = "2025-08-22T03:02:08.771Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/2d/475bf15c1cdc172e7a0d665b6e373ebfb1e9bf734d3f2f543d668b07a142/types_setuptools-80.9.0.20250822-py3-none-any.whl", hash = "sha256:53bf881cb9d7e46ed12c76ef76c0aaf28cfe6211d3fab12e0b83620b1a8642c3", size = 63179 }, + { url = "https://files.pythonhosted.org/packages/b6/2d/475bf15c1cdc172e7a0d665b6e373ebfb1e9bf734d3f2f543d668b07a142/types_setuptools-80.9.0.20250822-py3-none-any.whl", hash = "sha256:53bf881cb9d7e46ed12c76ef76c0aaf28cfe6211d3fab12e0b83620b1a8642c3", size = 63179, upload-time = "2025-08-22T03:02:07.643Z" }, ] [[package]] @@ -6568,27 +6619,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/fa/19/7f28b10994433d43b9caa66f3b9bd6a0a9192b7ce8b5a7fc41534e54b821/types_shapely-2.1.0.20250917.tar.gz", hash = "sha256:5c56670742105aebe40c16414390d35fcaa55d6f774d328c1a18273ab0e2134a", size = 26363 } +sdist = { url = "https://files.pythonhosted.org/packages/fa/19/7f28b10994433d43b9caa66f3b9bd6a0a9192b7ce8b5a7fc41534e54b821/types_shapely-2.1.0.20250917.tar.gz", hash = "sha256:5c56670742105aebe40c16414390d35fcaa55d6f774d328c1a18273ab0e2134a", size = 26363, upload-time = "2025-09-17T02:47:44.604Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/a9/554ac40810e530263b6163b30a2b623bc16aae3fb64416f5d2b3657d0729/types_shapely-2.1.0.20250917-py3-none-any.whl", hash = "sha256:9334a79339504d39b040426be4938d422cec419168414dc74972aa746a8bf3a1", size = 37813 }, + { url = "https://files.pythonhosted.org/packages/e5/a9/554ac40810e530263b6163b30a2b623bc16aae3fb64416f5d2b3657d0729/types_shapely-2.1.0.20250917-py3-none-any.whl", hash = "sha256:9334a79339504d39b040426be4938d422cec419168414dc74972aa746a8bf3a1", size = 37813, upload-time = "2025-09-17T02:47:43.788Z" }, ] [[package]] name = "types-simplejson" version = "3.20.0.20250822" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/df/6b/96d43a90cd202bd552cdd871858a11c138fe5ef11aeb4ed8e8dc51389257/types_simplejson-3.20.0.20250822.tar.gz", hash = "sha256:2b0bfd57a6beed3b932fd2c3c7f8e2f48a7df3978c9bba43023a32b3741a95b0", size = 10608 } +sdist = { url = "https://files.pythonhosted.org/packages/df/6b/96d43a90cd202bd552cdd871858a11c138fe5ef11aeb4ed8e8dc51389257/types_simplejson-3.20.0.20250822.tar.gz", hash = "sha256:2b0bfd57a6beed3b932fd2c3c7f8e2f48a7df3978c9bba43023a32b3741a95b0", size = 10608, upload-time = "2025-08-22T03:03:35.36Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/9f/8e2c9e6aee9a2ff34f2ffce6ccd9c26edeef6dfd366fde611dc2e2c00ab9/types_simplejson-3.20.0.20250822-py3-none-any.whl", hash = "sha256:b5e63ae220ac7a1b0bb9af43b9cb8652237c947981b2708b0c776d3b5d8fa169", size = 10417 }, + { url = "https://files.pythonhosted.org/packages/3c/9f/8e2c9e6aee9a2ff34f2ffce6ccd9c26edeef6dfd366fde611dc2e2c00ab9/types_simplejson-3.20.0.20250822-py3-none-any.whl", hash = "sha256:b5e63ae220ac7a1b0bb9af43b9cb8652237c947981b2708b0c776d3b5d8fa169", size = 10417, upload-time = "2025-08-22T03:03:34.485Z" }, ] [[package]] name = "types-six" version = "1.17.0.20251009" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/f7/448215bc7695cfa0c8a7e0dcfa54fe31b1d52fb87004fed32e659dd85c80/types_six-1.17.0.20251009.tar.gz", hash = "sha256:efe03064ecd0ffb0f7afe133990a2398d8493d8d1c1cc10ff3dfe476d57ba44f", size = 15552 } +sdist = { url = "https://files.pythonhosted.org/packages/ee/f7/448215bc7695cfa0c8a7e0dcfa54fe31b1d52fb87004fed32e659dd85c80/types_six-1.17.0.20251009.tar.gz", hash = "sha256:efe03064ecd0ffb0f7afe133990a2398d8493d8d1c1cc10ff3dfe476d57ba44f", size = 15552, upload-time = "2025-10-09T02:54:26.02Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/2f/94baa623421940e3eb5d2fc63570ebb046f2bb4d9573b8787edab3ed2526/types_six-1.17.0.20251009-py3-none-any.whl", hash = "sha256:2494f4c2a58ada0edfe01ea84b58468732e43394c572d9cf5b1dd06d86c487a3", size = 19935 }, + { url = "https://files.pythonhosted.org/packages/b8/2f/94baa623421940e3eb5d2fc63570ebb046f2bb4d9573b8787edab3ed2526/types_six-1.17.0.20251009-py3-none-any.whl", hash = 
"sha256:2494f4c2a58ada0edfe01ea84b58468732e43394c572d9cf5b1dd06d86c487a3", size = 19935, upload-time = "2025-10-09T02:54:25.096Z" }, ] [[package]] @@ -6600,9 +6651,9 @@ dependencies = [ { name = "types-protobuf" }, { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0d/0a/13bde03fb5a23faaadcca2d6914f865e444334133902310ea05e6ade780c/types_tensorflow-2.18.0.20251008.tar.gz", hash = "sha256:8db03d4dd391a362e2ea796ffdbccb03c082127606d4d852edb7ed9504745933", size = 257550 } +sdist = { url = "https://files.pythonhosted.org/packages/0d/0a/13bde03fb5a23faaadcca2d6914f865e444334133902310ea05e6ade780c/types_tensorflow-2.18.0.20251008.tar.gz", hash = "sha256:8db03d4dd391a362e2ea796ffdbccb03c082127606d4d852edb7ed9504745933", size = 257550, upload-time = "2025-10-08T02:51:51.104Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/cc/e50e49db621b0cf03c1f3d10be47389de41a02dc9924c3a83a9c1a55bf28/types_tensorflow-2.18.0.20251008-py3-none-any.whl", hash = "sha256:d6b0dd4d81ac6d9c5af803ebcc8ce0f65c5850c063e8b9789dc828898944b5f4", size = 329023 }, + { url = "https://files.pythonhosted.org/packages/66/cc/e50e49db621b0cf03c1f3d10be47389de41a02dc9924c3a83a9c1a55bf28/types_tensorflow-2.18.0.20251008-py3-none-any.whl", hash = "sha256:d6b0dd4d81ac6d9c5af803ebcc8ce0f65c5850c063e8b9789dc828898944b5f4", size = 329023, upload-time = "2025-10-08T02:51:50.024Z" }, ] [[package]] @@ -6612,36 +6663,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/d0/cf498fc630d9fdaf2428b93e60b0e67b08008fec22b78716b8323cf644dc/types_tqdm-4.67.0.20250809.tar.gz", hash = "sha256:02bf7ab91256080b9c4c63f9f11b519c27baaf52718e5fdab9e9606da168d500", size = 17200 } +sdist = { url = "https://files.pythonhosted.org/packages/fb/d0/cf498fc630d9fdaf2428b93e60b0e67b08008fec22b78716b8323cf644dc/types_tqdm-4.67.0.20250809.tar.gz", hash = "sha256:02bf7ab91256080b9c4c63f9f11b519c27baaf52718e5fdab9e9606da168d500", size = 17200, upload-time = "2025-08-09T03:17:43.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/13/3ff0781445d7c12730befce0fddbbc7a76e56eb0e7029446f2853238360a/types_tqdm-4.67.0.20250809-py3-none-any.whl", hash = "sha256:1a73053b31fcabf3c1f3e2a9d5ecdba0f301bde47a418cd0e0bdf774827c5c57", size = 24020 }, + { url = "https://files.pythonhosted.org/packages/3f/13/3ff0781445d7c12730befce0fddbbc7a76e56eb0e7029446f2853238360a/types_tqdm-4.67.0.20250809-py3-none-any.whl", hash = "sha256:1a73053b31fcabf3c1f3e2a9d5ecdba0f301bde47a418cd0e0bdf774827c5c57", size = 24020, upload-time = "2025-08-09T03:17:42.453Z" }, ] [[package]] name = "types-ujson" version = "5.10.0.20250822" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5c/bd/d372d44534f84864a96c19a7059d9b4d29db8541828b8b9dc3040f7a46d0/types_ujson-5.10.0.20250822.tar.gz", hash = "sha256:0a795558e1f78532373cf3f03f35b1f08bc60d52d924187b97995ee3597ba006", size = 8437 } +sdist = { url = "https://files.pythonhosted.org/packages/5c/bd/d372d44534f84864a96c19a7059d9b4d29db8541828b8b9dc3040f7a46d0/types_ujson-5.10.0.20250822.tar.gz", hash = "sha256:0a795558e1f78532373cf3f03f35b1f08bc60d52d924187b97995ee3597ba006", size = 8437, upload-time = "2025-08-22T03:02:19.433Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/f2/d812543c350674d8b3f6e17c8922248ee3bb752c2a76f64beb8c538b40cf/types_ujson-5.10.0.20250822-py3-none-any.whl", hash = 
"sha256:3e9e73a6dc62ccc03449d9ac2c580cd1b7a8e4873220db498f7dd056754be080", size = 7657 }, + { url = "https://files.pythonhosted.org/packages/d7/f2/d812543c350674d8b3f6e17c8922248ee3bb752c2a76f64beb8c538b40cf/types_ujson-5.10.0.20250822-py3-none-any.whl", hash = "sha256:3e9e73a6dc62ccc03449d9ac2c580cd1b7a8e4873220db498f7dd056754be080", size = 7657, upload-time = "2025-08-22T03:02:18.699Z" }, ] [[package]] name = "types-webencodings" version = "0.5.0.20251108" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/66/d6/75e381959a2706644f02f7527d264de3216cf6ed333f98eff95954d78e07/types_webencodings-0.5.0.20251108.tar.gz", hash = "sha256:2378e2ceccced3d41bb5e21387586e7b5305e11519fc6b0659c629f23b2e5de4", size = 7470 } +sdist = { url = "https://files.pythonhosted.org/packages/66/d6/75e381959a2706644f02f7527d264de3216cf6ed333f98eff95954d78e07/types_webencodings-0.5.0.20251108.tar.gz", hash = "sha256:2378e2ceccced3d41bb5e21387586e7b5305e11519fc6b0659c629f23b2e5de4", size = 7470, upload-time = "2025-11-08T02:56:00.132Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/4e/8fcf33e193ce4af03c19d0e08483cf5f0838e883f800909c6bc61cb361be/types_webencodings-0.5.0.20251108-py3-none-any.whl", hash = "sha256:e21f81ff750795faffddaffd70a3d8bfff77d006f22c27e393eb7812586249d8", size = 8715 }, + { url = "https://files.pythonhosted.org/packages/45/4e/8fcf33e193ce4af03c19d0e08483cf5f0838e883f800909c6bc61cb361be/types_webencodings-0.5.0.20251108-py3-none-any.whl", hash = "sha256:e21f81ff750795faffddaffd70a3d8bfff77d006f22c27e393eb7812586249d8", size = 8715, upload-time = "2025-11-08T02:55:59.456Z" }, ] [[package]] name = "typing-extensions" version = "4.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] [[package]] @@ -6652,9 +6703,9 @@ dependencies = [ { name = "mypy-extensions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825 } +sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825, upload-time = 
"2023-05-24T20:25:47.612Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827 }, + { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827, upload-time = "2023-05-24T20:25:45.287Z" }, ] [[package]] @@ -6664,18 +6715,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949 } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611 }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] name = "tzdata" version = "2025.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 }, + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, ] [[package]] @@ -6685,37 +6736,37 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "tzdata", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761 } +sdist = { url = 
"https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026 }, + { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" }, ] [[package]] name = "ujson" version = "5.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6e/54/6f2bdac7117e89a47de4511c9f01732a283457ab1bf856e1e51aa861619e/ujson-5.9.0.tar.gz", hash = "sha256:89cc92e73d5501b8a7f48575eeb14ad27156ad092c2e9fc7e3cf949f07e75532", size = 7154214 } +sdist = { url = "https://files.pythonhosted.org/packages/6e/54/6f2bdac7117e89a47de4511c9f01732a283457ab1bf856e1e51aa861619e/ujson-5.9.0.tar.gz", hash = "sha256:89cc92e73d5501b8a7f48575eeb14ad27156ad092c2e9fc7e3cf949f07e75532", size = 7154214, upload-time = "2023-12-10T22:50:34.812Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/ca/ae3a6ca5b4f82ce654d6ac3dde5e59520537e20939592061ba506f4e569a/ujson-5.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b23bbb46334ce51ddb5dded60c662fbf7bb74a37b8f87221c5b0fec1ec6454b", size = 57753 }, - { url = "https://files.pythonhosted.org/packages/34/5f/c27fa9a1562c96d978c39852b48063c3ca480758f3088dcfc0f3b09f8e93/ujson-5.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6974b3a7c17bbf829e6c3bfdc5823c67922e44ff169851a755eab79a3dd31ec0", size = 54092 }, - { url = "https://files.pythonhosted.org/packages/19/f3/1431713de9e5992e5e33ba459b4de28f83904233958855d27da820a101f9/ujson-5.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5964ea916edfe24af1f4cc68488448fbb1ec27a3ddcddc2b236da575c12c8ae", size = 51675 }, - { url = "https://files.pythonhosted.org/packages/d3/93/de6fff3ae06351f3b1c372f675fe69bc180f93d237c9e496c05802173dd6/ujson-5.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ba7cac47dd65ff88571eceeff48bf30ed5eb9c67b34b88cb22869b7aa19600d", size = 53246 }, - { url = "https://files.pythonhosted.org/packages/26/73/db509fe1d7da62a15c0769c398cec66bdfc61a8bdffaf7dfa9d973e3d65c/ujson-5.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bbd91a151a8f3358c29355a491e915eb203f607267a25e6ab10531b3b157c5e", size = 58182 }, - { url = "https://files.pythonhosted.org/packages/fc/a8/6be607fa3e1fa3e1c9b53f5de5acad33b073b6cc9145803e00bcafa729a8/ujson-5.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:829a69d451a49c0de14a9fecb2a2d544a9b2c884c2b542adb243b683a6f15908", size = 584493 }, - { url = "https://files.pythonhosted.org/packages/c8/c7/33822c2f1a8175e841e2bc378ffb2c1109ce9280f14cedb1b2fa0caf3145/ujson-5.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a807ae73c46ad5db161a7e883eec0fbe1bebc6a54890152ccc63072c4884823b", size = 656038 }, - { url = 
"https://files.pythonhosted.org/packages/51/b8/5309fbb299d5fcac12bbf3db20896db5178392904abe6b992da233dc69d6/ujson-5.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8fc2aa18b13d97b3c8ccecdf1a3c405f411a6e96adeee94233058c44ff92617d", size = 597643 }, - { url = "https://files.pythonhosted.org/packages/5f/64/7b63043b95dd78feed401b9973958af62645a6d19b72b6e83d1ea5af07e0/ujson-5.9.0-cp311-cp311-win32.whl", hash = "sha256:70e06849dfeb2548be48fdd3ceb53300640bc8100c379d6e19d78045e9c26120", size = 38342 }, - { url = "https://files.pythonhosted.org/packages/7a/13/a3cd1fc3a1126d30b558b6235c05e2d26eeaacba4979ee2fd2b5745c136d/ujson-5.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:7309d063cd392811acc49b5016728a5e1b46ab9907d321ebbe1c2156bc3c0b99", size = 41923 }, - { url = "https://files.pythonhosted.org/packages/16/7e/c37fca6cd924931fa62d615cdbf5921f34481085705271696eff38b38867/ujson-5.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:20509a8c9f775b3a511e308bbe0b72897ba6b800767a7c90c5cca59d20d7c42c", size = 57834 }, - { url = "https://files.pythonhosted.org/packages/fb/44/2753e902ee19bf6ccaf0bda02f1f0037f92a9769a5d31319905e3de645b4/ujson-5.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b28407cfe315bd1b34f1ebe65d3bd735d6b36d409b334100be8cdffae2177b2f", size = 54119 }, - { url = "https://files.pythonhosted.org/packages/d2/06/2317433e394450bc44afe32b6c39d5a51014da4c6f6cfc2ae7bf7b4a2922/ujson-5.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d302bd17989b6bd90d49bade66943c78f9e3670407dbc53ebcf61271cadc399", size = 51658 }, - { url = "https://files.pythonhosted.org/packages/5b/3a/2acf0da085d96953580b46941504aa3c91a1dd38701b9e9bfa43e2803467/ujson-5.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f21315f51e0db8ee245e33a649dd2d9dce0594522de6f278d62f15f998e050e", size = 53370 }, - { url = "https://files.pythonhosted.org/packages/03/32/737e6c4b1841720f88ae88ec91f582dc21174bd40742739e1fa16a0c9ffa/ujson-5.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5635b78b636a54a86fdbf6f027e461aa6c6b948363bdf8d4fbb56a42b7388320", size = 58278 }, - { url = "https://files.pythonhosted.org/packages/8a/dc/3fda97f1ad070ccf2af597fb67dde358bc698ffecebe3bc77991d60e4fe5/ujson-5.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82b5a56609f1235d72835ee109163c7041b30920d70fe7dac9176c64df87c164", size = 584418 }, - { url = "https://files.pythonhosted.org/packages/d7/57/e4083d774fcd8ff3089c0ff19c424abe33f23e72c6578a8172bf65131992/ujson-5.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5ca35f484622fd208f55041b042d9d94f3b2c9c5add4e9af5ee9946d2d30db01", size = 656126 }, - { url = "https://files.pythonhosted.org/packages/0d/c3/8c6d5f6506ca9fcedd5a211e30a7d5ee053dc05caf23dae650e1f897effb/ujson-5.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:829b824953ebad76d46e4ae709e940bb229e8999e40881338b3cc94c771b876c", size = 597795 }, - { url = "https://files.pythonhosted.org/packages/34/5a/a231f0cd305a34cf2d16930304132db3a7a8c3997b367dd38fc8f8dfae36/ujson-5.9.0-cp312-cp312-win32.whl", hash = "sha256:25fa46e4ff0a2deecbcf7100af3a5d70090b461906f2299506485ff31d9ec437", size = 38495 }, - { url = "https://files.pythonhosted.org/packages/30/b7/18b841b44760ed298acdb150608dccdc045c41655e0bae4441f29bcab872/ujson-5.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:60718f1720a61560618eff3b56fd517d107518d3c0160ca7a5a66ac949c6cf1c", size = 42088 }, + { url = 
"https://files.pythonhosted.org/packages/c0/ca/ae3a6ca5b4f82ce654d6ac3dde5e59520537e20939592061ba506f4e569a/ujson-5.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b23bbb46334ce51ddb5dded60c662fbf7bb74a37b8f87221c5b0fec1ec6454b", size = 57753, upload-time = "2023-12-10T22:49:03.939Z" }, + { url = "https://files.pythonhosted.org/packages/34/5f/c27fa9a1562c96d978c39852b48063c3ca480758f3088dcfc0f3b09f8e93/ujson-5.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6974b3a7c17bbf829e6c3bfdc5823c67922e44ff169851a755eab79a3dd31ec0", size = 54092, upload-time = "2023-12-10T22:49:05.194Z" }, + { url = "https://files.pythonhosted.org/packages/19/f3/1431713de9e5992e5e33ba459b4de28f83904233958855d27da820a101f9/ujson-5.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5964ea916edfe24af1f4cc68488448fbb1ec27a3ddcddc2b236da575c12c8ae", size = 51675, upload-time = "2023-12-10T22:49:06.449Z" }, + { url = "https://files.pythonhosted.org/packages/d3/93/de6fff3ae06351f3b1c372f675fe69bc180f93d237c9e496c05802173dd6/ujson-5.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ba7cac47dd65ff88571eceeff48bf30ed5eb9c67b34b88cb22869b7aa19600d", size = 53246, upload-time = "2023-12-10T22:49:07.691Z" }, + { url = "https://files.pythonhosted.org/packages/26/73/db509fe1d7da62a15c0769c398cec66bdfc61a8bdffaf7dfa9d973e3d65c/ujson-5.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bbd91a151a8f3358c29355a491e915eb203f607267a25e6ab10531b3b157c5e", size = 58182, upload-time = "2023-12-10T22:49:08.89Z" }, + { url = "https://files.pythonhosted.org/packages/fc/a8/6be607fa3e1fa3e1c9b53f5de5acad33b073b6cc9145803e00bcafa729a8/ujson-5.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:829a69d451a49c0de14a9fecb2a2d544a9b2c884c2b542adb243b683a6f15908", size = 584493, upload-time = "2023-12-10T22:49:11.043Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c7/33822c2f1a8175e841e2bc378ffb2c1109ce9280f14cedb1b2fa0caf3145/ujson-5.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a807ae73c46ad5db161a7e883eec0fbe1bebc6a54890152ccc63072c4884823b", size = 656038, upload-time = "2023-12-10T22:49:12.651Z" }, + { url = "https://files.pythonhosted.org/packages/51/b8/5309fbb299d5fcac12bbf3db20896db5178392904abe6b992da233dc69d6/ujson-5.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8fc2aa18b13d97b3c8ccecdf1a3c405f411a6e96adeee94233058c44ff92617d", size = 597643, upload-time = "2023-12-10T22:49:14.883Z" }, + { url = "https://files.pythonhosted.org/packages/5f/64/7b63043b95dd78feed401b9973958af62645a6d19b72b6e83d1ea5af07e0/ujson-5.9.0-cp311-cp311-win32.whl", hash = "sha256:70e06849dfeb2548be48fdd3ceb53300640bc8100c379d6e19d78045e9c26120", size = 38342, upload-time = "2023-12-10T22:49:16.854Z" }, + { url = "https://files.pythonhosted.org/packages/7a/13/a3cd1fc3a1126d30b558b6235c05e2d26eeaacba4979ee2fd2b5745c136d/ujson-5.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:7309d063cd392811acc49b5016728a5e1b46ab9907d321ebbe1c2156bc3c0b99", size = 41923, upload-time = "2023-12-10T22:49:17.983Z" }, + { url = "https://files.pythonhosted.org/packages/16/7e/c37fca6cd924931fa62d615cdbf5921f34481085705271696eff38b38867/ujson-5.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:20509a8c9f775b3a511e308bbe0b72897ba6b800767a7c90c5cca59d20d7c42c", size = 57834, upload-time = "2023-12-10T22:49:19.799Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/44/2753e902ee19bf6ccaf0bda02f1f0037f92a9769a5d31319905e3de645b4/ujson-5.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b28407cfe315bd1b34f1ebe65d3bd735d6b36d409b334100be8cdffae2177b2f", size = 54119, upload-time = "2023-12-10T22:49:21.039Z" }, + { url = "https://files.pythonhosted.org/packages/d2/06/2317433e394450bc44afe32b6c39d5a51014da4c6f6cfc2ae7bf7b4a2922/ujson-5.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d302bd17989b6bd90d49bade66943c78f9e3670407dbc53ebcf61271cadc399", size = 51658, upload-time = "2023-12-10T22:49:22.494Z" }, + { url = "https://files.pythonhosted.org/packages/5b/3a/2acf0da085d96953580b46941504aa3c91a1dd38701b9e9bfa43e2803467/ujson-5.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f21315f51e0db8ee245e33a649dd2d9dce0594522de6f278d62f15f998e050e", size = 53370, upload-time = "2023-12-10T22:49:24.045Z" }, + { url = "https://files.pythonhosted.org/packages/03/32/737e6c4b1841720f88ae88ec91f582dc21174bd40742739e1fa16a0c9ffa/ujson-5.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5635b78b636a54a86fdbf6f027e461aa6c6b948363bdf8d4fbb56a42b7388320", size = 58278, upload-time = "2023-12-10T22:49:25.261Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dc/3fda97f1ad070ccf2af597fb67dde358bc698ffecebe3bc77991d60e4fe5/ujson-5.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82b5a56609f1235d72835ee109163c7041b30920d70fe7dac9176c64df87c164", size = 584418, upload-time = "2023-12-10T22:49:27.573Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/e4083d774fcd8ff3089c0ff19c424abe33f23e72c6578a8172bf65131992/ujson-5.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5ca35f484622fd208f55041b042d9d94f3b2c9c5add4e9af5ee9946d2d30db01", size = 656126, upload-time = "2023-12-10T22:49:29.509Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c3/8c6d5f6506ca9fcedd5a211e30a7d5ee053dc05caf23dae650e1f897effb/ujson-5.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:829b824953ebad76d46e4ae709e940bb229e8999e40881338b3cc94c771b876c", size = 597795, upload-time = "2023-12-10T22:49:31.029Z" }, + { url = "https://files.pythonhosted.org/packages/34/5a/a231f0cd305a34cf2d16930304132db3a7a8c3997b367dd38fc8f8dfae36/ujson-5.9.0-cp312-cp312-win32.whl", hash = "sha256:25fa46e4ff0a2deecbcf7100af3a5d70090b461906f2299506485ff31d9ec437", size = 38495, upload-time = "2023-12-10T22:49:33.2Z" }, + { url = "https://files.pythonhosted.org/packages/30/b7/18b841b44760ed298acdb150608dccdc045c41655e0bae4441f29bcab872/ujson-5.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:60718f1720a61560618eff3b56fd517d107518d3c0160ca7a5a66ac949c6cf1c", size = 42088, upload-time = "2023-12-10T22:49:34.921Z" }, ] [[package]] @@ -6745,9 +6796,9 @@ dependencies = [ { name = "unstructured-client" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/64/31/98c4c78e305d1294888adf87fd5ee30577a4c393951341ca32b43f167f1e/unstructured-0.16.25.tar.gz", hash = "sha256:73b9b0f51dbb687af572ecdb849a6811710b9cac797ddeab8ee80fa07d8aa5e6", size = 1683097 } +sdist = { url = "https://files.pythonhosted.org/packages/64/31/98c4c78e305d1294888adf87fd5ee30577a4c393951341ca32b43f167f1e/unstructured-0.16.25.tar.gz", hash = "sha256:73b9b0f51dbb687af572ecdb849a6811710b9cac797ddeab8ee80fa07d8aa5e6", size = 1683097, upload-time = "2025-03-07T11:19:39.507Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/12/4f/ad08585b5c8a33c82ea119494c4d3023f4796958c56e668b15cc282ec0a0/unstructured-0.16.25-py3-none-any.whl", hash = "sha256:14719ccef2830216cf1c5bf654f75e2bf07b17ca5dcee9da5ac74618130fd337", size = 1769286 }, + { url = "https://files.pythonhosted.org/packages/12/4f/ad08585b5c8a33c82ea119494c4d3023f4796958c56e668b15cc282ec0a0/unstructured-0.16.25-py3-none-any.whl", hash = "sha256:14719ccef2830216cf1c5bf654f75e2bf07b17ca5dcee9da5ac74618130fd337", size = 1769286, upload-time = "2025-03-07T11:19:37.299Z" }, ] [package.optional-dependencies] @@ -6780,9 +6831,9 @@ dependencies = [ { name = "pypdf" }, { name = "requests-toolbelt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a4/8f/43c9a936a153e62f18e7629128698feebd81d2cfff2835febc85377b8eb8/unstructured_client-0.42.4.tar.gz", hash = "sha256:144ecd231a11d091cdc76acf50e79e57889269b8c9d8b9df60e74cf32ac1ba5e", size = 91404 } +sdist = { url = "https://files.pythonhosted.org/packages/a4/8f/43c9a936a153e62f18e7629128698feebd81d2cfff2835febc85377b8eb8/unstructured_client-0.42.4.tar.gz", hash = "sha256:144ecd231a11d091cdc76acf50e79e57889269b8c9d8b9df60e74cf32ac1ba5e", size = 91404, upload-time = "2025-11-14T16:59:25.131Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/6c/7c69e4353e5bdd05fc247c2ec1d840096eb928975697277b015c49405b0f/unstructured_client-0.42.4-py3-none-any.whl", hash = "sha256:fc6341344dd2f2e2aed793636b5f4e6204cad741ff2253d5a48ff2f2bccb8e9a", size = 207863 }, + { url = "https://files.pythonhosted.org/packages/5e/6c/7c69e4353e5bdd05fc247c2ec1d840096eb928975697277b015c49405b0f/unstructured_client-0.42.4-py3-none-any.whl", hash = "sha256:fc6341344dd2f2e2aed793636b5f4e6204cad741ff2253d5a48ff2f2bccb8e9a", size = 207863, upload-time = "2025-11-14T16:59:23.674Z" }, ] [[package]] @@ -6792,36 +6843,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/94/a6/a9178fef247687917701a60eb66542eb5361c58af40c033ba8174ff7366d/upstash_vector-0.6.0.tar.gz", hash = "sha256:a716ed4d0251362208518db8b194158a616d37d1ccbb1155f619df690599e39b", size = 15075 } +sdist = { url = "https://files.pythonhosted.org/packages/94/a6/a9178fef247687917701a60eb66542eb5361c58af40c033ba8174ff7366d/upstash_vector-0.6.0.tar.gz", hash = "sha256:a716ed4d0251362208518db8b194158a616d37d1ccbb1155f619df690599e39b", size = 15075, upload-time = "2024-09-27T12:02:13.533Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/45/95073b83b7fd7b83f10ea314f197bae3989bfe022e736b90145fe9ea4362/upstash_vector-0.6.0-py3-none-any.whl", hash = "sha256:d0bdad7765b8a7f5c205b7a9c81ca4b9a4cee3ee4952afc7d5ea5fb76c3f3c3c", size = 15061 }, + { url = "https://files.pythonhosted.org/packages/5d/45/95073b83b7fd7b83f10ea314f197bae3989bfe022e736b90145fe9ea4362/upstash_vector-0.6.0-py3-none-any.whl", hash = "sha256:d0bdad7765b8a7f5c205b7a9c81ca4b9a4cee3ee4952afc7d5ea5fb76c3f3c3c", size = 15061, upload-time = "2024-09-27T12:02:12.041Z" }, ] [[package]] name = "uritemplate" version = "4.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/60/f174043244c5306c9988380d2cb10009f91563fc4b31293d27e17201af56/uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e", size = 33267 } +sdist = { url = "https://files.pythonhosted.org/packages/98/60/f174043244c5306c9988380d2cb10009f91563fc4b31293d27e17201af56/uritemplate-4.2.0.tar.gz", 
hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e", size = 33267, upload-time = "2025-06-02T15:12:06.318Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/99/3ae339466c9183ea5b8ae87b34c0b897eda475d2aec2307cae60e5cd4f29/uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686", size = 11488 }, + { url = "https://files.pythonhosted.org/packages/a9/99/3ae339466c9183ea5b8ae87b34c0b897eda475d2aec2307cae60e5cd4f29/uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686", size = 11488, upload-time = "2025-06-02T15:12:03.405Z" }, ] [[package]] name = "urllib3" version = "2.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/43/554c2569b62f49350597348fc3ac70f786e3c32e7f19d266e19817812dd3/urllib3-2.6.0.tar.gz", hash = "sha256:cb9bcef5a4b345d5da5d145dc3e30834f58e8018828cbc724d30b4cb7d4d49f1", size = 432585 } +sdist = { url = "https://files.pythonhosted.org/packages/1c/43/554c2569b62f49350597348fc3ac70f786e3c32e7f19d266e19817812dd3/urllib3-2.6.0.tar.gz", hash = "sha256:cb9bcef5a4b345d5da5d145dc3e30834f58e8018828cbc724d30b4cb7d4d49f1", size = 432585, upload-time = "2025-12-05T15:08:47.885Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/1a/9ffe814d317c5224166b23e7c47f606d6e473712a2fad0f704ea9b99f246/urllib3-2.6.0-py3-none-any.whl", hash = "sha256:c90f7a39f716c572c4e3e58509581ebd83f9b59cced005b7db7ad2d22b0db99f", size = 131083 }, + { url = "https://files.pythonhosted.org/packages/56/1a/9ffe814d317c5224166b23e7c47f606d6e473712a2fad0f704ea9b99f246/urllib3-2.6.0-py3-none-any.whl", hash = "sha256:c90f7a39f716c572c4e3e58509581ebd83f9b59cced005b7db7ad2d22b0db99f", size = 131083, upload-time = "2025-12-05T15:08:45.983Z" }, ] [[package]] name = "uuid6" version = "2025.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/b7/4c0f736ca824b3a25b15e8213d1bcfc15f8ac2ae48d1b445b310892dc4da/uuid6-2025.0.1.tar.gz", hash = "sha256:cd0af94fa428675a44e32c5319ec5a3485225ba2179eefcf4c3f205ae30a81bd", size = 13932 } +sdist = { url = "https://files.pythonhosted.org/packages/ca/b7/4c0f736ca824b3a25b15e8213d1bcfc15f8ac2ae48d1b445b310892dc4da/uuid6-2025.0.1.tar.gz", hash = "sha256:cd0af94fa428675a44e32c5319ec5a3485225ba2179eefcf4c3f205ae30a81bd", size = 13932, upload-time = "2025-07-04T18:30:35.186Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/b2/93faaab7962e2aa8d6e174afb6f76be2ca0ce89fde14d3af835acebcaa59/uuid6-2025.0.1-py3-none-any.whl", hash = "sha256:80530ce4d02a93cdf82e7122ca0da3ebbbc269790ec1cb902481fa3e9cc9ff99", size = 6979 }, + { url = "https://files.pythonhosted.org/packages/3d/b2/93faaab7962e2aa8d6e174afb6f76be2ca0ce89fde14d3af835acebcaa59/uuid6-2025.0.1-py3-none-any.whl", hash = "sha256:80530ce4d02a93cdf82e7122ca0da3ebbbc269790ec1cb902481fa3e9cc9ff99", size = 6979, upload-time = "2025-07-04T18:30:34.001Z" }, ] [[package]] @@ -6832,9 +6883,9 @@ dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cb/ce/f06b84e2697fef4688ca63bdb2fdf113ca0a3be33f94488f2cadb690b0cf/uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d", size = 80605 } +sdist = { url = 
"https://files.pythonhosted.org/packages/cb/ce/f06b84e2697fef4688ca63bdb2fdf113ca0a3be33f94488f2cadb690b0cf/uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d", size = 80605, upload-time = "2025-10-18T13:46:44.63Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109 }, + { url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109, upload-time = "2025-10-18T13:46:42.958Z" }, ] [package.optional-dependencies] @@ -6852,38 +6903,38 @@ standard = [ name = "uvloop" version = "0.22.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250 } +sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420 }, - { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677 }, - { url = "https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819 }, - { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529 }, - { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267 }, - { url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105 }, - { url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936 }, - { url = 
"https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769 }, - { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413 }, - { url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307 }, - { url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970 }, - { url = "https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343 }, + { url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" }, + { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" }, + { url = "https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" }, + { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" }, + { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" }, + { url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = "2025-10-16T22:16:28.252Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" }, + { url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" }, + { url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307, upload-time = "2025-10-16T22:16:32.917Z" }, + { url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970, upload-time = "2025-10-16T22:16:34.015Z" }, + { url = "https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343, upload-time = "2025-10-16T22:16:35.149Z" }, ] [[package]] name = "validators" version = "0.35.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/53/66/a435d9ae49850b2f071f7ebd8119dd4e84872b01630d6736761e6e7fd847/validators-0.35.0.tar.gz", hash = "sha256:992d6c48a4e77c81f1b4daba10d16c3a9bb0dbb79b3a19ea847ff0928e70497a", size = 73399 } +sdist = { url = "https://files.pythonhosted.org/packages/53/66/a435d9ae49850b2f071f7ebd8119dd4e84872b01630d6736761e6e7fd847/validators-0.35.0.tar.gz", hash = "sha256:992d6c48a4e77c81f1b4daba10d16c3a9bb0dbb79b3a19ea847ff0928e70497a", size = 73399, upload-time = "2025-05-01T05:42:06.7Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/6e/3e955517e22cbdd565f2f8b2e73d52528b14b8bcfdb04f62466b071de847/validators-0.35.0-py3-none-any.whl", hash = "sha256:e8c947097eae7892cb3d26868d637f79f47b4a0554bc6b80065dfe5aac3705dd", size = 44712 }, + { url = "https://files.pythonhosted.org/packages/fa/6e/3e955517e22cbdd565f2f8b2e73d52528b14b8bcfdb04f62466b071de847/validators-0.35.0-py3-none-any.whl", hash = "sha256:e8c947097eae7892cb3d26868d637f79f47b4a0554bc6b80065dfe5aac3705dd", size = 44712, upload-time = "2025-05-01T05:42:04.203Z" }, ] [[package]] name = "vine" version = "5.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980 } +sdist = { url = 
"https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980, upload-time = "2023-11-05T08:46:53.857Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636 }, + { url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636, upload-time = "2023-11-05T08:46:51.205Z" }, ] [[package]] @@ -6899,9 +6950,9 @@ dependencies = [ { name = "retry" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8a/c5/62f2fbf0359b31d4e8f766e9ee3096c23d08fc294df1ab6ac117c2d1440c/volcengine_compat-1.0.156.tar.gz", hash = "sha256:e357d096828e31a202dc6047bbc5bf6fff3f54a98cd35a99ab5f965ea741a267", size = 329616 } +sdist = { url = "https://files.pythonhosted.org/packages/8a/c5/62f2fbf0359b31d4e8f766e9ee3096c23d08fc294df1ab6ac117c2d1440c/volcengine_compat-1.0.156.tar.gz", hash = "sha256:e357d096828e31a202dc6047bbc5bf6fff3f54a98cd35a99ab5f965ea741a267", size = 329616, upload-time = "2024-10-13T09:19:09.149Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/37/da/7ccbe82470dc27e1cfd0466dc637248be906eb8447c28a40c1c74cf617ee/volcengine_compat-1.0.156-py3-none-any.whl", hash = "sha256:4abc149a7601ebad8fa2d28fab50c7945145cf74daecb71bca797b0bdc82c5a5", size = 677272 }, + { url = "https://files.pythonhosted.org/packages/37/da/7ccbe82470dc27e1cfd0466dc637248be906eb8447c28a40c1c74cf617ee/volcengine_compat-1.0.156-py3-none-any.whl", hash = "sha256:4abc149a7601ebad8fa2d28fab50c7945145cf74daecb71bca797b0bdc82c5a5", size = 677272, upload-time = "2024-10-13T09:17:19.944Z" }, ] [[package]] @@ -6920,17 +6971,17 @@ dependencies = [ { name = "sentry-sdk" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0a/cc/770ae3aa7ae44f6792f7ecb81c14c0e38b672deb35235719bb1006519487/wandb-0.23.1.tar.gz", hash = "sha256:f6fb1e3717949b29675a69359de0eeb01e67d3360d581947d5b3f98c273567d6", size = 44298053 } +sdist = { url = "https://files.pythonhosted.org/packages/0a/cc/770ae3aa7ae44f6792f7ecb81c14c0e38b672deb35235719bb1006519487/wandb-0.23.1.tar.gz", hash = "sha256:f6fb1e3717949b29675a69359de0eeb01e67d3360d581947d5b3f98c273567d6", size = 44298053, upload-time = "2025-12-03T02:25:10.79Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/0b/c3d7053dfd93fd259a63c7818d9c4ac2ba0642ff8dc8db98662ea0cf9cc0/wandb-0.23.1-py3-none-macosx_12_0_arm64.whl", hash = "sha256:358e15471d19b7d73fc464e37371c19d44d39e433252ac24df107aff993a286b", size = 21527293 }, - { url = "https://files.pythonhosted.org/packages/ee/9f/059420fa0cb6c511dc5c5a50184122b6aca7b178cb2aa210139e354020da/wandb-0.23.1-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:110304407f4b38f163bdd50ed5c5225365e4df3092f13089c30171a75257b575", size = 22745926 }, - { url = "https://files.pythonhosted.org/packages/96/b6/fd465827c14c64d056d30b4c9fcf4dac889a6969dba64489a88fc4ffa333/wandb-0.23.1-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:6cc984cf85feb2f8ee0451d76bc9fb7f39da94956bb8183e30d26284cf203b65", size = 21212973 }, - { url = 
"https://files.pythonhosted.org/packages/5c/ee/9a8bb9a39cc1f09c3060456cc79565110226dc4099a719af5c63432da21d/wandb-0.23.1-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:67431cd3168d79fdb803e503bd669c577872ffd5dadfa86de733b3274b93088e", size = 22887885 }, - { url = "https://files.pythonhosted.org/packages/6d/4d/8d9e75add529142e037b05819cb3ab1005679272950128d69d218b7e5b2e/wandb-0.23.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:07be70c0baa97ea25fadc4a9d0097f7371eef6dcacc5ceb525c82491a31e9244", size = 21250967 }, - { url = "https://files.pythonhosted.org/packages/97/72/0b35cddc4e4168f03c759b96d9f671ad18aec8bdfdd84adfea7ecb3f5701/wandb-0.23.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:216c95b08e0a2ec6a6008373b056d597573d565e30b43a7a93c35a171485ee26", size = 22988382 }, - { url = "https://files.pythonhosted.org/packages/c0/6d/e78093d49d68afb26f5261a70fc7877c34c114af5c2ee0ab3b1af85f5e76/wandb-0.23.1-py3-none-win32.whl", hash = "sha256:fb5cf0f85692f758a5c36ab65fea96a1284126de64e836610f92ddbb26df5ded", size = 22150756 }, - { url = "https://files.pythonhosted.org/packages/05/27/4f13454b44c9eceaac3d6e4e4efa2230b6712d613ff9bf7df010eef4fd18/wandb-0.23.1-py3-none-win_amd64.whl", hash = "sha256:21c8c56e436eb707b7d54f705652e030d48e5cfcba24cf953823eb652e30e714", size = 22150760 }, - { url = "https://files.pythonhosted.org/packages/30/20/6c091d451e2a07689bfbfaeb7592d488011420e721de170884fedd68c644/wandb-0.23.1-py3-none-win_arm64.whl", hash = "sha256:8aee7f3bb573f2c0acf860f497ca9c684f9b35f2ca51011ba65af3d4592b77c1", size = 20137463 }, + { url = "https://files.pythonhosted.org/packages/12/0b/c3d7053dfd93fd259a63c7818d9c4ac2ba0642ff8dc8db98662ea0cf9cc0/wandb-0.23.1-py3-none-macosx_12_0_arm64.whl", hash = "sha256:358e15471d19b7d73fc464e37371c19d44d39e433252ac24df107aff993a286b", size = 21527293, upload-time = "2025-12-03T02:24:48.011Z" }, + { url = "https://files.pythonhosted.org/packages/ee/9f/059420fa0cb6c511dc5c5a50184122b6aca7b178cb2aa210139e354020da/wandb-0.23.1-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:110304407f4b38f163bdd50ed5c5225365e4df3092f13089c30171a75257b575", size = 22745926, upload-time = "2025-12-03T02:24:50.519Z" }, + { url = "https://files.pythonhosted.org/packages/96/b6/fd465827c14c64d056d30b4c9fcf4dac889a6969dba64489a88fc4ffa333/wandb-0.23.1-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:6cc984cf85feb2f8ee0451d76bc9fb7f39da94956bb8183e30d26284cf203b65", size = 21212973, upload-time = "2025-12-03T02:24:52.828Z" }, + { url = "https://files.pythonhosted.org/packages/5c/ee/9a8bb9a39cc1f09c3060456cc79565110226dc4099a719af5c63432da21d/wandb-0.23.1-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:67431cd3168d79fdb803e503bd669c577872ffd5dadfa86de733b3274b93088e", size = 22887885, upload-time = "2025-12-03T02:24:55.281Z" }, + { url = "https://files.pythonhosted.org/packages/6d/4d/8d9e75add529142e037b05819cb3ab1005679272950128d69d218b7e5b2e/wandb-0.23.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:07be70c0baa97ea25fadc4a9d0097f7371eef6dcacc5ceb525c82491a31e9244", size = 21250967, upload-time = "2025-12-03T02:24:57.603Z" }, + { url = "https://files.pythonhosted.org/packages/97/72/0b35cddc4e4168f03c759b96d9f671ad18aec8bdfdd84adfea7ecb3f5701/wandb-0.23.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:216c95b08e0a2ec6a6008373b056d597573d565e30b43a7a93c35a171485ee26", size = 22988382, upload-time = "2025-12-03T02:25:00.518Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/6d/e78093d49d68afb26f5261a70fc7877c34c114af5c2ee0ab3b1af85f5e76/wandb-0.23.1-py3-none-win32.whl", hash = "sha256:fb5cf0f85692f758a5c36ab65fea96a1284126de64e836610f92ddbb26df5ded", size = 22150756, upload-time = "2025-12-03T02:25:02.734Z" }, + { url = "https://files.pythonhosted.org/packages/05/27/4f13454b44c9eceaac3d6e4e4efa2230b6712d613ff9bf7df010eef4fd18/wandb-0.23.1-py3-none-win_amd64.whl", hash = "sha256:21c8c56e436eb707b7d54f705652e030d48e5cfcba24cf953823eb652e30e714", size = 22150760, upload-time = "2025-12-03T02:25:05.106Z" }, + { url = "https://files.pythonhosted.org/packages/30/20/6c091d451e2a07689bfbfaeb7592d488011420e721de170884fedd68c644/wandb-0.23.1-py3-none-win_arm64.whl", hash = "sha256:8aee7f3bb573f2c0acf860f497ca9c684f9b35f2ca51011ba65af3d4592b77c1", size = 20137463, upload-time = "2025-12-03T02:25:08.317Z" }, ] [[package]] @@ -6940,47 +6991,47 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440 } +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529 }, - { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384 }, - { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789 }, - { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521 }, - { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722 }, - { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088 }, - { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923 }, - { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080 }, - { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432 }, - { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046 }, - { url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473 }, - { url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598 }, - { url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210 }, - { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745 }, - { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769 }, - { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374 }, - { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485 }, - { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813 }, - { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816 }, - { url = 
"https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186 }, - { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812 }, - { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196 }, - { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657 }, - { url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042 }, - { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410 }, - { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209 }, - { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250 }, - { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117 }, - { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493 }, - { url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546 }, + { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" }, + { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" }, + { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" }, + { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" }, + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = 
"2025-10-14T15:04:57.521Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" }, + { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" }, + { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" }, + { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" }, ] [[package]] name = "wcwidth" version = "0.2.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293 } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286 }, + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, ] [[package]] @@ -7001,9 +7052,9 @@ dependencies = [ { name = "tzdata", marker = 
"sys_platform == 'win32'" }, { name = "wandb" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/09/95/27e05d954972a83372a3ceb6b5db6136bc4f649fa69d8009b27c144ca111/weave-0.52.17.tar.gz", hash = "sha256:940aaf892b65c72c67cb893e97ed5339136a4b33a7ea85d52ed36671111826ef", size = 609149 } +sdist = { url = "https://files.pythonhosted.org/packages/09/95/27e05d954972a83372a3ceb6b5db6136bc4f649fa69d8009b27c144ca111/weave-0.52.17.tar.gz", hash = "sha256:940aaf892b65c72c67cb893e97ed5339136a4b33a7ea85d52ed36671111826ef", size = 609149, upload-time = "2025-11-13T22:09:51.045Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/0b/ae7860d2b0c02e7efab26815a9a5286d3b0f9f4e0356446f2896351bf770/weave-0.52.17-py3-none-any.whl", hash = "sha256:5772ef82521a033829c921115c5779399581a7ae06d81dfd527126e2115d16d4", size = 765887 }, + { url = "https://files.pythonhosted.org/packages/ed/0b/ae7860d2b0c02e7efab26815a9a5286d3b0f9f4e0356446f2896351bf770/weave-0.52.17-py3-none-any.whl", hash = "sha256:5772ef82521a033829c921115c5779399581a7ae06d81dfd527126e2115d16d4", size = 765887, upload-time = "2025-11-13T22:09:49.161Z" }, ] [[package]] @@ -7019,67 +7070,67 @@ dependencies = [ { name = "pydantic" }, { name = "validators" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bd/0e/e4582b007427187a9fde55fa575db4b766c81929d2b43a3dd8becce50567/weaviate_client-4.17.0.tar.gz", hash = "sha256:731d58d84b0989df4db399b686357ed285fb95971a492ccca8dec90bb2343c51", size = 769019 } +sdist = { url = "https://files.pythonhosted.org/packages/bd/0e/e4582b007427187a9fde55fa575db4b766c81929d2b43a3dd8becce50567/weaviate_client-4.17.0.tar.gz", hash = "sha256:731d58d84b0989df4db399b686357ed285fb95971a492ccca8dec90bb2343c51", size = 769019, upload-time = "2025-09-26T11:20:27.381Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/c5/2da3a45866da7a935dab8ad07be05dcaee48b3ad4955144583b651929be7/weaviate_client-4.17.0-py3-none-any.whl", hash = "sha256:60e4a355b90537ee1e942ab0b76a94750897a13d9cf13c5a6decbd166d0ca8b5", size = 582763 }, + { url = "https://files.pythonhosted.org/packages/5b/c5/2da3a45866da7a935dab8ad07be05dcaee48b3ad4955144583b651929be7/weaviate_client-4.17.0-py3-none-any.whl", hash = "sha256:60e4a355b90537ee1e942ab0b76a94750897a13d9cf13c5a6decbd166d0ca8b5", size = 582763, upload-time = "2025-09-26T11:20:25.864Z" }, ] [[package]] name = "webencodings" version = "0.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721 } +sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774 }, + { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = 
"2017-04-05T20:21:32.581Z" }, ] [[package]] name = "websocket-client" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576 } +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616 }, + { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" }, ] [[package]] name = "websockets" version = "15.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016 } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423 }, - { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082 }, - { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330 }, - { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878 }, - { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883 }, - { url = 
"https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252 }, - { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521 }, - { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958 }, - { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918 }, - { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388 }, - { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828 }, - { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437 }, - { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096 }, - { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332 }, - { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152 }, - { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096 }, - { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523 }, - { url = 
"https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790 }, - { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165 }, - { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160 }, - { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395 }, - { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841 }, - { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743 }, + { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" }, + { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" }, + { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" }, + { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" }, + { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" }, + { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" }, + { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" }, + { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" }, + { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" }, + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, ] [[package]] name = "webvtt-py" version = "0.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5e/f6/7c9c964681fb148e0293e6860108d378e09ccab2218f9063fd3eb87f840a/webvtt-py-0.5.1.tar.gz", hash = "sha256:2040dd325277ddadc1e0c6cc66cbc4a1d9b6b49b24c57a0c3364374c3e8a3dc1", size = 55128 } +sdist = { url = "https://files.pythonhosted.org/packages/5e/f6/7c9c964681fb148e0293e6860108d378e09ccab2218f9063fd3eb87f840a/webvtt-py-0.5.1.tar.gz", hash = "sha256:2040dd325277ddadc1e0c6cc66cbc4a1d9b6b49b24c57a0c3364374c3e8a3dc1", size = 55128, upload-time = "2024-05-30T13:40:17.189Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/ed/aad7e0f5a462d679f7b4d2e0d8502c3096740c883b5bbed5103146480937/webvtt_py-0.5.1-py3-none-any.whl", hash = "sha256:9d517d286cfe7fc7825e9d4e2079647ce32f5678eb58e39ef544ffbb932610b7", size = 19802 }, + { url = "https://files.pythonhosted.org/packages/f3/ed/aad7e0f5a462d679f7b4d2e0d8502c3096740c883b5bbed5103146480937/webvtt_py-0.5.1-py3-none-any.whl", hash = "sha256:9d517d286cfe7fc7825e9d4e2079647ce32f5678eb58e39ef544ffbb932610b7", size = 19802, upload-time = "2024-05-30T13:40:14.661Z" }, ] [[package]] @@ -7089,38 +7140,38 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/45/ea/b0f8eeb287f8df9066e56e831c7824ac6bab645dd6c7a8f4b2d767944f9b/werkzeug-3.1.4.tar.gz", hash = "sha256:cd3cd98b1b92dc3b7b3995038826c68097dcb16f9baa63abe35f20eafeb9fe5e", size = 864687 } +sdist = { url = "https://files.pythonhosted.org/packages/45/ea/b0f8eeb287f8df9066e56e831c7824ac6bab645dd6c7a8f4b2d767944f9b/werkzeug-3.1.4.tar.gz", hash = "sha256:cd3cd98b1b92dc3b7b3995038826c68097dcb16f9baa63abe35f20eafeb9fe5e", size = 864687, upload-time = "2025-11-29T02:15:22.841Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/f9/9e082990c2585c744734f85bec79b5dae5df9c974ffee58fe421652c8e91/werkzeug-3.1.4-py3-none-any.whl", hash = "sha256:2ad50fb9ed09cc3af22c54698351027ace879a0b60a3b5edf5730b2f7d876905", size = 224960 }, + { url = "https://files.pythonhosted.org/packages/2f/f9/9e082990c2585c744734f85bec79b5dae5df9c974ffee58fe421652c8e91/werkzeug-3.1.4-py3-none-any.whl", hash = "sha256:2ad50fb9ed09cc3af22c54698351027ace879a0b60a3b5edf5730b2f7d876905", size = 224960, upload-time = "2025-11-29T02:15:21.13Z" }, ] [[package]] name = "wrapt" -version = "1.17.3" +version = "1.16.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547 } +sdist = { url = "https://files.pythonhosted.org/packages/95/4c/063a912e20bcef7124e0df97282a8af3ff3e4b603ce84c481d6d7346be0a/wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d", size = 53972, upload-time = "2023-11-09T06:33:30.191Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482 }, - { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674 }, - { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959 }, - { url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376 }, - { url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604 }, - { url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782 }, - { url = 
"https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076 }, - { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457 }, - { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745 }, - { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806 }, - { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998 }, - { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020 }, - { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098 }, - { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036 }, - { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156 }, - { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102 }, - { url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732 }, - { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705 }, - { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877 }, - { url = 
"https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885 }, - { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591 }, + { url = "https://files.pythonhosted.org/packages/fd/03/c188ac517f402775b90d6f312955a5e53b866c964b32119f2ed76315697e/wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09", size = 37313, upload-time = "2023-11-09T06:31:52.168Z" }, + { url = "https://files.pythonhosted.org/packages/0f/16/ea627d7817394db04518f62934a5de59874b587b792300991b3c347ff5e0/wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d", size = 38164, upload-time = "2023-11-09T06:31:53.522Z" }, + { url = "https://files.pythonhosted.org/packages/7f/a7/f1212ba098f3de0fd244e2de0f8791ad2539c03bef6c05a9fcb03e45b089/wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389", size = 80890, upload-time = "2023-11-09T06:31:55.247Z" }, + { url = "https://files.pythonhosted.org/packages/b7/96/bb5e08b3d6db003c9ab219c487714c13a237ee7dcc572a555eaf1ce7dc82/wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060", size = 73118, upload-time = "2023-11-09T06:31:57.023Z" }, + { url = "https://files.pythonhosted.org/packages/6e/52/2da48b35193e39ac53cfb141467d9f259851522d0e8c87153f0ba4205fb1/wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1", size = 80746, upload-time = "2023-11-09T06:31:58.686Z" }, + { url = "https://files.pythonhosted.org/packages/11/fb/18ec40265ab81c0e82a934de04596b6ce972c27ba2592c8b53d5585e6bcd/wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3", size = 85668, upload-time = "2023-11-09T06:31:59.992Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ef/0ecb1fa23145560431b970418dce575cfaec555ab08617d82eb92afc7ccf/wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956", size = 78556, upload-time = "2023-11-09T06:32:01.942Z" }, + { url = "https://files.pythonhosted.org/packages/25/62/cd284b2b747f175b5a96cbd8092b32e7369edab0644c45784871528eb852/wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d", size = 85712, upload-time = "2023-11-09T06:32:03.686Z" }, + { url = "https://files.pythonhosted.org/packages/e5/a7/47b7ff74fbadf81b696872d5ba504966591a3468f1bc86bca2f407baef68/wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362", size = 35327, upload-time = "2023-11-09T06:32:05.284Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/c3/0084351951d9579ae83a3d9e38c140371e4c6b038136909235079f2e6e78/wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89", size = 37523, upload-time = "2023-11-09T06:32:07.17Z" }, + { url = "https://files.pythonhosted.org/packages/92/17/224132494c1e23521868cdd57cd1e903f3b6a7ba6996b7b8f077ff8ac7fe/wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b", size = 37614, upload-time = "2023-11-09T06:32:08.859Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d7/cfcd73e8f4858079ac59d9db1ec5a1349bc486ae8e9ba55698cc1f4a1dff/wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36", size = 38316, upload-time = "2023-11-09T06:32:10.719Z" }, + { url = "https://files.pythonhosted.org/packages/7e/79/5ff0a5c54bda5aec75b36453d06be4f83d5cd4932cc84b7cb2b52cee23e2/wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73", size = 86322, upload-time = "2023-11-09T06:32:12.592Z" }, + { url = "https://files.pythonhosted.org/packages/c4/81/e799bf5d419f422d8712108837c1d9bf6ebe3cb2a81ad94413449543a923/wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809", size = 79055, upload-time = "2023-11-09T06:32:14.394Z" }, + { url = "https://files.pythonhosted.org/packages/62/62/30ca2405de6a20448ee557ab2cd61ab9c5900be7cbd18a2639db595f0b98/wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b", size = 87291, upload-time = "2023-11-09T06:32:16.201Z" }, + { url = "https://files.pythonhosted.org/packages/49/4e/5d2f6d7b57fc9956bf06e944eb00463551f7d52fc73ca35cfc4c2cdb7aed/wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81", size = 90374, upload-time = "2023-11-09T06:32:18.052Z" }, + { url = "https://files.pythonhosted.org/packages/a6/9b/c2c21b44ff5b9bf14a83252a8b973fb84923764ff63db3e6dfc3895cf2e0/wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9", size = 83896, upload-time = "2023-11-09T06:32:19.533Z" }, + { url = "https://files.pythonhosted.org/packages/14/26/93a9fa02c6f257df54d7570dfe8011995138118d11939a4ecd82cb849613/wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c", size = 91738, upload-time = "2023-11-09T06:32:20.989Z" }, + { url = "https://files.pythonhosted.org/packages/a2/5b/4660897233eb2c8c4de3dc7cefed114c61bacb3c28327e64150dc44ee2f6/wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc", size = 35568, upload-time = "2023-11-09T06:32:22.715Z" }, + { url = "https://files.pythonhosted.org/packages/5c/cc/8297f9658506b224aa4bd71906447dea6bb0ba629861a758c28f67428b91/wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8", size = 37653, upload-time = "2023-11-09T06:32:24.533Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/21/abdedb4cdf6ff41ebf01a74087740a709e2edb146490e4d9beea054b0b7a/wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", size = 23362, upload-time = "2023-11-09T06:33:28.271Z" }, ] [[package]] @@ -7132,36 +7183,36 @@ dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/cf/7f825a311b11d1e0f7947a94f88adcf1d31e707c54a6d76d61a5d98604ed/xinference-client-1.2.2.tar.gz", hash = "sha256:85d2ba0fcbaae616b06719c422364123cbac97f3e3c82e614095fe6d0e630ed0", size = 44824 } +sdist = { url = "https://files.pythonhosted.org/packages/4b/cf/7f825a311b11d1e0f7947a94f88adcf1d31e707c54a6d76d61a5d98604ed/xinference-client-1.2.2.tar.gz", hash = "sha256:85d2ba0fcbaae616b06719c422364123cbac97f3e3c82e614095fe6d0e630ed0", size = 44824, upload-time = "2025-02-08T09:28:56.692Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/0f/fc58e062cf2f7506a33d2fe5446a1e88eb7f64914addffd7ed8b12749712/xinference_client-1.2.2-py3-none-any.whl", hash = "sha256:6941d87cf61283a9d6e81cee6cb2609a183d34c6b7d808c6ba0c33437520518f", size = 25723 }, + { url = "https://files.pythonhosted.org/packages/77/0f/fc58e062cf2f7506a33d2fe5446a1e88eb7f64914addffd7ed8b12749712/xinference_client-1.2.2-py3-none-any.whl", hash = "sha256:6941d87cf61283a9d6e81cee6cb2609a183d34c6b7d808c6ba0c33437520518f", size = 25723, upload-time = "2025-02-08T09:28:54.046Z" }, ] [[package]] name = "xlrd" version = "2.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/07/5a/377161c2d3538d1990d7af382c79f3b2372e880b65de21b01b1a2b78691e/xlrd-2.0.2.tar.gz", hash = "sha256:08b5e25de58f21ce71dc7db3b3b8106c1fa776f3024c54e45b45b374e89234c9", size = 100167 } +sdist = { url = "https://files.pythonhosted.org/packages/07/5a/377161c2d3538d1990d7af382c79f3b2372e880b65de21b01b1a2b78691e/xlrd-2.0.2.tar.gz", hash = "sha256:08b5e25de58f21ce71dc7db3b3b8106c1fa776f3024c54e45b45b374e89234c9", size = 100167, upload-time = "2025-06-14T08:46:39.039Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/62/c8d562e7766786ba6587d09c5a8ba9f718ed3fa8af7f4553e8f91c36f302/xlrd-2.0.2-py2.py3-none-any.whl", hash = "sha256:ea762c3d29f4cca48d82df517b6d89fbce4db3107f9d78713e48cd321d5c9aa9", size = 96555 }, + { url = "https://files.pythonhosted.org/packages/1a/62/c8d562e7766786ba6587d09c5a8ba9f718ed3fa8af7f4553e8f91c36f302/xlrd-2.0.2-py2.py3-none-any.whl", hash = "sha256:ea762c3d29f4cca48d82df517b6d89fbce4db3107f9d78713e48cd321d5c9aa9", size = 96555, upload-time = "2025-06-14T08:46:37.766Z" }, ] [[package]] name = "xlsxwriter" version = "3.2.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/46/2c/c06ef49dc36e7954e55b802a8b231770d286a9758b3d936bd1e04ce5ba88/xlsxwriter-3.2.9.tar.gz", hash = "sha256:254b1c37a368c444eac6e2f867405cc9e461b0ed97a3233b2ac1e574efb4140c", size = 215940 } +sdist = { url = "https://files.pythonhosted.org/packages/46/2c/c06ef49dc36e7954e55b802a8b231770d286a9758b3d936bd1e04ce5ba88/xlsxwriter-3.2.9.tar.gz", hash = "sha256:254b1c37a368c444eac6e2f867405cc9e461b0ed97a3233b2ac1e574efb4140c", size = 215940, upload-time = "2025-09-16T00:16:21.63Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/0c/3662f4a66880196a590b202f0db82d919dd2f89e99a27fadef91c4a33d41/xlsxwriter-3.2.9-py3-none-any.whl", hash = 
"sha256:9a5db42bc5dff014806c58a20b9eae7322a134abb6fce3c92c181bfb275ec5b3", size = 175315 }, + { url = "https://files.pythonhosted.org/packages/3a/0c/3662f4a66880196a590b202f0db82d919dd2f89e99a27fadef91c4a33d41/xlsxwriter-3.2.9-py3-none-any.whl", hash = "sha256:9a5db42bc5dff014806c58a20b9eae7322a134abb6fce3c92c181bfb275ec5b3", size = 175315, upload-time = "2025-09-16T00:16:20.108Z" }, ] [[package]] name = "xmltodict" version = "1.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6a/aa/917ceeed4dbb80d2f04dbd0c784b7ee7bba8ae5a54837ef0e5e062cd3cfb/xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649", size = 25725 } +sdist = { url = "https://files.pythonhosted.org/packages/6a/aa/917ceeed4dbb80d2f04dbd0c784b7ee7bba8ae5a54837ef0e5e062cd3cfb/xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649", size = 25725, upload-time = "2025-09-17T21:59:26.459Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/20/69a0e6058bc5ea74892d089d64dfc3a62ba78917ec5e2cfa70f7c92ba3a5/xmltodict-1.0.2-py3-none-any.whl", hash = "sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d", size = 13893 }, + { url = "https://files.pythonhosted.org/packages/c0/20/69a0e6058bc5ea74892d089d64dfc3a62ba78917ec5e2cfa70f7c92ba3a5/xmltodict-1.0.2-py3-none-any.whl", hash = "sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d", size = 13893, upload-time = "2025-09-17T21:59:24.859Z" }, ] [[package]] @@ -7173,119 +7224,119 @@ dependencies = [ { name = "multidict" }, { name = "propcache" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b7/9d/4b94a8e6d2b51b599516a5cb88e5bc99b4d8d4583e468057eaa29d5f0918/yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1", size = 181062 } +sdist = { url = "https://files.pythonhosted.org/packages/b7/9d/4b94a8e6d2b51b599516a5cb88e5bc99b4d8d4583e468057eaa29d5f0918/yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1", size = 181062, upload-time = "2024-12-01T20:35:23.292Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/93/282b5f4898d8e8efaf0790ba6d10e2245d2c9f30e199d1a85cae9356098c/yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069", size = 141555 }, - { url = "https://files.pythonhosted.org/packages/6d/9c/0a49af78df099c283ca3444560f10718fadb8a18dc8b3edf8c7bd9fd7d89/yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193", size = 94351 }, - { url = "https://files.pythonhosted.org/packages/5a/a1/205ab51e148fdcedad189ca8dd587794c6f119882437d04c33c01a75dece/yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889", size = 92286 }, - { url = "https://files.pythonhosted.org/packages/ed/fe/88b690b30f3f59275fb674f5f93ddd4a3ae796c2b62e5bb9ece8a4914b83/yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8", size = 340649 }, - { url = "https://files.pythonhosted.org/packages/07/eb/3b65499b568e01f36e847cebdc8d7ccb51fff716dbda1ae83c3cbb8ca1c9/yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca", size = 356623 }, - { url = "https://files.pythonhosted.org/packages/33/46/f559dc184280b745fc76ec6b1954de2c55595f0ec0a7614238b9ebf69618/yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8", size = 354007 }, - { url = "https://files.pythonhosted.org/packages/af/ba/1865d85212351ad160f19fb99808acf23aab9a0f8ff31c8c9f1b4d671fc9/yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae", size = 344145 }, - { url = "https://files.pythonhosted.org/packages/94/cb/5c3e975d77755d7b3d5193e92056b19d83752ea2da7ab394e22260a7b824/yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3", size = 336133 }, - { url = "https://files.pythonhosted.org/packages/19/89/b77d3fd249ab52a5c40859815765d35c91425b6bb82e7427ab2f78f5ff55/yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb", size = 347967 }, - { url = "https://files.pythonhosted.org/packages/35/bd/f6b7630ba2cc06c319c3235634c582a6ab014d52311e7d7c22f9518189b5/yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e", size = 346397 }, - { url = "https://files.pythonhosted.org/packages/18/1a/0b4e367d5a72d1f095318344848e93ea70da728118221f84f1bf6c1e39e7/yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59", size = 350206 }, - { url = "https://files.pythonhosted.org/packages/b5/cf/320fff4367341fb77809a2d8d7fe75b5d323a8e1b35710aafe41fdbf327b/yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d", size = 362089 }, - { url = "https://files.pythonhosted.org/packages/57/cf/aadba261d8b920253204085268bad5e8cdd86b50162fcb1b10c10834885a/yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e", size = 366267 }, - { url = "https://files.pythonhosted.org/packages/54/58/fb4cadd81acdee6dafe14abeb258f876e4dd410518099ae9a35c88d8097c/yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a", size = 359141 }, - { url = "https://files.pythonhosted.org/packages/9a/7a/4c571597589da4cd5c14ed2a0b17ac56ec9ee7ee615013f74653169e702d/yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1", size = 84402 }, - { url = "https://files.pythonhosted.org/packages/ae/7b/8600250b3d89b625f1121d897062f629883c2f45339623b69b1747ec65fa/yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5", size = 91030 }, - { url = "https://files.pythonhosted.org/packages/33/85/bd2e2729752ff4c77338e0102914897512e92496375e079ce0150a6dc306/yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50", size = 142644 }, - { url = "https://files.pythonhosted.org/packages/ff/74/1178322cc0f10288d7eefa6e4a85d8d2e28187ccab13d5b844e8b5d7c88d/yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576", size = 94962 }, - { url = "https://files.pythonhosted.org/packages/be/75/79c6acc0261e2c2ae8a1c41cf12265e91628c8c58ae91f5ff59e29c0787f/yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640", size = 92795 }, - { url = "https://files.pythonhosted.org/packages/6b/32/927b2d67a412c31199e83fefdce6e645247b4fb164aa1ecb35a0f9eb2058/yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2", size = 332368 }, - { url = "https://files.pythonhosted.org/packages/19/e5/859fca07169d6eceeaa4fde1997c91d8abde4e9a7c018e371640c2da2b71/yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75", size = 342314 }, - { url = "https://files.pythonhosted.org/packages/08/75/76b63ccd91c9e03ab213ef27ae6add2e3400e77e5cdddf8ed2dbc36e3f21/yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512", size = 341987 }, - { url = "https://files.pythonhosted.org/packages/1a/e1/a097d5755d3ea8479a42856f51d97eeff7a3a7160593332d98f2709b3580/yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba", size = 336914 }, - { url = "https://files.pythonhosted.org/packages/0b/42/e1b4d0e396b7987feceebe565286c27bc085bf07d61a59508cdaf2d45e63/yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb", size = 325765 }, - { url = "https://files.pythonhosted.org/packages/7e/18/03a5834ccc9177f97ca1bbb245b93c13e58e8225276f01eedc4cc98ab820/yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272", size = 344444 }, - { url = "https://files.pythonhosted.org/packages/c8/03/a713633bdde0640b0472aa197b5b86e90fbc4c5bc05b727b714cd8a40e6d/yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6", size = 340760 }, - { url = "https://files.pythonhosted.org/packages/eb/99/f6567e3f3bbad8fd101886ea0276c68ecb86a2b58be0f64077396cd4b95e/yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e", size = 346484 }, - { url = "https://files.pythonhosted.org/packages/8e/a9/84717c896b2fc6cb15bd4eecd64e34a2f0a9fd6669e69170c73a8b46795a/yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb", size = 359864 }, - { url = "https://files.pythonhosted.org/packages/1e/2e/d0f5f1bef7ee93ed17e739ec8dbcb47794af891f7d165fa6014517b48169/yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393", size = 364537 }, - { url = "https://files.pythonhosted.org/packages/97/8a/568d07c5d4964da5b02621a517532adb8ec5ba181ad1687191fffeda0ab6/yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285", size = 357861 }, - { url = 
"https://files.pythonhosted.org/packages/7d/e3/924c3f64b6b3077889df9a1ece1ed8947e7b61b0a933f2ec93041990a677/yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2", size = 84097 }, - { url = "https://files.pythonhosted.org/packages/34/45/0e055320daaabfc169b21ff6174567b2c910c45617b0d79c68d7ab349b02/yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477", size = 90399 }, - { url = "https://files.pythonhosted.org/packages/f5/4b/a06e0ec3d155924f77835ed2d167ebd3b211a7b0853da1cf8d8414d784ef/yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b", size = 45109 }, + { url = "https://files.pythonhosted.org/packages/40/93/282b5f4898d8e8efaf0790ba6d10e2245d2c9f30e199d1a85cae9356098c/yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069", size = 141555, upload-time = "2024-12-01T20:33:08.819Z" }, + { url = "https://files.pythonhosted.org/packages/6d/9c/0a49af78df099c283ca3444560f10718fadb8a18dc8b3edf8c7bd9fd7d89/yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193", size = 94351, upload-time = "2024-12-01T20:33:10.609Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a1/205ab51e148fdcedad189ca8dd587794c6f119882437d04c33c01a75dece/yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889", size = 92286, upload-time = "2024-12-01T20:33:12.322Z" }, + { url = "https://files.pythonhosted.org/packages/ed/fe/88b690b30f3f59275fb674f5f93ddd4a3ae796c2b62e5bb9ece8a4914b83/yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8", size = 340649, upload-time = "2024-12-01T20:33:13.842Z" }, + { url = "https://files.pythonhosted.org/packages/07/eb/3b65499b568e01f36e847cebdc8d7ccb51fff716dbda1ae83c3cbb8ca1c9/yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca", size = 356623, upload-time = "2024-12-01T20:33:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/33/46/f559dc184280b745fc76ec6b1954de2c55595f0ec0a7614238b9ebf69618/yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8", size = 354007, upload-time = "2024-12-01T20:33:17.518Z" }, + { url = "https://files.pythonhosted.org/packages/af/ba/1865d85212351ad160f19fb99808acf23aab9a0f8ff31c8c9f1b4d671fc9/yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae", size = 344145, upload-time = "2024-12-01T20:33:20.071Z" }, + { url = "https://files.pythonhosted.org/packages/94/cb/5c3e975d77755d7b3d5193e92056b19d83752ea2da7ab394e22260a7b824/yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3", size = 336133, upload-time = "2024-12-01T20:33:22.515Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/b77d3fd249ab52a5c40859815765d35c91425b6bb82e7427ab2f78f5ff55/yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb", size = 347967, upload-time = "2024-12-01T20:33:24.139Z" }, + { url = "https://files.pythonhosted.org/packages/35/bd/f6b7630ba2cc06c319c3235634c582a6ab014d52311e7d7c22f9518189b5/yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e", size = 346397, upload-time = "2024-12-01T20:33:26.205Z" }, + { url = "https://files.pythonhosted.org/packages/18/1a/0b4e367d5a72d1f095318344848e93ea70da728118221f84f1bf6c1e39e7/yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59", size = 350206, upload-time = "2024-12-01T20:33:27.83Z" }, + { url = "https://files.pythonhosted.org/packages/b5/cf/320fff4367341fb77809a2d8d7fe75b5d323a8e1b35710aafe41fdbf327b/yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d", size = 362089, upload-time = "2024-12-01T20:33:29.565Z" }, + { url = "https://files.pythonhosted.org/packages/57/cf/aadba261d8b920253204085268bad5e8cdd86b50162fcb1b10c10834885a/yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e", size = 366267, upload-time = "2024-12-01T20:33:31.449Z" }, + { url = "https://files.pythonhosted.org/packages/54/58/fb4cadd81acdee6dafe14abeb258f876e4dd410518099ae9a35c88d8097c/yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a", size = 359141, upload-time = "2024-12-01T20:33:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/9a/7a/4c571597589da4cd5c14ed2a0b17ac56ec9ee7ee615013f74653169e702d/yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1", size = 84402, upload-time = "2024-12-01T20:33:35.689Z" }, + { url = "https://files.pythonhosted.org/packages/ae/7b/8600250b3d89b625f1121d897062f629883c2f45339623b69b1747ec65fa/yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5", size = 91030, upload-time = "2024-12-01T20:33:37.511Z" }, + { url = "https://files.pythonhosted.org/packages/33/85/bd2e2729752ff4c77338e0102914897512e92496375e079ce0150a6dc306/yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50", size = 142644, upload-time = "2024-12-01T20:33:39.204Z" }, + { url = "https://files.pythonhosted.org/packages/ff/74/1178322cc0f10288d7eefa6e4a85d8d2e28187ccab13d5b844e8b5d7c88d/yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576", size = 94962, upload-time = "2024-12-01T20:33:40.808Z" }, + { url = "https://files.pythonhosted.org/packages/be/75/79c6acc0261e2c2ae8a1c41cf12265e91628c8c58ae91f5ff59e29c0787f/yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640", size = 92795, upload-time = "2024-12-01T20:33:42.322Z" }, + { url = "https://files.pythonhosted.org/packages/6b/32/927b2d67a412c31199e83fefdce6e645247b4fb164aa1ecb35a0f9eb2058/yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2", size = 332368, upload-time = "2024-12-01T20:33:43.956Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/e5/859fca07169d6eceeaa4fde1997c91d8abde4e9a7c018e371640c2da2b71/yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75", size = 342314, upload-time = "2024-12-01T20:33:46.046Z" }, + { url = "https://files.pythonhosted.org/packages/08/75/76b63ccd91c9e03ab213ef27ae6add2e3400e77e5cdddf8ed2dbc36e3f21/yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512", size = 341987, upload-time = "2024-12-01T20:33:48.352Z" }, + { url = "https://files.pythonhosted.org/packages/1a/e1/a097d5755d3ea8479a42856f51d97eeff7a3a7160593332d98f2709b3580/yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba", size = 336914, upload-time = "2024-12-01T20:33:50.875Z" }, + { url = "https://files.pythonhosted.org/packages/0b/42/e1b4d0e396b7987feceebe565286c27bc085bf07d61a59508cdaf2d45e63/yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb", size = 325765, upload-time = "2024-12-01T20:33:52.641Z" }, + { url = "https://files.pythonhosted.org/packages/7e/18/03a5834ccc9177f97ca1bbb245b93c13e58e8225276f01eedc4cc98ab820/yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272", size = 344444, upload-time = "2024-12-01T20:33:54.395Z" }, + { url = "https://files.pythonhosted.org/packages/c8/03/a713633bdde0640b0472aa197b5b86e90fbc4c5bc05b727b714cd8a40e6d/yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6", size = 340760, upload-time = "2024-12-01T20:33:56.286Z" }, + { url = "https://files.pythonhosted.org/packages/eb/99/f6567e3f3bbad8fd101886ea0276c68ecb86a2b58be0f64077396cd4b95e/yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e", size = 346484, upload-time = "2024-12-01T20:33:58.375Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a9/84717c896b2fc6cb15bd4eecd64e34a2f0a9fd6669e69170c73a8b46795a/yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb", size = 359864, upload-time = "2024-12-01T20:34:00.22Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2e/d0f5f1bef7ee93ed17e739ec8dbcb47794af891f7d165fa6014517b48169/yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393", size = 364537, upload-time = "2024-12-01T20:34:03.54Z" }, + { url = "https://files.pythonhosted.org/packages/97/8a/568d07c5d4964da5b02621a517532adb8ec5ba181ad1687191fffeda0ab6/yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285", size = 357861, upload-time = "2024-12-01T20:34:05.73Z" }, + { url = "https://files.pythonhosted.org/packages/7d/e3/924c3f64b6b3077889df9a1ece1ed8947e7b61b0a933f2ec93041990a677/yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2", size = 84097, upload-time = "2024-12-01T20:34:07.664Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/45/0e055320daaabfc169b21ff6174567b2c910c45617b0d79c68d7ab349b02/yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477", size = 90399, upload-time = "2024-12-01T20:34:09.61Z" }, + { url = "https://files.pythonhosted.org/packages/f5/4b/a06e0ec3d155924f77835ed2d167ebd3b211a7b0853da1cf8d8414d784ef/yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b", size = 45109, upload-time = "2024-12-01T20:35:20.834Z" }, ] [[package]] name = "zipp" version = "3.23.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547 } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276 }, + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, ] [[package]] name = "zope-event" version = "6.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/46/33/d3eeac228fc14de76615612ee208be2d8a5b5b0fada36bf9b62d6b40600c/zope_event-6.1.tar.gz", hash = "sha256:6052a3e0cb8565d3d4ef1a3a7809336ac519bc4fe38398cb8d466db09adef4f0", size = 18739 } +sdist = { url = "https://files.pythonhosted.org/packages/46/33/d3eeac228fc14de76615612ee208be2d8a5b5b0fada36bf9b62d6b40600c/zope_event-6.1.tar.gz", hash = "sha256:6052a3e0cb8565d3d4ef1a3a7809336ac519bc4fe38398cb8d466db09adef4f0", size = 18739, upload-time = "2025-11-07T08:05:49.934Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/b0/956902e5e1302f8c5d124e219c6bf214e2649f92ad5fce85b05c039a04c9/zope_event-6.1-py3-none-any.whl", hash = "sha256:0ca78b6391b694272b23ec1335c0294cc471065ed10f7f606858fc54566c25a0", size = 6414 }, + { url = "https://files.pythonhosted.org/packages/c2/b0/956902e5e1302f8c5d124e219c6bf214e2649f92ad5fce85b05c039a04c9/zope_event-6.1-py3-none-any.whl", hash = "sha256:0ca78b6391b694272b23ec1335c0294cc471065ed10f7f606858fc54566c25a0", size = 6414, upload-time = "2025-11-07T08:05:48.874Z" }, ] [[package]] name = "zope-interface" version = "8.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/71/c9/5ec8679a04d37c797d343f650c51ad67d178f0001c363e44b6ac5f97a9da/zope_interface-8.1.1.tar.gz", hash = "sha256:51b10e6e8e238d719636a401f44f1e366146912407b58453936b781a19be19ec", size = 254748 } +sdist = { url = "https://files.pythonhosted.org/packages/71/c9/5ec8679a04d37c797d343f650c51ad67d178f0001c363e44b6ac5f97a9da/zope_interface-8.1.1.tar.gz", hash = "sha256:51b10e6e8e238d719636a401f44f1e366146912407b58453936b781a19be19ec", size = 254748, upload-time 
= "2025-11-15T08:32:52.404Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/fc/d84bac27332bdefe8c03f7289d932aeb13a5fd6aeedba72b0aa5b18276ff/zope_interface-8.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e8a0fdd5048c1bb733e4693eae9bc4145a19419ea6a1c95299318a93fe9f3d72", size = 207955 }, - { url = "https://files.pythonhosted.org/packages/52/02/e1234eb08b10b5cf39e68372586acc7f7bbcd18176f6046433a8f6b8b263/zope_interface-8.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4cb0ea75a26b606f5bc8524fbce7b7d8628161b6da002c80e6417ce5ec757c0", size = 208398 }, - { url = "https://files.pythonhosted.org/packages/3c/be/aabda44d4bc490f9966c2b77fa7822b0407d852cb909b723f2d9e05d2427/zope_interface-8.1.1-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:c267b00b5a49a12743f5e1d3b4beef45479d696dab090f11fe3faded078a5133", size = 255079 }, - { url = "https://files.pythonhosted.org/packages/d8/7f/4fbc7c2d7cb310e5a91b55db3d98e98d12b262014c1fcad9714fe33c2adc/zope_interface-8.1.1-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e25d3e2b9299e7ec54b626573673bdf0d740cf628c22aef0a3afef85b438aa54", size = 259850 }, - { url = "https://files.pythonhosted.org/packages/fe/2c/dc573fffe59cdbe8bbbdd2814709bdc71c4870893e7226700bc6a08c5e0c/zope_interface-8.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:63db1241804417aff95ac229c13376c8c12752b83cc06964d62581b493e6551b", size = 261033 }, - { url = "https://files.pythonhosted.org/packages/0e/51/1ac50e5ee933d9e3902f3400bda399c128a5c46f9f209d16affe3d4facc5/zope_interface-8.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:9639bf4ed07b5277fb231e54109117c30d608254685e48a7104a34618bcbfc83", size = 212215 }, - { url = "https://files.pythonhosted.org/packages/08/3d/f5b8dd2512f33bfab4faba71f66f6873603d625212206dd36f12403ae4ca/zope_interface-8.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a16715808408db7252b8c1597ed9008bdad7bf378ed48eb9b0595fad4170e49d", size = 208660 }, - { url = "https://files.pythonhosted.org/packages/e5/41/c331adea9b11e05ff9ac4eb7d3032b24c36a3654ae9f2bf4ef2997048211/zope_interface-8.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce6b58752acc3352c4aa0b55bbeae2a941d61537e6afdad2467a624219025aae", size = 208851 }, - { url = "https://files.pythonhosted.org/packages/25/00/7a8019c3bb8b119c5f50f0a4869183a4b699ca004a7f87ce98382e6b364c/zope_interface-8.1.1-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:807778883d07177713136479de7fd566f9056a13aef63b686f0ab4807c6be259", size = 259292 }, - { url = "https://files.pythonhosted.org/packages/1a/fc/b70e963bf89345edffdd5d16b61e789fdc09365972b603e13785360fea6f/zope_interface-8.1.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50e5eb3b504a7d63dc25211b9298071d5b10a3eb754d6bf2f8ef06cb49f807ab", size = 264741 }, - { url = "https://files.pythonhosted.org/packages/96/fe/7d0b5c0692b283901b34847f2b2f50d805bfff4b31de4021ac9dfb516d2a/zope_interface-8.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eee6f93b2512ec9466cf30c37548fd3ed7bc4436ab29cd5943d7a0b561f14f0f", size = 264281 }, - { url = "https://files.pythonhosted.org/packages/2b/2c/a7cebede1cf2757be158bcb151fe533fa951038cfc5007c7597f9f86804b/zope_interface-8.1.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:80edee6116d569883c58ff8efcecac3b737733d646802036dc337aa839a5f06b", size = 212327 }, + { url = "https://files.pythonhosted.org/packages/77/fc/d84bac27332bdefe8c03f7289d932aeb13a5fd6aeedba72b0aa5b18276ff/zope_interface-8.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e8a0fdd5048c1bb733e4693eae9bc4145a19419ea6a1c95299318a93fe9f3d72", size = 207955, upload-time = "2025-11-15T08:36:45.902Z" }, + { url = "https://files.pythonhosted.org/packages/52/02/e1234eb08b10b5cf39e68372586acc7f7bbcd18176f6046433a8f6b8b263/zope_interface-8.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4cb0ea75a26b606f5bc8524fbce7b7d8628161b6da002c80e6417ce5ec757c0", size = 208398, upload-time = "2025-11-15T08:36:47.016Z" }, + { url = "https://files.pythonhosted.org/packages/3c/be/aabda44d4bc490f9966c2b77fa7822b0407d852cb909b723f2d9e05d2427/zope_interface-8.1.1-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:c267b00b5a49a12743f5e1d3b4beef45479d696dab090f11fe3faded078a5133", size = 255079, upload-time = "2025-11-15T08:36:48.157Z" }, + { url = "https://files.pythonhosted.org/packages/d8/7f/4fbc7c2d7cb310e5a91b55db3d98e98d12b262014c1fcad9714fe33c2adc/zope_interface-8.1.1-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e25d3e2b9299e7ec54b626573673bdf0d740cf628c22aef0a3afef85b438aa54", size = 259850, upload-time = "2025-11-15T08:36:49.544Z" }, + { url = "https://files.pythonhosted.org/packages/fe/2c/dc573fffe59cdbe8bbbdd2814709bdc71c4870893e7226700bc6a08c5e0c/zope_interface-8.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:63db1241804417aff95ac229c13376c8c12752b83cc06964d62581b493e6551b", size = 261033, upload-time = "2025-11-15T08:36:51.061Z" }, + { url = "https://files.pythonhosted.org/packages/0e/51/1ac50e5ee933d9e3902f3400bda399c128a5c46f9f209d16affe3d4facc5/zope_interface-8.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:9639bf4ed07b5277fb231e54109117c30d608254685e48a7104a34618bcbfc83", size = 212215, upload-time = "2025-11-15T08:36:52.553Z" }, + { url = "https://files.pythonhosted.org/packages/08/3d/f5b8dd2512f33bfab4faba71f66f6873603d625212206dd36f12403ae4ca/zope_interface-8.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a16715808408db7252b8c1597ed9008bdad7bf378ed48eb9b0595fad4170e49d", size = 208660, upload-time = "2025-11-15T08:36:53.579Z" }, + { url = "https://files.pythonhosted.org/packages/e5/41/c331adea9b11e05ff9ac4eb7d3032b24c36a3654ae9f2bf4ef2997048211/zope_interface-8.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce6b58752acc3352c4aa0b55bbeae2a941d61537e6afdad2467a624219025aae", size = 208851, upload-time = "2025-11-15T08:36:54.854Z" }, + { url = "https://files.pythonhosted.org/packages/25/00/7a8019c3bb8b119c5f50f0a4869183a4b699ca004a7f87ce98382e6b364c/zope_interface-8.1.1-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:807778883d07177713136479de7fd566f9056a13aef63b686f0ab4807c6be259", size = 259292, upload-time = "2025-11-15T08:36:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/1a/fc/b70e963bf89345edffdd5d16b61e789fdc09365972b603e13785360fea6f/zope_interface-8.1.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50e5eb3b504a7d63dc25211b9298071d5b10a3eb754d6bf2f8ef06cb49f807ab", size = 264741, upload-time = "2025-11-15T08:36:57.675Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/fe/7d0b5c0692b283901b34847f2b2f50d805bfff4b31de4021ac9dfb516d2a/zope_interface-8.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eee6f93b2512ec9466cf30c37548fd3ed7bc4436ab29cd5943d7a0b561f14f0f", size = 264281, upload-time = "2025-11-15T08:36:58.968Z" }, + { url = "https://files.pythonhosted.org/packages/2b/2c/a7cebede1cf2757be158bcb151fe533fa951038cfc5007c7597f9f86804b/zope_interface-8.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:80edee6116d569883c58ff8efcecac3b737733d646802036dc337aa839a5f06b", size = 212327, upload-time = "2025-11-15T08:37:00.4Z" }, ] [[package]] name = "zstandard" version = "0.25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513 } +sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254 }, - { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559 }, - { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020 }, - { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126 }, - { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390 }, - { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914 }, - { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635 }, - { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277 }, - { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377 }, - { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493 }, - { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018 }, - { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672 }, - { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753 }, - { url = "https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047 }, - { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484 }, - { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183 }, - { url = "https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533 }, - { url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738 }, - { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436 }, - { url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019 }, - { url = 
"https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012 }, - { url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148 }, - { url = "https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652 }, - { url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993 }, - { url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806 }, - { url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659 }, - { url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933 }, - { url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 5268008 }, - { url = "https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517 }, - { url = "https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292 }, - { url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237 }, - { url = "https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = "sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922 }, - { url = "https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276 }, - { url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679 }, + { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, + { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" }, + { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, upload-time = "2025-09-14T22:16:31.811Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time = "2025-09-14T22:16:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914, upload-time = "2025-09-14T22:16:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = "2025-09-14T22:16:37.141Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" }, + { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" }, + { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, upload-time = "2025-09-14T22:16:43.3Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" }, + { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" }, + { url = "https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" }, + { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" }, + { url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, + { url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" }, + { url = "https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012, upload-time = "2025-09-14T22:17:01.156Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148, upload-time = "2025-09-14T22:17:03.091Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652, upload-time = "2025-09-14T22:17:04.979Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993, upload-time = "2025-09-14T22:17:06.781Z" }, + { url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806, upload-time = "2025-09-14T22:17:08.415Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659, upload-time = "2025-09-14T22:17:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933, upload-time = "2025-09-14T22:17:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 5268008, upload-time = "2025-09-14T22:17:13.627Z" }, + { url = "https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517, upload-time = "2025-09-14T22:17:16.103Z" }, + { url = "https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292, upload-time = "2025-09-14T22:17:17.827Z" }, + { url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237, upload-time = "2025-09-14T22:17:19.954Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = 
"sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922, upload-time = "2025-09-14T22:17:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276, upload-time = "2025-09-14T22:17:21.429Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679, upload-time = "2025-09-14T22:17:23.147Z" }, ] diff --git a/dev/start-worker b/dev/start-worker index a01da11d86..7876620188 100755 --- a/dev/start-worker +++ b/dev/start-worker @@ -37,6 +37,7 @@ show_help() { echo " pipeline - Standard pipeline tasks" echo " triggered_workflow_dispatcher - Trigger dispatcher tasks" echo " trigger_refresh_executor - Trigger refresh tasks" + echo " retention - Retention tasks" } # Parse command line arguments @@ -105,10 +106,10 @@ if [[ -z "${QUEUES}" ]]; then # Configure queues based on edition if [[ "${EDITION}" == "CLOUD" ]]; then # Cloud edition: separate queues for dataset and trigger tasks - QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor" + QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention" else # Community edition (SELF_HOSTED): dataset and workflow have separate queues - QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor" + QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention" fi echo "No queues specified, using edition-based defaults: ${QUEUES}" diff --git a/docker/.env.example b/docker/.env.example index 8be75420b1..e7cb8711ce 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -58,8 +58,8 @@ FILES_URL= INTERNAL_FILES_URL= # Ensure UTF-8 encoding -LANG=en_US.UTF-8 -LC_ALL=en_US.UTF-8 +LANG=C.UTF-8 +LC_ALL=C.UTF-8 PYTHONIOENCODING=utf-8 # ------------------------------ @@ -69,6 +69,8 @@ PYTHONIOENCODING=utf-8 # The log level for the application. # Supported values are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL` LOG_LEVEL=INFO +# Log output format: text or json +LOG_OUTPUT_FORMAT=text # Log file path LOG_FILE=/app/logs/server.log # Log file max size, the unit is MB @@ -231,7 +233,7 @@ NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false # You can adjust the database configuration according to your needs. 
# ------------------------------ -# Database type, supported values are `postgresql` and `mysql` +# Database type, supported values are `postgresql`, `mysql`, `oceanbase`, `seekdb` DB_TYPE=postgresql # For MySQL, only `root` user is supported for now DB_USERNAME=postgres @@ -399,6 +401,7 @@ CONSOLE_CORS_ALLOW_ORIGINS=* COOKIE_DOMAIN= # When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1. NEXT_PUBLIC_COOKIE_DOMAIN= +NEXT_PUBLIC_BATCH_CONCURRENCY=5 # ------------------------------ # File Storage Configuration @@ -446,6 +449,15 @@ S3_SECRET_KEY= # If set to false, the access key and secret key must be provided. S3_USE_AWS_MANAGED_IAM=false +# Workflow run and Conversation archive storage (S3-compatible) +ARCHIVE_STORAGE_ENABLED=false +ARCHIVE_STORAGE_ENDPOINT= +ARCHIVE_STORAGE_ARCHIVE_BUCKET= +ARCHIVE_STORAGE_EXPORT_BUCKET= +ARCHIVE_STORAGE_ACCESS_KEY= +ARCHIVE_STORAGE_SECRET_KEY= +ARCHIVE_STORAGE_REGION=auto + # Azure Blob Configuration # AZURE_BLOB_ACCOUNT_NAME=difyai @@ -468,6 +480,7 @@ ALIYUN_OSS_REGION=ap-southeast-1 ALIYUN_OSS_AUTH_VERSION=v4 # Don't start with '/'. OSS doesn't support leading slash in object names. ALIYUN_OSS_PATH=your-path +ALIYUN_CLOUDBOX_ID=your-cloudbox-id # Tencent COS Configuration # @@ -476,6 +489,7 @@ TENCENT_COS_SECRET_KEY=your-secret-key TENCENT_COS_SECRET_ID=your-secret-id TENCENT_COS_REGION=your-region TENCENT_COS_SCHEME=your-scheme +TENCENT_COS_CUSTOM_DOMAIN=your-custom-domain # Oracle Storage Configuration # @@ -491,6 +505,7 @@ HUAWEI_OBS_BUCKET_NAME=your-bucket-name HUAWEI_OBS_SECRET_KEY=your-secret-key HUAWEI_OBS_ACCESS_KEY=your-access-key HUAWEI_OBS_SERVER=your-server-url +HUAWEI_OBS_PATH_STYLE=false # Volcengine TOS Configuration # @@ -518,7 +533,7 @@ SUPABASE_URL=your-server-url # ------------------------------ # The type of vector store to use. -# Supported values are `weaviate`, `oceanbase`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`, `clickzetta`, `alibabacloud_mysql`, `iris`. +# Supported values are `weaviate`, `oceanbase`, `seekdb`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `opengauss`, `tablestore`, `vastbase`, `tidb`, `tidb_on_qdrant`, `baidu`, `lindorm`, `huawei_cloud`, `upstash`, `matrixone`, `clickzetta`, `alibabacloud_mysql`, `iris`. VECTOR_STORE=weaviate # Prefix used to create collection name in vector database VECTOR_INDEX_NAME_PREFIX=Vector_index @@ -529,9 +544,9 @@ WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih WEAVIATE_GRPC_ENDPOINT=grpc://weaviate:50051 WEAVIATE_TOKENIZATION=word -# For OceanBase metadata database configuration, available when `DB_TYPE` is `mysql` and `COMPOSE_PROFILES` includes `oceanbase`. +# For OceanBase metadata database configuration, available when `DB_TYPE` is `oceanbase`. # For OceanBase vector database configuration, available when `VECTOR_STORE` is `oceanbase` -# If you want to use OceanBase as both vector database and metadata database, you need to set `DB_TYPE` to `mysql`, `COMPOSE_PROFILES` is `oceanbase`, and set Database Configuration is the same as the vector database. 
+# If you want to use OceanBase as both the vector database and the metadata database, you need to set both `DB_TYPE` and `VECTOR_STORE` to `oceanbase`, and use the same database configuration as the vector database. # seekdb is the lite version of OceanBase and shares the connection configuration with OceanBase. OCEANBASE_VECTOR_HOST=oceanbase OCEANBASE_VECTOR_PORT=2881 @@ -1044,6 +1059,29 @@ WORKFLOW_LOG_RETENTION_DAYS=30 # Batch size for workflow log cleanup operations (default: 100) WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100 +# Aliyun SLS Logstore Configuration +# Aliyun Access Key ID +ALIYUN_SLS_ACCESS_KEY_ID= +# Aliyun Access Key Secret +ALIYUN_SLS_ACCESS_KEY_SECRET= +# Aliyun SLS Endpoint (e.g., cn-hangzhou.log.aliyuncs.com) +ALIYUN_SLS_ENDPOINT= +# Aliyun SLS Region (e.g., cn-hangzhou) +ALIYUN_SLS_REGION= +# Aliyun SLS Project Name +ALIYUN_SLS_PROJECT_NAME= +# Number of days to retain workflow run logs (default: 365 days, 3650 for permanent storage) +ALIYUN_SLS_LOGSTORE_TTL=365 +# Enable dual-write to both SLS LogStore and SQL database (default: false) +LOGSTORE_DUAL_WRITE_ENABLED=false +# Enable dual-read fallback to SQL database when LogStore returns no results (default: true) +# Useful for migration scenarios where historical data exists only in SQL database +LOGSTORE_DUAL_READ_ENABLED=true +# Control flag for whether to write the `graph` field to LogStore. +# If LOGSTORE_ENABLE_PUT_GRAPH_FIELD is "true", write the full `graph` field; +# otherwise write an empty {} instead. Defaults to writing the `graph` field. +LOGSTORE_ENABLE_PUT_GRAPH_FIELD=true + # HTTP request node in workflow configuration HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760 HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576 @@ -1229,7 +1267,7 @@ NGINX_SSL_PORT=443 # and modify the env vars below accordingly. NGINX_SSL_CERT_FILENAME=dify.crt NGINX_SSL_CERT_KEY_FILENAME=dify.key -NGINX_SSL_PROTOCOLS=TLSv1.1 TLSv1.2 TLSv1.3 +NGINX_SSL_PROTOCOLS=TLSv1.2 TLSv1.3 # Nginx performance tuning NGINX_WORKER_PROCESSES=auto @@ -1350,7 +1388,10 @@ PLUGIN_STDIO_BUFFER_SIZE=1024 PLUGIN_STDIO_MAX_BUFFER_SIZE=5242880 PLUGIN_PYTHON_ENV_INIT_TIMEOUT=120 +# Plugin Daemon side timeout (configure to match the API side below) PLUGIN_MAX_EXECUTION_TIMEOUT=600 +# API side timeout (configure to match the Plugin Daemon side above) +PLUGIN_DAEMON_TIMEOUT=600.0 # PIP_MIRROR_URL=https://pypi.tuna.tsinghua.edu.cn/simple PIP_MIRROR_URL= @@ -1421,7 +1462,7 @@ QUEUE_MONITOR_ALERT_EMAILS= QUEUE_MONITOR_INTERVAL=30 # Swagger UI configuration -SWAGGER_UI_ENABLED=true +SWAGGER_UI_ENABLED=false SWAGGER_UI_PATH=/swagger-ui.html # Whether to encrypt dataset IDs when exporting DSL files (default: true) @@ -1437,6 +1478,7 @@ ENABLE_CLEAN_UNUSED_DATASETS_TASK=false ENABLE_CREATE_TIDB_SERVERLESS_TASK=false ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false ENABLE_CLEAN_MESSAGES=false +ENABLE_WORKFLOW_RUN_CLEANUP_TASK=false ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false ENABLE_DATASETS_QUEUE_MONITOR=false ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true @@ -1461,3 +1503,8 @@ ANNOTATION_IMPORT_MAX_CONCURRENT=5 # The API key of amplitude AMPLITUDE_API_KEY= + +# Sandbox expired records clean configuration +SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21 +SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000 +SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30 diff --git a/docker/README.md b/docker/README.md index 375570f106..4c40317f37 100644 --- a/docker/README.md +++ b/docker/README.md @@ -23,6 +23,10 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose.
T - Navigate to the `docker` directory. - Copy the `.env.example` file to a new file named `.env` by running `cp .env.example .env`. - Customize the `.env` file as needed. Refer to the `.env.example` file for detailed configuration options. + - **Optional (Recommended for upgrades)**: + You may use the environment synchronization tool to help keep your `.env` file aligned with the latest `.env.example` updates, while preserving your custom settings. + This is especially useful when upgrading Dify or managing a large, customized `.env` file. + See the [Environment Variables Synchronization](#environment-variables-synchronization) section below. 1. **Running the Services**: - Execute `docker compose up` from the `docker` directory to start the services. - To specify a vector database, set the `VECTOR_STORE` variable in your `.env` file to your desired vector database service, such as `milvus`, `weaviate`, or `opensearch`. @@ -111,6 +115,47 @@ The `.env.example` file provided in the Docker setup is extensive and covers a w - Each service like `nginx`, `redis`, `db`, and vector databases have specific environment variables that are directly referenced in the `docker-compose.yaml`. +### Environment Variables Synchronization + +When upgrading Dify or pulling the latest changes, new environment variables may be introduced in `.env.example`. + +To help keep your existing `.env` file up to date **without losing your custom values**, an optional environment variables synchronization tool is provided. + +> This tool performs a **one-way synchronization** from `.env.example` to `.env`. +> Existing values in `.env` are never overwritten automatically. + +#### `dify-env-sync.sh` (Optional) + +This script compares your current `.env` file with the latest `.env.example` template and helps safely apply new or updated environment variables. + +**What it does** + +- Creates a backup of the current `.env` file before making any changes +- Synchronizes newly added environment variables from `.env.example` +- Preserves all existing custom values in `.env` +- Displays differences and variables removed from `.env.example` for review + +**Backup behavior** + +Before synchronization, the current `.env` file is saved to the `env-backup/` directory with a timestamped filename +(e.g. `env-backup/.env.backup_20231218_143022`). + +**When to use** + +- After upgrading Dify to a newer version +- When `.env.example` has been updated with new environment variables +- When managing a large or heavily customized `.env` file + +**Usage** + +```bash +# Grant execution permission (first time only) +chmod +x dify-env-sync.sh + +# Run the synchronization +./dify-env-sync.sh +``` + ### Additional Information - **Continuous Improvement Phase**: We are actively seeking feedback from the community to refine and enhance the deployment process. As more users adopt this new method, we will continue to make improvements based on your experiences and suggestions. 
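For a quick orientation before the script source below, here is a representative run of the tool. This is an illustrative sketch, abridged: the `[INFO]`/`[SUCCESS]` messages come from the script's logging functions, while the timestamp, backup filename, and counts are placeholder values that will differ per deployment:

```bash
$ ./dify-env-sync.sh
[INFO] === Dify Environment Variables Synchronization Script ===
[INFO] Execution started: Mon Dec 18 14:30:22 UTC 2023
[INFO] Checking required files...
[SUCCESS] Required files verified
[SUCCESS] Backed up existing .env to env-backup/.env.backup_20231218_143022
[INFO] Detecting differences between .env and .env.example...
[SUCCESS] Detected differences in 2 environment variables
[INFO] Detecting removed environment variables...
[SUCCESS] No removed environment variables found
[INFO] Starting partial synchronization of .env file...
[SUCCESS] Partial synchronization of .env file completed
[SUCCESS] === Synchronization process completed successfully ===
```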
diff --git a/docker/dify-env-sync.sh b/docker/dify-env-sync.sh new file mode 100755 index 0000000000..edeeae3abc --- /dev/null +++ b/docker/dify-env-sync.sh @@ -0,0 +1,465 @@ +#!/bin/bash + +# ================================================================ +# Dify Environment Variables Synchronization Script +# +# Features: +# - Synchronize latest settings from .env.example to .env +# - Preserve custom settings in existing .env +# - Add new environment variables +# - Detect removed environment variables +# - Create backup files +# ================================================================ + +set -eo pipefail # Exit on error and pipe failures (safer for complex variable handling) + +# Error handling function +# Arguments: +# $1 - Line number where error occurred +# $2 - Error code +handle_error() { + local line_no=$1 + local error_code=$2 + echo -e "\033[0;31m[ERROR]\033[0m Script error: line $line_no with error code $error_code" >&2 + echo -e "\033[0;31m[ERROR]\033[0m Debug info: current working directory $(pwd)" >&2 + exit $error_code +} + +# Set error trap +trap 'handle_error ${LINENO} $?' ERR + +# Color settings for output +readonly RED='\033[0;31m' +readonly GREEN='\033[0;32m' +readonly YELLOW='\033[1;33m' +readonly BLUE='\033[0;34m' +readonly NC='\033[0m' # No Color + +# Logging functions +# Print informational message in blue +# Arguments: $1 - Message to print +log_info() { + echo -e "${BLUE}[INFO]${NC} $1" +} + +# Print success message in green +# Arguments: $1 - Message to print +log_success() { + echo -e "${GREEN}[SUCCESS]${NC} $1" +} + +# Print warning message in yellow +# Arguments: $1 - Message to print +log_warning() { + echo -e "${YELLOW}[WARNING]${NC} $1" >&2 +} + +# Print error message in red to stderr +# Arguments: $1 - Message to print +log_error() { + echo -e "${RED}[ERROR]${NC} $1" >&2 +} + +# Check for required files and create .env if missing +# Verifies that .env.example exists and creates .env from template if needed +check_files() { + log_info "Checking required files..." + + if [[ ! -f ".env.example" ]]; then + log_error ".env.example file not found" + exit 1 + fi + + if [[ ! -f ".env" ]]; then + log_warning ".env file does not exist. Creating from .env.example." + cp ".env.example" ".env" + log_success ".env file created" + fi + + log_success "Required files verified" +} + +# Create timestamped backup of .env file +# Creates env-backup directory if needed and backs up current .env file +create_backup() { + local timestamp=$(date +"%Y%m%d_%H%M%S") + local backup_dir="env-backup" + + # Create backup directory if it doesn't exist + if [[ ! -d "$backup_dir" ]]; then + mkdir -p "$backup_dir" + log_info "Created backup directory: $backup_dir" + fi + + if [[ -f ".env" ]]; then + local backup_file="${backup_dir}/.env.backup_${timestamp}" + cp ".env" "$backup_file" + log_success "Backed up existing .env to $backup_file" + fi +} + +# Detect differences between .env and .env.example (optimized for large files) +detect_differences() { + log_info "Detecting differences between .env and .env.example..." 
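+    # Note on the record format: each difference detected below is written to the
+    # diff file as one line of the form key<0x01>current_value<0x01>recommended_value.
+    # The 0x01 byte is used as the field separator (awk's OFS) because env values
+    # may themselves contain '=', spaces, or commas; show_differences_detail reads
+    # the records back with IFS=$'\x01'.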
+ + # Create secure temporary directory + local temp_dir=$(mktemp -d) + local temp_diff="$temp_dir/env_diff" + + # Store diff file path as global variable + declare -g DIFF_FILE="$temp_diff" + declare -g TEMP_DIR="$temp_dir" + + # Initialize difference file + > "$temp_diff" + + # Use awk for efficient comparison (much faster for large files) + local diff_count=$(awk -F= ' + BEGIN { OFS="\x01" } + FNR==NR { + if (!/^[[:space:]]*#/ && !/^[[:space:]]*$/ && /=/) { + gsub(/^[[:space:]]+|[[:space:]]+$/, "", $1) + key = $1 + value = substr($0, index($0,"=")+1) + gsub(/^[[:space:]]+|[[:space:]]+$/, "", value) + env_values[key] = value + } + next + } + { + if (!/^[[:space:]]*#/ && !/^[[:space:]]*$/ && /=/) { + gsub(/^[[:space:]]+|[[:space:]]+$/, "", $1) + key = $1 + example_value = substr($0, index($0,"=")+1) + gsub(/^[[:space:]]+|[[:space:]]+$/, "", example_value) + + if (key in env_values && env_values[key] != example_value) { + print key, env_values[key], example_value > "'$temp_diff'" + diff_count++ + } + } + } + END { print diff_count } + ' .env .env.example) + + if [[ $diff_count -gt 0 ]]; then + log_success "Detected differences in $diff_count environment variables" + # Show detailed differences + show_differences_detail + else + log_info "No differences detected" + fi +} + +# Parse environment variable line +# Extracts key-value pairs from .env file format lines +# Arguments: +# $1 - Line to parse +# Returns: +# 0 - Success, outputs "key|value" format +# 1 - Skip (empty line, comment, or invalid format) +parse_env_line() { + local line="$1" + local key="" + local value="" + + # Skip empty lines or comment lines + [[ -z "$line" || "$line" =~ ^[[:space:]]*# ]] && return 1 + + # Split by = + if [[ "$line" =~ ^([^=]+)=(.*)$ ]]; then + key="${BASH_REMATCH[1]}" + value="${BASH_REMATCH[2]}" + + # Remove leading and trailing whitespace + key=$(echo "$key" | sed 's/^[[:space:]]*//; s/[[:space:]]*$//') + value=$(echo "$value" | sed 's/^[[:space:]]*//; s/[[:space:]]*$//') + + if [[ -n "$key" ]]; then + echo "$key|$value" + return 0 + fi + fi + + return 1 +} + +# Show detailed differences +show_differences_detail() { + log_info "" + log_info "=== Environment Variable Differences ===" + + # Read differences from the already created diff file + if [[ ! -s "$DIFF_FILE" ]]; then + log_info "No differences to display" + return + fi + + # Display differences + local count=1 + while IFS=$'\x01' read -r key env_value example_value; do + echo "" + echo -e "${YELLOW}[$count] $key${NC}" + echo -e " ${GREEN}.env (current)${NC} : ${env_value}" + echo -e " ${BLUE}.env.example (recommended)${NC}: ${example_value}" + + # Analyze value changes + analyze_value_change "$env_value" "$example_value" + ((count++)) + done < "$DIFF_FILE" + + echo "" + log_info "=== Difference Analysis Complete ===" + log_info "Note: Consider changing to the recommended values above." + log_info "Current implementation preserves .env values." 
+ echo "" +} + +# Analyze value changes +analyze_value_change() { + local current_value="$1" + local recommended_value="$2" + + # Analyze value characteristics + local analysis="" + + # Empty value check + if [[ -z "$current_value" && -n "$recommended_value" ]]; then + analysis=" ${RED}→ Setting from empty to recommended value${NC}" + elif [[ -n "$current_value" && -z "$recommended_value" ]]; then + analysis=" ${RED}→ Recommended value changed to empty${NC}" + # Numeric check - using arithmetic evaluation for robust comparison + elif [[ "$current_value" =~ ^[0-9]+$ && "$recommended_value" =~ ^[0-9]+$ ]]; then + # Use arithmetic evaluation to handle leading zeros correctly + if (( 10#$current_value < 10#$recommended_value )); then + analysis=" ${BLUE}→ Numeric increase (${current_value} < ${recommended_value})${NC}" + elif (( 10#$current_value > 10#$recommended_value )); then + analysis=" ${YELLOW}→ Numeric decrease (${current_value} > ${recommended_value})${NC}" + fi + # Boolean check + elif [[ "$current_value" =~ ^(true|false)$ && "$recommended_value" =~ ^(true|false)$ ]]; then + if [[ "$current_value" != "$recommended_value" ]]; then + analysis=" ${BLUE}→ Boolean value change (${current_value} → ${recommended_value})${NC}" + fi + # URL/endpoint check + elif [[ "$current_value" =~ ^https?:// || "$recommended_value" =~ ^https?:// ]]; then + analysis=" ${BLUE}→ URL/endpoint change${NC}" + # File path check + elif [[ "$current_value" =~ ^/ || "$recommended_value" =~ ^/ ]]; then + analysis=" ${BLUE}→ File path change${NC}" + else + # Length comparison + local current_len=${#current_value} + local recommended_len=${#recommended_value} + if [[ $current_len -ne $recommended_len ]]; then + analysis=" ${YELLOW}→ String length change (${current_len} → ${recommended_len} characters)${NC}" + fi + fi + + if [[ -n "$analysis" ]]; then + echo -e "$analysis" + fi +} + +# Synchronize .env file with .env.example while preserving custom values +# Creates a new .env file based on .env.example structure, preserving existing custom values +# Global variables used: DIFF_FILE, TEMP_DIR +sync_env_file() { + log_info "Starting partial synchronization of .env file..." + + local new_env_file=".env.new" + local preserved_count=0 + local updated_count=0 + + # Pre-process diff file for efficient lookup + local lookup_file="" + if [[ -f "$DIFF_FILE" && -s "$DIFF_FILE" ]]; then + lookup_file="${DIFF_FILE}.lookup" + # Create sorted lookup file for fast search + sort "$DIFF_FILE" > "$lookup_file" + log_info "Created lookup file for $(wc -l < "$DIFF_FILE") preserved values" + fi + + # Use AWK for efficient processing (much faster than bash loop for large files) + log_info "Processing $(wc -l < .env.example) lines with AWK..." 
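+    # Shape of the AWK pass below: .env.example is read once; every KEY=VALUE line
+    # whose key appears in the lookup file is re-emitted with the preserved .env
+    # value, while comments, blank lines, and newly introduced variables are
+    # copied through unchanged, so the rebuilt .env keeps .env.example's layout.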
+ + local preserved_keys_file="${TEMP_DIR}/preserved_keys" + local awk_preserved_count_file="${TEMP_DIR}/awk_preserved_count" + local awk_updated_count_file="${TEMP_DIR}/awk_updated_count" + + awk -F'=' -v lookup_file="$lookup_file" -v preserved_file="$preserved_keys_file" \ + -v preserved_count_file="$awk_preserved_count_file" -v updated_count_file="$awk_updated_count_file" ' + BEGIN { + preserved_count = 0 + updated_count = 0 + + # Load preserved values if lookup file exists + if (lookup_file != "") { + while ((getline line < lookup_file) > 0) { + split(line, parts, "\x01") + key = parts[1] + value = parts[2] + preserved_values[key] = value + } + close(lookup_file) + } + } + + # Process each line + { + # Check if this is an environment variable line + if (/^[[:space:]]*[A-Za-z_][A-Za-z0-9_]*[[:space:]]*=/) { + # Extract key + key = $1 + gsub(/^[[:space:]]+|[[:space:]]+$/, "", key) + + # Check if key should be preserved + if (key in preserved_values) { + print key "=" preserved_values[key] + print key > preserved_file + preserved_count++ + } else { + print $0 + updated_count++ + } + } else { + # Not an env var line, preserve as-is + print $0 + } + } + + END { + print preserved_count > preserved_count_file + print updated_count > updated_count_file + } + ' .env.example > "$new_env_file" + + # Read counters and preserved keys + if [[ -f "$awk_preserved_count_file" ]]; then + preserved_count=$(cat "$awk_preserved_count_file") + fi + if [[ -f "$awk_updated_count_file" ]]; then + updated_count=$(cat "$awk_updated_count_file") + fi + + # Show what was preserved + if [[ -f "$preserved_keys_file" ]]; then + while read -r key; do + [[ -n "$key" ]] && log_info " Preserved: $key (.env value)" + done < "$preserved_keys_file" + fi + + # Clean up lookup file + [[ -n "$lookup_file" ]] && rm -f "$lookup_file" + + # Replace the original .env file + if mv "$new_env_file" ".env"; then + log_success "Successfully created new .env file" + else + log_error "Failed to replace .env file" + rm -f "$new_env_file" + return 1 + fi + + # Clean up difference file and temporary directory + if [[ -n "${TEMP_DIR:-}" ]]; then + rm -rf "${TEMP_DIR}" + unset TEMP_DIR + fi + if [[ -n "${DIFF_FILE:-}" ]]; then + unset DIFF_FILE + fi + + log_success "Partial synchronization of .env file completed" + log_info " Preserved .env values: $preserved_count" + log_info " Updated to .env.example values: $updated_count" +} + +# Detect removed environment variables +detect_removed_variables() { + log_info "Detecting removed environment variables..." + + if [[ ! 
-f ".env" ]]; then + return + fi + + # Use temporary files for efficient lookup + local temp_dir="${TEMP_DIR:-$(mktemp -d)}" + local temp_example_keys="$temp_dir/example_keys" + local temp_current_keys="$temp_dir/current_keys" + local cleanup_temp_dir="" + + # Set flag if we created a new temp directory + if [[ -z "${TEMP_DIR:-}" ]]; then + cleanup_temp_dir="$temp_dir" + fi + + # Get keys from .env.example and .env, sorted for comm + awk -F= '!/^[[:space:]]*#/ && /=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $1); print $1}' .env.example | sort > "$temp_example_keys" + awk -F= '!/^[[:space:]]*#/ && /=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $1); print $1}' .env | sort > "$temp_current_keys" + + # Get keys from existing .env and check for removals + local removed_vars=() + while IFS= read -r var; do + removed_vars+=("$var") + done < <(comm -13 "$temp_example_keys" "$temp_current_keys") + + # Clean up temporary files if we created a new temp directory + if [[ -n "$cleanup_temp_dir" ]]; then + rm -rf "$cleanup_temp_dir" + fi + + if [[ ${#removed_vars[@]} -gt 0 ]]; then + log_warning "The following environment variables have been removed from .env.example:" + for var in "${removed_vars[@]}"; do + log_warning " - $var" + done + log_warning "Consider manually removing these variables from .env" + else + log_success "No removed environment variables found" + fi +} + +# Show statistics +show_statistics() { + log_info "Synchronization statistics:" + + local total_example=$(grep -c "^[^#]*=" .env.example 2>/dev/null || echo "0") + local total_env=$(grep -c "^[^#]*=" .env 2>/dev/null || echo "0") + + log_info " .env.example environment variables: $total_example" + log_info " .env environment variables: $total_env" +} + +# Main execution function +# Orchestrates the complete synchronization process in the correct order +main() { + log_info "=== Dify Environment Variables Synchronization Script ===" + log_info "Execution started: $(date)" + + # Check prerequisites + check_files + + # Create backup + create_backup + + # Detect differences + detect_differences + + # Detect removed variables (before sync) + detect_removed_variables + + # Synchronize environment file + sync_env_file + + # Show statistics + show_statistics + + log_success "=== Synchronization process completed successfully ===" + log_info "Execution finished: $(date)" +} + +# Execute main function only when script is run directly +if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then + main "$@" +fi \ No newline at end of file diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 6ba3409288..709aff23df 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -21,7 +21,7 @@ services: # API service api: - image: langgenius/dify-api:1.11.1 + image: langgenius/dify-api:1.11.2 restart: always environment: # Use the shared environment variables. @@ -34,6 +34,7 @@ services: PLUGIN_REMOTE_INSTALL_HOST: ${EXPOSE_PLUGIN_DEBUGGING_HOST:-localhost} PLUGIN_REMOTE_INSTALL_PORT: ${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003} PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} + PLUGIN_DAEMON_TIMEOUT: ${PLUGIN_DAEMON_TIMEOUT:-600.0} INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} depends_on: init_permissions: @@ -62,7 +63,7 @@ services: # worker service # The Celery worker for processing all queues (dataset, workflow, mail, etc.) 
worker: - image: langgenius/dify-api:1.11.1 + image: langgenius/dify-api:1.11.2 restart: always environment: # Use the shared environment variables. @@ -101,7 +102,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.11.1 + image: langgenius/dify-api:1.11.2 restart: always environment: # Use the shared environment variables. @@ -131,7 +132,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.11.1 + image: langgenius/dify-web:1.11.2 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -269,7 +270,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.5.1-local + image: langgenius/dify-plugin-daemon:0.5.2-local restart: always environment: # Use the shared environment variables. @@ -414,7 +415,7 @@ services: # and modify the env vars below in .env if HTTPS_ENABLED is true. NGINX_SSL_CERT_FILENAME: ${NGINX_SSL_CERT_FILENAME:-dify.crt} NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key} - NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.1 TLSv1.2 TLSv1.3} + NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.2 TLSv1.3} NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto} NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M} NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65} @@ -474,7 +475,8 @@ services: OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai} OB_SERVER_IP: 127.0.0.1 MODE: mini - LANG: en_US.UTF-8 + LANG: C.UTF-8 + LC_ALL: C.UTF-8 ports: - "${OCEANBASE_VECTOR_PORT:-2881}:2881" healthcheck: diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index 68ef217bbd..81c34fc6a2 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -123,12 +123,13 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.5.1-local + image: langgenius/dify-plugin-daemon:0.5.2-local restart: always env_file: - ./middleware.env environment: # Use the shared environment variables. 
+ LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text} DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin} REDIS_HOST: ${REDIS_HOST:-redis} REDIS_PORT: ${REDIS_PORT:-6379} diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index cc17b2853a..041f60aaa2 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -13,10 +13,11 @@ x-shared-env: &shared-api-worker-env APP_WEB_URL: ${APP_WEB_URL:-} FILES_URL: ${FILES_URL:-} INTERNAL_FILES_URL: ${INTERNAL_FILES_URL:-} - LANG: ${LANG:-en_US.UTF-8} - LC_ALL: ${LC_ALL:-en_US.UTF-8} + LANG: ${LANG:-C.UTF-8} + LC_ALL: ${LC_ALL:-C.UTF-8} PYTHONIOENCODING: ${PYTHONIOENCODING:-utf-8} LOG_LEVEL: ${LOG_LEVEL:-INFO} + LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text} LOG_FILE: ${LOG_FILE:-/app/logs/server.log} LOG_FILE_MAX_SIZE: ${LOG_FILE_MAX_SIZE:-20} LOG_FILE_BACKUP_COUNT: ${LOG_FILE_BACKUP_COUNT:-5} @@ -108,6 +109,7 @@ x-shared-env: &shared-api-worker-env CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*} COOKIE_DOMAIN: ${COOKIE_DOMAIN:-} NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-} + NEXT_PUBLIC_BATCH_CONCURRENCY: ${NEXT_PUBLIC_BATCH_CONCURRENCY:-5} STORAGE_TYPE: ${STORAGE_TYPE:-opendal} OPENDAL_SCHEME: ${OPENDAL_SCHEME:-fs} OPENDAL_FS_ROOT: ${OPENDAL_FS_ROOT:-storage} @@ -121,6 +123,13 @@ x-shared-env: &shared-api-worker-env S3_ACCESS_KEY: ${S3_ACCESS_KEY:-} S3_SECRET_KEY: ${S3_SECRET_KEY:-} S3_USE_AWS_MANAGED_IAM: ${S3_USE_AWS_MANAGED_IAM:-false} + ARCHIVE_STORAGE_ENABLED: ${ARCHIVE_STORAGE_ENABLED:-false} + ARCHIVE_STORAGE_ENDPOINT: ${ARCHIVE_STORAGE_ENDPOINT:-} + ARCHIVE_STORAGE_ARCHIVE_BUCKET: ${ARCHIVE_STORAGE_ARCHIVE_BUCKET:-} + ARCHIVE_STORAGE_EXPORT_BUCKET: ${ARCHIVE_STORAGE_EXPORT_BUCKET:-} + ARCHIVE_STORAGE_ACCESS_KEY: ${ARCHIVE_STORAGE_ACCESS_KEY:-} + ARCHIVE_STORAGE_SECRET_KEY: ${ARCHIVE_STORAGE_SECRET_KEY:-} + ARCHIVE_STORAGE_REGION: ${ARCHIVE_STORAGE_REGION:-auto} AZURE_BLOB_ACCOUNT_NAME: ${AZURE_BLOB_ACCOUNT_NAME:-difyai} AZURE_BLOB_ACCOUNT_KEY: ${AZURE_BLOB_ACCOUNT_KEY:-difyai} AZURE_BLOB_CONTAINER_NAME: ${AZURE_BLOB_CONTAINER_NAME:-difyai-container} @@ -134,11 +143,13 @@ x-shared-env: &shared-api-worker-env ALIYUN_OSS_REGION: ${ALIYUN_OSS_REGION:-ap-southeast-1} ALIYUN_OSS_AUTH_VERSION: ${ALIYUN_OSS_AUTH_VERSION:-v4} ALIYUN_OSS_PATH: ${ALIYUN_OSS_PATH:-your-path} + ALIYUN_CLOUDBOX_ID: ${ALIYUN_CLOUDBOX_ID:-your-cloudbox-id} TENCENT_COS_BUCKET_NAME: ${TENCENT_COS_BUCKET_NAME:-your-bucket-name} TENCENT_COS_SECRET_KEY: ${TENCENT_COS_SECRET_KEY:-your-secret-key} TENCENT_COS_SECRET_ID: ${TENCENT_COS_SECRET_ID:-your-secret-id} TENCENT_COS_REGION: ${TENCENT_COS_REGION:-your-region} TENCENT_COS_SCHEME: ${TENCENT_COS_SCHEME:-your-scheme} + TENCENT_COS_CUSTOM_DOMAIN: ${TENCENT_COS_CUSTOM_DOMAIN:-your-custom-domain} OCI_ENDPOINT: ${OCI_ENDPOINT:-https://your-object-storage-namespace.compat.objectstorage.us-ashburn-1.oraclecloud.com} OCI_BUCKET_NAME: ${OCI_BUCKET_NAME:-your-bucket-name} OCI_ACCESS_KEY: ${OCI_ACCESS_KEY:-your-access-key} @@ -148,6 +159,7 @@ x-shared-env: &shared-api-worker-env HUAWEI_OBS_SECRET_KEY: ${HUAWEI_OBS_SECRET_KEY:-your-secret-key} HUAWEI_OBS_ACCESS_KEY: ${HUAWEI_OBS_ACCESS_KEY:-your-access-key} HUAWEI_OBS_SERVER: ${HUAWEI_OBS_SERVER:-your-server-url} + HUAWEI_OBS_PATH_STYLE: ${HUAWEI_OBS_PATH_STYLE:-false} VOLCENGINE_TOS_BUCKET_NAME: ${VOLCENGINE_TOS_BUCKET_NAME:-your-bucket-name} VOLCENGINE_TOS_SECRET_KEY: ${VOLCENGINE_TOS_SECRET_KEY:-your-secret-key} VOLCENGINE_TOS_ACCESS_KEY: ${VOLCENGINE_TOS_ACCESS_KEY:-your-access-key} @@ -455,6 +467,15 @@ x-shared-env: 
&shared-api-worker-env WORKFLOW_LOG_CLEANUP_ENABLED: ${WORKFLOW_LOG_CLEANUP_ENABLED:-false} WORKFLOW_LOG_RETENTION_DAYS: ${WORKFLOW_LOG_RETENTION_DAYS:-30} WORKFLOW_LOG_CLEANUP_BATCH_SIZE: ${WORKFLOW_LOG_CLEANUP_BATCH_SIZE:-100} + ALIYUN_SLS_ACCESS_KEY_ID: ${ALIYUN_SLS_ACCESS_KEY_ID:-} + ALIYUN_SLS_ACCESS_KEY_SECRET: ${ALIYUN_SLS_ACCESS_KEY_SECRET:-} + ALIYUN_SLS_ENDPOINT: ${ALIYUN_SLS_ENDPOINT:-} + ALIYUN_SLS_REGION: ${ALIYUN_SLS_REGION:-} + ALIYUN_SLS_PROJECT_NAME: ${ALIYUN_SLS_PROJECT_NAME:-} + ALIYUN_SLS_LOGSTORE_TTL: ${ALIYUN_SLS_LOGSTORE_TTL:-365} + LOGSTORE_DUAL_WRITE_ENABLED: ${LOGSTORE_DUAL_WRITE_ENABLED:-false} + LOGSTORE_DUAL_READ_ENABLED: ${LOGSTORE_DUAL_READ_ENABLED:-true} + LOGSTORE_ENABLE_PUT_GRAPH_FIELD: ${LOGSTORE_ENABLE_PUT_GRAPH_FIELD:-true} HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760} HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576} HTTP_REQUEST_NODE_SSL_VERIFY: ${HTTP_REQUEST_NODE_SSL_VERIFY:-True} @@ -528,7 +549,7 @@ x-shared-env: &shared-api-worker-env NGINX_SSL_PORT: ${NGINX_SSL_PORT:-443} NGINX_SSL_CERT_FILENAME: ${NGINX_SSL_CERT_FILENAME:-dify.crt} NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key} - NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.1 TLSv1.2 TLSv1.3} + NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.2 TLSv1.3} NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto} NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M} NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65} @@ -583,6 +604,7 @@ x-shared-env: &shared-api-worker-env PLUGIN_STDIO_MAX_BUFFER_SIZE: ${PLUGIN_STDIO_MAX_BUFFER_SIZE:-5242880} PLUGIN_PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120} PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600} + PLUGIN_DAEMON_TIMEOUT: ${PLUGIN_DAEMON_TIMEOUT:-600.0} PIP_MIRROR_URL: ${PIP_MIRROR_URL:-} PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local} PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage} @@ -631,7 +653,7 @@ x-shared-env: &shared-api-worker-env QUEUE_MONITOR_THRESHOLD: ${QUEUE_MONITOR_THRESHOLD:-200} QUEUE_MONITOR_ALERT_EMAILS: ${QUEUE_MONITOR_ALERT_EMAILS:-} QUEUE_MONITOR_INTERVAL: ${QUEUE_MONITOR_INTERVAL:-30} - SWAGGER_UI_ENABLED: ${SWAGGER_UI_ENABLED:-true} + SWAGGER_UI_ENABLED: ${SWAGGER_UI_ENABLED:-false} SWAGGER_UI_PATH: ${SWAGGER_UI_PATH:-/swagger-ui.html} DSL_EXPORT_ENCRYPT_DATASET_ID: ${DSL_EXPORT_ENCRYPT_DATASET_ID:-true} DATASET_MAX_SEGMENTS_PER_REQUEST: ${DATASET_MAX_SEGMENTS_PER_REQUEST:-0} @@ -640,6 +662,7 @@ x-shared-env: &shared-api-worker-env ENABLE_CREATE_TIDB_SERVERLESS_TASK: ${ENABLE_CREATE_TIDB_SERVERLESS_TASK:-false} ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK: ${ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK:-false} ENABLE_CLEAN_MESSAGES: ${ENABLE_CLEAN_MESSAGES:-false} + ENABLE_WORKFLOW_RUN_CLEANUP_TASK: ${ENABLE_WORKFLOW_RUN_CLEANUP_TASK:-false} ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: ${ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK:-false} ENABLE_DATASETS_QUEUE_MONITOR: ${ENABLE_DATASETS_QUEUE_MONITOR:-false} ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: ${ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK:-true} @@ -655,6 +678,9 @@ x-shared-env: &shared-api-worker-env ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR: ${ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR:-20} ANNOTATION_IMPORT_MAX_CONCURRENT: ${ANNOTATION_IMPORT_MAX_CONCURRENT:-5} AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-} + SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: ${SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD:-21} + SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: 
${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE:-1000} + SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: ${SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS:-30} services: # Init container to fix permissions @@ -678,7 +704,7 @@ services: # API service api: - image: langgenius/dify-api:1.11.1 + image: langgenius/dify-api:1.11.2 restart: always environment: # Use the shared environment variables. @@ -691,6 +717,7 @@ services: PLUGIN_REMOTE_INSTALL_HOST: ${EXPOSE_PLUGIN_DEBUGGING_HOST:-localhost} PLUGIN_REMOTE_INSTALL_PORT: ${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003} PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} + PLUGIN_DAEMON_TIMEOUT: ${PLUGIN_DAEMON_TIMEOUT:-600.0} INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} depends_on: init_permissions: @@ -719,7 +746,7 @@ services: # worker service # The Celery worker for processing all queues (dataset, workflow, mail, etc.) worker: - image: langgenius/dify-api:1.11.1 + image: langgenius/dify-api:1.11.2 restart: always environment: # Use the shared environment variables. @@ -758,7 +785,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.11.1 + image: langgenius/dify-api:1.11.2 restart: always environment: # Use the shared environment variables. @@ -788,7 +815,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.11.1 + image: langgenius/dify-web:1.11.2 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -926,7 +953,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.5.1-local + image: langgenius/dify-plugin-daemon:0.5.2-local restart: always environment: # Use the shared environment variables. @@ -1071,7 +1098,7 @@ services: # and modify the env vars below in .env if HTTPS_ENABLED is true. 
NGINX_SSL_CERT_FILENAME: ${NGINX_SSL_CERT_FILENAME:-dify.crt} NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key} - NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.1 TLSv1.2 TLSv1.3} + NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.2 TLSv1.3} NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto} NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M} NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65} @@ -1131,7 +1158,8 @@ services: OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai} OB_SERVER_IP: 127.0.0.1 MODE: mini - LANG: en_US.UTF-8 + LANG: C.UTF-8 + LC_ALL: C.UTF-8 ports: - "${OCEANBASE_VECTOR_PORT:-2881}:2881" healthcheck: diff --git a/docker/middleware.env.example b/docker/middleware.env.example index d4cbcd1762..c88dbe5511 100644 --- a/docker/middleware.env.example +++ b/docker/middleware.env.example @@ -213,3 +213,28 @@ PLUGIN_VOLCENGINE_TOS_ENDPOINT= PLUGIN_VOLCENGINE_TOS_ACCESS_KEY= PLUGIN_VOLCENGINE_TOS_SECRET_KEY= PLUGIN_VOLCENGINE_TOS_REGION= + +# ------------------------------ +# Environment Variables for Aliyun SLS (Simple Log Service) +# ------------------------------ +# Aliyun SLS Access Key ID +ALIYUN_SLS_ACCESS_KEY_ID= +# Aliyun SLS Access Key Secret +ALIYUN_SLS_ACCESS_KEY_SECRET= +# Aliyun SLS Endpoint (e.g., cn-hangzhou.log.aliyuncs.com) +ALIYUN_SLS_ENDPOINT= +# Aliyun SLS Region (e.g., cn-hangzhou) +ALIYUN_SLS_REGION= +# Aliyun SLS Project Name +ALIYUN_SLS_PROJECT_NAME= +# Aliyun SLS Logstore TTL (default: 365 days, 3650 for permanent storage) +ALIYUN_SLS_LOGSTORE_TTL=365 +# Enable dual-write to both LogStore and SQL database (default: true) +LOGSTORE_DUAL_WRITE_ENABLED=true +# Enable dual-read fallback to SQL database when LogStore returns no results (default: true) +# Useful for migration scenarios where historical data exists only in SQL database +LOGSTORE_DUAL_READ_ENABLED=true +# Control flag for whether to write the `graph` field to LogStore. +# If LOGSTORE_ENABLE_PUT_GRAPH_FIELD is "true", write the full `graph` field; +# otherwise write an empty {} instead. Defaults to writing the `graph` field. +LOGSTORE_ENABLE_PUT_GRAPH_FIELD=true \ No newline at end of file diff --git a/docker/ssrf_proxy/squid.conf.template b/docker/ssrf_proxy/squid.conf.template index 1775a1fff9..256e669c8d 100644 --- a/docker/ssrf_proxy/squid.conf.template +++ b/docker/ssrf_proxy/squid.conf.template @@ -54,3 +54,52 @@ http_access allow src_all # Unless the option's size is increased, an error will occur when uploading more than two files. 
client_request_buffer_max_size 100 MB + +################################## Performance & Concurrency ############################### +# Increase file descriptor limit for high concurrency +max_filedescriptors 65536 + +# Timeout configurations for image requests +connect_timeout 30 seconds +request_timeout 2 minutes +read_timeout 2 minutes +client_lifetime 5 minutes +shutdown_lifetime 30 seconds + +# Persistent connections - improve performance for multiple requests +server_persistent_connections on +client_persistent_connections on +persistent_request_timeout 30 seconds +pconn_timeout 1 minute + +# Connection pool and concurrency limits +client_db on +server_idle_pconn_timeout 2 minutes +client_idle_pconn_timeout 2 minutes + +# Quick abort settings - don't abort requests that are mostly done +quick_abort_min 16 KB +quick_abort_max 16 MB +quick_abort_pct 95 + +# Memory and cache optimization +memory_cache_mode disk +cache_mem 256 MB +maximum_object_size_in_memory 512 KB + +# DNS resolver settings for better performance +dns_timeout 30 seconds +dns_retransmit_interval 5 seconds +# By default, Squid uses the system's configured DNS resolvers. +# If you need to override them, set dns_nameservers to appropriate servers +# for your environment (for example, internal/corporate DNS). The following +# is an example using public DNS and SHOULD be customized before use: +# dns_nameservers 8.8.8.8 8.8.4.4 + +# Logging format for better debugging +logformat dify_log %ts.%03tu %6tr %>a %Ss/%03>Hs
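The `LOGSTORE_DUAL_WRITE_ENABLED` and `LOGSTORE_DUAL_READ_ENABLED` flags introduced in `middleware.env.example` above encode a migration pattern, not just a toggle. As a minimal sketch of the read path they describe, assuming hypothetical `queryLogStore` and `querySql` helpers (the real implementation lives in the Dify API service and is not part of this diff):

```js
// Hypothetical stand-ins for the two stores; the actual readers live in the
// Dify API service and are not shown in this diff.
async function queryLogStore(runId) {
  return [] // e.g. a run whose rows were never written to LogStore
}

async function querySql(runId) {
  return [`sql-row-for-${runId}`]
}

// LOGSTORE_DUAL_READ_ENABLED semantics as documented above: read LogStore
// first, and fall back to the SQL database only when LogStore returns no
// results, covering migrations where historical data exists only in SQL.
async function readWorkflowLogs(runId, dualReadEnabled) {
  const rows = await queryLogStore(runId)
  if (rows.length > 0 || !dualReadEnabled)
    return rows
  return querySql(runId)
}

// With dual-read enabled, an empty LogStore result falls through to SQL.
console.log(await readWorkflowLogs('run-1', true)) // ['sql-row-for-run-1']
```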

diff --git a/README_FR.md b/README_FR.md -Dify est une plateforme de développement d'applications LLM open source. Son interface intuitive combine un flux de travail d'IA, un pipeline RAG, des capacités d'agent, une gestion de modèles, des fonctionnalités d'observabilité, et plus encore, vous permettant de passer rapidement du prototype à la production. Voici une liste des fonctionnalités principales: +Dify est une plateforme de développement d'applications LLM open source. Son interface intuitive combine un flux de travail d'IA, un pipeline RAG, des capacités d'agent, une gestion de modèles, des fonctionnalités d'observabilité, et plus encore, vous permettant de passer rapidement du prototype à la production. Voici une liste des fonctionnalités principales:

**1. Flux de travail** : Construisez et testez des flux de travail d'IA puissants sur un canevas visuel, en utilisant toutes les fonctionnalités suivantes et plus encore. **2. Prise en charge complète des modèles** : -Intégration transparente avec des centaines de LLM propriétaires / open source provenant de dizaines de fournisseurs d'inférence et de solutions auto-hébergées, couvrant GPT, Mistral, Llama3, et tous les modèles compatibles avec l'API OpenAI. Une liste complète des fournisseurs de modèles pris en charge se trouve [ici](https://docs.dify.ai/getting-started/readme/model-providers). +Intégration transparente avec des centaines de LLM propriétaires / open source offerts par des dizaines de fournisseurs d'inférence et de solutions auto-hébergées, couvrant GPT, Mistral, Llama3, et tous les modèles compatibles avec l'API OpenAI. Une liste complète des fournisseurs de modèles pris en charge se trouve [ici](https://docs.dify.ai/getting-started/readme/model-providers). ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) @@ -79,7 +79,7 @@ Interface intuitive pour créer des prompts, comparer les performances des modè Des capacités RAG étendues qui couvrent tout, de l'ingestion de documents à la récupération, avec un support prêt à l'emploi pour l'extraction de texte à partir de PDF, PPT et autres formats de document courants. **5. Capacités d'agent** : -Vous pouvez définir des agents basés sur l'appel de fonction LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DALL·E, Stable Diffusion et WolframAlpha. +Vous pouvez définir des agents basés sur l'appel de fonctions LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DALL·E, Stable Diffusion et WolframAlpha. **6. LLMOps** : Surveillez et analysez les journaux d'application et les performances au fil du temps. Vous pouvez continuellement améliorer les prompts, les ensembles de données et les modèles en fonction des données de production et des annotations. diff --git a/sdks/nodejs-client/.gitignore b/sdks/nodejs-client/.gitignore index 1d40ff2ece..33cfbd3b18 100644 --- a/sdks/nodejs-client/.gitignore +++ b/sdks/nodejs-client/.gitignore @@ -1,48 +1,40 @@ -# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+# Dependencies +node_modules/ -# dependencies -/node_modules -/.pnp -.pnp.js +# Build output +dist/ -# testing -/coverage +# Testing +coverage/ -# next.js -/.next/ -/out/ +# IDE +.idea/ +.vscode/ +*.swp +*.swo -# production -/build - -# misc +# OS .DS_Store -*.pem +Thumbs.db -# debug +# Debug logs npm-debug.log* yarn-debug.log* yarn-error.log* -.pnpm-debug.log* +pnpm-debug.log* -# local env files -.env*.local +# Environment +.env +.env.local +.env.*.local -# vercel -.vercel - -# typescript +# TypeScript *.tsbuildinfo -next-env.d.ts -# npm +# Lock files (use pnpm-lock.yaml in CI if needed) package-lock.json +yarn.lock -# yarn -.pnp.cjs -.pnp.loader.mjs -.yarn/ -.yarnrc.yml - -# pmpm -pnpm-lock.yaml +# Misc +*.pem +*.tgz diff --git a/sdks/nodejs-client/LICENSE b/sdks/nodejs-client/LICENSE new file mode 100644 index 0000000000..ff17417e1f --- /dev/null +++ b/sdks/nodejs-client/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2023 LangGenius + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/sdks/nodejs-client/README.md b/sdks/nodejs-client/README.md index 3a5688bcbe..f8c2803c08 100644 --- a/sdks/nodejs-client/README.md +++ b/sdks/nodejs-client/README.md @@ -13,54 +13,92 @@ npm install dify-client After installing the SDK, you can use it in your project like this: ```js -import { DifyClient, ChatClient, CompletionClient } from 'dify-client' +import { + DifyClient, + ChatClient, + CompletionClient, + WorkflowClient, + KnowledgeBaseClient, + WorkspaceClient +} from 'dify-client' -const API_KEY = 'your-api-key-here' -const user = `random-user-id` +const API_KEY = 'your-app-api-key' +const DATASET_API_KEY = 'your-dataset-api-key' +const user = 'random-user-id' const query = 'Please tell me a short story in 10 words or less.' 
-const remote_url_files = [{ - type: 'image', - transfer_method: 'remote_url', - url: 'your_url_address' -}] -// Create a completion client -const completionClient = new CompletionClient(API_KEY) -// Create a completion message -completionClient.createCompletionMessage({'query': query}, user) -// Create a completion message with vision model -completionClient.createCompletionMessage({'query': 'Describe the picture.'}, user, false, remote_url_files) - -// Create a chat client const chatClient = new ChatClient(API_KEY) -// Create a chat message in stream mode -const response = await chatClient.createChatMessage({}, query, user, true, null) -const stream = response.data; -stream.on('data', data => { - console.log(data); -}); -stream.on('end', () => { - console.log('stream done'); -}); -// Create a chat message with vision model -chatClient.createChatMessage({}, 'Describe the picture.', user, false, null, remote_url_files) -// Fetch conversations -chatClient.getConversations(user) -// Fetch conversation messages -chatClient.getConversationMessages(conversationId, user) -// Rename conversation -chatClient.renameConversation(conversationId, name, user) - - +const completionClient = new CompletionClient(API_KEY) +const workflowClient = new WorkflowClient(API_KEY) +const kbClient = new KnowledgeBaseClient(DATASET_API_KEY) +const workspaceClient = new WorkspaceClient(DATASET_API_KEY) const client = new DifyClient(API_KEY) -// Fetch application parameters -client.getApplicationParameters(user) -// Provide feedback for a message -client.messageFeedback(messageId, rating, user) + +// App core +await client.getApplicationParameters(user) +await client.messageFeedback('message-id', 'like', user) + +// Completion (blocking) +await completionClient.createCompletionMessage({ + inputs: { query }, + user, + response_mode: 'blocking' +}) + +// Chat (streaming) +const stream = await chatClient.createChatMessage({ + inputs: {}, + query, + user, + response_mode: 'streaming' +}) +for await (const event of stream) { + console.log(event.event, event.data) +} + +// Chatflow (advanced chat via workflow_id) +await chatClient.createChatMessage({ + inputs: {}, + query, + user, + workflow_id: 'workflow-id', + response_mode: 'blocking' +}) + +// Workflow run (blocking or streaming) +await workflowClient.run({ + inputs: { query }, + user, + response_mode: 'blocking' +}) + +// Knowledge base (dataset token required) +await kbClient.listDatasets({ page: 1, limit: 20 }) +await kbClient.createDataset({ name: 'KB', indexing_technique: 'economy' }) + +// RAG pipeline (may require service API route registration) +const pipelineStream = await kbClient.runPipeline('dataset-id', { + inputs: {}, + datasource_type: 'online_document', + datasource_info_list: [], + start_node_id: 'start-node-id', + is_published: true, + response_mode: 'streaming' +}) +for await (const event of pipelineStream) { + console.log(event.data) +} + +// Workspace models (dataset token required) +await workspaceClient.getModelsByType('text-embedding') ``` -Replace 'your-api-key-here' with your actual Dify API key.Replace 'your-app-id-here' with your actual Dify APP ID. +Notes: + +- App endpoints use an app API token; knowledge base and workspace endpoints use a dataset API token. +- Chat/completion require a stable `user` identifier in the request payload. +- For streaming responses, iterate the returned AsyncIterable. Use `stream.toText()` to collect text. 
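As a small complement to the notes above, a sketch of collecting a streamed chat response into one string; it assumes only the v3 surface shown in this README (`ChatClient`, `createChatMessage`, and the `toText()` helper the notes mention):

```js
import { ChatClient } from 'dify-client'

const chatClient = new ChatClient('your-app-api-key')

// Request a streaming response, then collect it with toText() instead of
// iterating event by event; use one consumption style per stream.
const stream = await chatClient.createChatMessage({
  inputs: {},
  query: 'Summarize Dify in one sentence.',
  user: 'random-user-id',
  response_mode: 'streaming',
})

const text = await stream.toText()
console.log(text)
```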
## License diff --git a/sdks/nodejs-client/babel.config.cjs b/sdks/nodejs-client/babel.config.cjs deleted file mode 100644 index 392abb66d8..0000000000 --- a/sdks/nodejs-client/babel.config.cjs +++ /dev/null @@ -1,12 +0,0 @@ -module.exports = { - presets: [ - [ - "@babel/preset-env", - { - targets: { - node: "current", - }, - }, - ], - ], -}; diff --git a/sdks/nodejs-client/eslint.config.js b/sdks/nodejs-client/eslint.config.js new file mode 100644 index 0000000000..9e659f5d28 --- /dev/null +++ b/sdks/nodejs-client/eslint.config.js @@ -0,0 +1,45 @@ +import js from "@eslint/js"; +import tsParser from "@typescript-eslint/parser"; +import tsPlugin from "@typescript-eslint/eslint-plugin"; +import { fileURLToPath } from "node:url"; +import path from "node:path"; + +const tsconfigRootDir = path.dirname(fileURLToPath(import.meta.url)); +const typeCheckedRules = + tsPlugin.configs["recommended-type-checked"]?.rules ?? + tsPlugin.configs.recommendedTypeChecked?.rules ?? + {}; + +export default [ + { + ignores: ["dist", "node_modules", "scripts", "tests", "**/*.test.*", "**/*.spec.*"], + }, + js.configs.recommended, + { + files: ["src/**/*.ts"], + languageOptions: { + parser: tsParser, + ecmaVersion: "latest", + parserOptions: { + project: "./tsconfig.json", + tsconfigRootDir, + sourceType: "module", + }, + }, + plugins: { + "@typescript-eslint": tsPlugin, + }, + rules: { + ...tsPlugin.configs.recommended.rules, + ...typeCheckedRules, + "no-undef": "off", + "no-unused-vars": "off", + "@typescript-eslint/no-unsafe-call": "error", + "@typescript-eslint/no-unsafe-return": "error", + "@typescript-eslint/consistent-type-imports": [ + "error", + { prefer: "type-imports", fixStyle: "separate-type-imports" }, + ], + }, + }, +]; diff --git a/sdks/nodejs-client/index.d.ts b/sdks/nodejs-client/index.d.ts deleted file mode 100644 index 3ea4b9d153..0000000000 --- a/sdks/nodejs-client/index.d.ts +++ /dev/null @@ -1,107 +0,0 @@ -// Types.d.ts -export const BASE_URL: string; - -export type RequestMethods = 'GET' | 'POST' | 'PATCH' | 'DELETE'; - -interface Params { - [key: string]: any; -} - -interface HeaderParams { - [key: string]: string; -} - -interface User { -} - -interface DifyFileBase { - type: "image" -} - -export interface DifyRemoteFile extends DifyFileBase { - transfer_method: "remote_url" - url: string -} - -export interface DifyLocalFile extends DifyFileBase { - transfer_method: "local_file" - upload_file_id: string -} - -export type DifyFile = DifyRemoteFile | DifyLocalFile; - -export declare class DifyClient { - constructor(apiKey: string, baseUrl?: string); - - updateApiKey(apiKey: string): void; - - sendRequest( - method: RequestMethods, - endpoint: string, - data?: any, - params?: Params, - stream?: boolean, - headerParams?: HeaderParams - ): Promise; - - messageFeedback(message_id: string, rating: number, user: User): Promise; - - getApplicationParameters(user: User): Promise; - - fileUpload(data: FormData): Promise; - - textToAudio(text: string ,user: string, streaming?: boolean): Promise; - - getMeta(user: User): Promise; -} - -export declare class CompletionClient extends DifyClient { - createCompletionMessage( - inputs: any, - user: User, - stream?: boolean, - files?: DifyFile[] | null - ): Promise; -} - -export declare class ChatClient extends DifyClient { - createChatMessage( - inputs: any, - query: string, - user: User, - stream?: boolean, - conversation_id?: string | null, - files?: DifyFile[] | null - ): Promise; - - getSuggested(message_id: string, user: User): Promise; - - 
stopMessage(task_id: string, user: User) : Promise; - - - getConversations( - user: User, - first_id?: string | null, - limit?: number | null, - pinned?: boolean | null - ): Promise; - - getConversationMessages( - user: User, - conversation_id?: string, - first_id?: string | null, - limit?: number | null - ): Promise; - - renameConversation(conversation_id: string, name: string, user: User,auto_generate:boolean): Promise; - - deleteConversation(conversation_id: string, user: User): Promise; - - audioToText(data: FormData): Promise; -} - -export declare class WorkflowClient extends DifyClient { - run(inputs: any, user: User, stream?: boolean,): Promise; - - stop(task_id: string, user: User): Promise; -} diff --git a/sdks/nodejs-client/index.js b/sdks/nodejs-client/index.js deleted file mode 100644 index 9743ae358c..0000000000 --- a/sdks/nodejs-client/index.js +++ /dev/null @@ -1,351 +0,0 @@ -import axios from "axios"; -export const BASE_URL = "https://api.dify.ai/v1"; - -export const routes = { - // app's - feedback: { - method: "POST", - url: (message_id) => `/messages/${message_id}/feedbacks`, - }, - application: { - method: "GET", - url: () => `/parameters`, - }, - fileUpload: { - method: "POST", - url: () => `/files/upload`, - }, - textToAudio: { - method: "POST", - url: () => `/text-to-audio`, - }, - getMeta: { - method: "GET", - url: () => `/meta`, - }, - - // completion's - createCompletionMessage: { - method: "POST", - url: () => `/completion-messages`, - }, - - // chat's - createChatMessage: { - method: "POST", - url: () => `/chat-messages`, - }, - getSuggested:{ - method: "GET", - url: (message_id) => `/messages/${message_id}/suggested`, - }, - stopChatMessage: { - method: "POST", - url: (task_id) => `/chat-messages/${task_id}/stop`, - }, - getConversations: { - method: "GET", - url: () => `/conversations`, - }, - getConversationMessages: { - method: "GET", - url: () => `/messages`, - }, - renameConversation: { - method: "POST", - url: (conversation_id) => `/conversations/${conversation_id}/name`, - }, - deleteConversation: { - method: "DELETE", - url: (conversation_id) => `/conversations/${conversation_id}`, - }, - audioToText: { - method: "POST", - url: () => `/audio-to-text`, - }, - - // workflow‘s - runWorkflow: { - method: "POST", - url: () => `/workflows/run`, - }, - stopWorkflow: { - method: "POST", - url: (task_id) => `/workflows/tasks/${task_id}/stop`, - } - -}; - -export class DifyClient { - constructor(apiKey, baseUrl = BASE_URL) { - this.apiKey = apiKey; - this.baseUrl = baseUrl; - } - - updateApiKey(apiKey) { - this.apiKey = apiKey; - } - - async sendRequest( - method, - endpoint, - data = null, - params = null, - stream = false, - headerParams = {} - ) { - const isFormData = - (typeof FormData !== "undefined" && data instanceof FormData) || - (data && data.constructor && data.constructor.name === "FormData"); - const headers = { - Authorization: `Bearer ${this.apiKey}`, - ...(isFormData ? 
{} : { "Content-Type": "application/json" }), - ...headerParams, - }; - - const url = `${this.baseUrl}${endpoint}`; - let response; - if (stream) { - response = await axios({ - method, - url, - data, - params, - headers, - responseType: "stream", - }); - } else { - response = await axios({ - method, - url, - ...(method !== "GET" && { data }), - params, - headers, - responseType: "json", - }); - } - - return response; - } - - messageFeedback(message_id, rating, user) { - const data = { - rating, - user, - }; - return this.sendRequest( - routes.feedback.method, - routes.feedback.url(message_id), - data - ); - } - - getApplicationParameters(user) { - const params = { user }; - return this.sendRequest( - routes.application.method, - routes.application.url(), - null, - params - ); - } - - fileUpload(data) { - return this.sendRequest( - routes.fileUpload.method, - routes.fileUpload.url(), - data - ); - } - - textToAudio(text, user, streaming = false) { - const data = { - text, - user, - streaming - }; - return this.sendRequest( - routes.textToAudio.method, - routes.textToAudio.url(), - data, - null, - streaming - ); - } - - getMeta(user) { - const params = { user }; - return this.sendRequest( - routes.getMeta.method, - routes.getMeta.url(), - null, - params - ); - } -} - -export class CompletionClient extends DifyClient { - createCompletionMessage(inputs, user, stream = false, files = null) { - const data = { - inputs, - user, - response_mode: stream ? "streaming" : "blocking", - files, - }; - return this.sendRequest( - routes.createCompletionMessage.method, - routes.createCompletionMessage.url(), - data, - null, - stream - ); - } - - runWorkflow(inputs, user, stream = false, files = null) { - const data = { - inputs, - user, - response_mode: stream ? "streaming" : "blocking", - }; - return this.sendRequest( - routes.runWorkflow.method, - routes.runWorkflow.url(), - data, - null, - stream - ); - } -} - -export class ChatClient extends DifyClient { - createChatMessage( - inputs, - query, - user, - stream = false, - conversation_id = null, - files = null - ) { - const data = { - inputs, - query, - user, - response_mode: stream ? 
"streaming" : "blocking", - files, - }; - if (conversation_id) data.conversation_id = conversation_id; - - return this.sendRequest( - routes.createChatMessage.method, - routes.createChatMessage.url(), - data, - null, - stream - ); - } - - getSuggested(message_id, user) { - const data = { user }; - return this.sendRequest( - routes.getSuggested.method, - routes.getSuggested.url(message_id), - data - ); - } - - stopMessage(task_id, user) { - const data = { user }; - return this.sendRequest( - routes.stopChatMessage.method, - routes.stopChatMessage.url(task_id), - data - ); - } - - getConversations(user, first_id = null, limit = null, pinned = null) { - const params = { user, first_id: first_id, limit, pinned }; - return this.sendRequest( - routes.getConversations.method, - routes.getConversations.url(), - null, - params - ); - } - - getConversationMessages( - user, - conversation_id = "", - first_id = null, - limit = null - ) { - const params = { user }; - - if (conversation_id) params.conversation_id = conversation_id; - - if (first_id) params.first_id = first_id; - - if (limit) params.limit = limit; - - return this.sendRequest( - routes.getConversationMessages.method, - routes.getConversationMessages.url(), - null, - params - ); - } - - renameConversation(conversation_id, name, user, auto_generate) { - const data = { name, user, auto_generate }; - return this.sendRequest( - routes.renameConversation.method, - routes.renameConversation.url(conversation_id), - data - ); - } - - deleteConversation(conversation_id, user) { - const data = { user }; - return this.sendRequest( - routes.deleteConversation.method, - routes.deleteConversation.url(conversation_id), - data - ); - } - - - audioToText(data) { - return this.sendRequest( - routes.audioToText.method, - routes.audioToText.url(), - data - ); - } - -} - -export class WorkflowClient extends DifyClient { - run(inputs,user,stream) { - const data = { - inputs, - response_mode: stream ? 
"streaming" : "blocking", - user - }; - - return this.sendRequest( - routes.runWorkflow.method, - routes.runWorkflow.url(), - data, - null, - stream - ); - } - - stop(task_id, user) { - const data = { user }; - return this.sendRequest( - routes.stopWorkflow.method, - routes.stopWorkflow.url(task_id), - data - ); - } -} diff --git a/sdks/nodejs-client/index.test.js b/sdks/nodejs-client/index.test.js deleted file mode 100644 index e3a1715238..0000000000 --- a/sdks/nodejs-client/index.test.js +++ /dev/null @@ -1,141 +0,0 @@ -import { DifyClient, WorkflowClient, BASE_URL, routes } from "."; - -import axios from 'axios' - -jest.mock('axios') - -afterEach(() => { - jest.resetAllMocks() -}) - -describe('Client', () => { - let difyClient - beforeEach(() => { - difyClient = new DifyClient('test') - }) - - test('should create a client', () => { - expect(difyClient).toBeDefined(); - }) - // test updateApiKey - test('should update the api key', () => { - difyClient.updateApiKey('test2'); - expect(difyClient.apiKey).toBe('test2'); - }) -}); - -describe('Send Requests', () => { - let difyClient - - beforeEach(() => { - difyClient = new DifyClient('test') - }) - - it('should make a successful request to the application parameter', async () => { - const method = 'GET' - const endpoint = routes.application.url() - const expectedResponse = { data: 'response' } - axios.mockResolvedValue(expectedResponse) - - await difyClient.sendRequest(method, endpoint) - - expect(axios).toHaveBeenCalledWith({ - method, - url: `${BASE_URL}${endpoint}`, - params: null, - headers: { - Authorization: `Bearer ${difyClient.apiKey}`, - 'Content-Type': 'application/json', - }, - responseType: 'json', - }) - - }) - - it('should handle errors from the API', async () => { - const method = 'GET' - const endpoint = '/test-endpoint' - const errorMessage = 'Request failed with status code 404' - axios.mockRejectedValue(new Error(errorMessage)) - - await expect(difyClient.sendRequest(method, endpoint)).rejects.toThrow( - errorMessage - ) - }) - - it('uses the getMeta route configuration', async () => { - axios.mockResolvedValue({ data: 'ok' }) - await difyClient.getMeta('end-user') - - expect(axios).toHaveBeenCalledWith({ - method: routes.getMeta.method, - url: `${BASE_URL}${routes.getMeta.url()}`, - params: { user: 'end-user' }, - headers: { - Authorization: `Bearer ${difyClient.apiKey}`, - 'Content-Type': 'application/json', - }, - responseType: 'json', - }) - }) -}) - -describe('File uploads', () => { - let difyClient - const OriginalFormData = global.FormData - - beforeAll(() => { - global.FormData = class FormDataMock {} - }) - - afterAll(() => { - global.FormData = OriginalFormData - }) - - beforeEach(() => { - difyClient = new DifyClient('test') - }) - - it('does not override multipart boundary headers for FormData', async () => { - const form = new FormData() - axios.mockResolvedValue({ data: 'ok' }) - - await difyClient.fileUpload(form) - - expect(axios).toHaveBeenCalledWith({ - method: routes.fileUpload.method, - url: `${BASE_URL}${routes.fileUpload.url()}`, - data: form, - params: null, - headers: { - Authorization: `Bearer ${difyClient.apiKey}`, - }, - responseType: 'json', - }) - }) -}) - -describe('Workflow client', () => { - let workflowClient - - beforeEach(() => { - workflowClient = new WorkflowClient('test') - }) - - it('uses tasks stop path for workflow stop', async () => { - axios.mockResolvedValue({ data: 'stopped' }) - await workflowClient.stop('task-1', 'end-user') - - expect(axios).toHaveBeenCalledWith({ - method: 
routes.stopWorkflow.method, - url: `${BASE_URL}${routes.stopWorkflow.url('task-1')}`, - data: { user: 'end-user' }, - params: null, - headers: { - Authorization: `Bearer ${workflowClient.apiKey}`, - 'Content-Type': 'application/json', - }, - responseType: 'json', - }) - }) -}) diff --git a/sdks/nodejs-client/jest.config.cjs b/sdks/nodejs-client/jest.config.cjs deleted file mode 100644 index ea0fb34ad1..0000000000 --- a/sdks/nodejs-client/jest.config.cjs +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - testEnvironment: "node", - transform: { - "^.+\\.[tj]sx?$": "babel-jest", - }, -}; diff --git a/sdks/nodejs-client/package.json b/sdks/nodejs-client/package.json index c6bb0a9c1f..afbb58fee1 100644 --- a/sdks/nodejs-client/package.json +++ b/sdks/nodejs-client/package.json @@ -1,30 +1,70 @@ { "name": "dify-client", - "version": "2.3.2", + "version": "3.0.0", "description": "This is the Node.js SDK for the Dify.AI API, which allows you to easily integrate Dify.AI into your Node.js applications.", - "main": "index.js", "type": "module", - "types":"index.d.ts", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js" + } + }, + "engines": { + "node": ">=18.0.0" + }, + "files": [ + "dist", + "README.md", + "LICENSE" + ], "keywords": [ "Dify", "Dify.AI", - "LLM" + "LLM", + "AI", + "SDK", + "API" ], - "author": "Joel", + "author": "LangGenius", "contributors": [ - " <<427733928@qq.com>> (https://github.com/crazywoola)" + "Joel (https://github.com/iamjoel)", + "lyzno1 (https://github.com/lyzno1)", + "crazywoola <427733928@qq.com> (https://github.com/crazywoola)" ], + "repository": { + "type": "git", + "url": "https://github.com/langgenius/dify.git", + "directory": "sdks/nodejs-client" + }, + "bugs": { + "url": "https://github.com/langgenius/dify/issues" + }, + "homepage": "https://dify.ai", "license": "MIT", "scripts": { - "test": "jest" + "build": "tsup", + "lint": "eslint", + "lint:fix": "eslint --fix", + "type-check": "tsc -p tsconfig.json --noEmit", + "test": "vitest run", + "test:coverage": "vitest run --coverage", + "publish:check": "./scripts/publish.sh --dry-run", + "publish:npm": "./scripts/publish.sh" }, "dependencies": { - "axios": "^1.3.5" + "axios": "^1.13.2" }, "devDependencies": { - "@babel/core": "^7.21.8", - "@babel/preset-env": "^7.21.5", - "babel-jest": "^29.5.0", - "jest": "^29.5.0" + "@eslint/js": "^9.39.2", + "@types/node": "^25.0.3", + "@typescript-eslint/eslint-plugin": "^8.50.1", + "@typescript-eslint/parser": "^8.50.1", + "@vitest/coverage-v8": "4.0.16", + "eslint": "^9.39.2", + "tsup": "^8.5.1", + "typescript": "^5.9.3", + "vitest": "^4.0.16" } } diff --git a/sdks/nodejs-client/pnpm-lock.yaml b/sdks/nodejs-client/pnpm-lock.yaml new file mode 100644 index 0000000000..6febed2ea6 --- /dev/null +++ b/sdks/nodejs-client/pnpm-lock.yaml @@ -0,0 +1,2353 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + axios: + specifier: ^1.13.2 + version: 1.13.2 + devDependencies: + '@eslint/js': + specifier: ^9.39.2 + version: 9.39.2 + '@types/node': + specifier: ^25.0.3 + version: 25.0.3 + '@typescript-eslint/eslint-plugin': + specifier: ^8.50.1 + version: 8.50.1(@typescript-eslint/parser@8.50.1(eslint@9.39.2)(typescript@5.9.3))(eslint@9.39.2)(typescript@5.9.3) + '@typescript-eslint/parser': + specifier: ^8.50.1 + version: 8.50.1(eslint@9.39.2)(typescript@5.9.3) + '@vitest/coverage-v8': + specifier: 4.0.16 + 
version: 4.0.16(vitest@4.0.16(@types/node@25.0.3)) + eslint: + specifier: ^9.39.2 + version: 9.39.2 + tsup: + specifier: ^8.5.1 + version: 8.5.1(postcss@8.5.6)(typescript@5.9.3) + typescript: + specifier: ^5.9.3 + version: 5.9.3 + vitest: + specifier: ^4.0.16 + version: 4.0.16(@types/node@25.0.3) + +packages: + + '@babel/helper-string-parser@7.27.1': + resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-identifier@7.28.5': + resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==} + engines: {node: '>=6.9.0'} + + '@babel/parser@7.28.5': + resolution: {integrity: sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==} + engines: {node: '>=6.0.0'} + hasBin: true + + '@babel/types@7.28.5': + resolution: {integrity: sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==} + engines: {node: '>=6.9.0'} + + '@bcoe/v8-coverage@1.0.2': + resolution: {integrity: sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==} + engines: {node: '>=18'} + + '@esbuild/aix-ppc64@0.27.2': + resolution: {integrity: sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.27.2': + resolution: {integrity: sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.27.2': + resolution: {integrity: sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.27.2': + resolution: {integrity: sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.27.2': + resolution: {integrity: sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.27.2': + resolution: {integrity: sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.27.2': + resolution: {integrity: sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.27.2': + resolution: {integrity: sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.27.2': + resolution: {integrity: sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.27.2': + resolution: {integrity: sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.27.2': + resolution: {integrity: sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==} + 
engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.27.2': + resolution: {integrity: sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.27.2': + resolution: {integrity: sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.27.2': + resolution: {integrity: sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.27.2': + resolution: {integrity: sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.27.2': + resolution: {integrity: sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.27.2': + resolution: {integrity: sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.27.2': + resolution: {integrity: sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.27.2': + resolution: {integrity: sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.27.2': + resolution: {integrity: sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.27.2': + resolution: {integrity: sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openharmony-arm64@0.27.2': + resolution: {integrity: sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + + '@esbuild/sunos-x64@0.27.2': + resolution: {integrity: sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.27.2': + resolution: {integrity: sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.27.2': + resolution: {integrity: sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.27.2': + resolution: {integrity: sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + + '@eslint-community/eslint-utils@4.9.0': + resolution: {integrity: sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + + 
'@eslint-community/regexpp@4.12.2': + resolution: {integrity: sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + + '@eslint/config-array@0.21.1': + resolution: {integrity: sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/config-helpers@0.4.2': + resolution: {integrity: sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/core@0.17.0': + resolution: {integrity: sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/eslintrc@3.3.3': + resolution: {integrity: sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/js@9.39.2': + resolution: {integrity: sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/object-schema@2.1.7': + resolution: {integrity: sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/plugin-kit@0.4.1': + resolution: {integrity: sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@humanfs/core@0.19.1': + resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} + engines: {node: '>=18.18.0'} + + '@humanfs/node@0.16.7': + resolution: {integrity: sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==} + engines: {node: '>=18.18.0'} + + '@humanwhocodes/module-importer@1.0.1': + resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} + engines: {node: '>=12.22'} + + '@humanwhocodes/retry@0.4.3': + resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} + engines: {node: '>=18.18'} + + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} + + '@jridgewell/trace-mapping@0.3.31': + resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + + '@rollup/rollup-android-arm-eabi@4.54.0': + resolution: {integrity: sha512-OywsdRHrFvCdvsewAInDKCNyR3laPA2mc9bRYJ6LBp5IyvF3fvXbbNR0bSzHlZVFtn6E0xw2oZlyjg4rKCVcng==} + cpu: [arm] + os: [android] + + '@rollup/rollup-android-arm64@4.54.0': + resolution: {integrity: sha512-Skx39Uv+u7H224Af+bDgNinitlmHyQX1K/atIA32JP3JQw6hVODX5tkbi2zof/E69M1qH2UoN3Xdxgs90mmNYw==} + cpu: [arm64] + os: [android] + + '@rollup/rollup-darwin-arm64@4.54.0': + resolution: {integrity: 
sha512-k43D4qta/+6Fq+nCDhhv9yP2HdeKeP56QrUUTW7E6PhZP1US6NDqpJj4MY0jBHlJivVJD5P8NxrjuobZBJTCRw==} + cpu: [arm64] + os: [darwin] + + '@rollup/rollup-darwin-x64@4.54.0': + resolution: {integrity: sha512-cOo7biqwkpawslEfox5Vs8/qj83M/aZCSSNIWpVzfU2CYHa2G3P1UN5WF01RdTHSgCkri7XOlTdtk17BezlV3A==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-freebsd-arm64@4.54.0': + resolution: {integrity: sha512-miSvuFkmvFbgJ1BevMa4CPCFt5MPGw094knM64W9I0giUIMMmRYcGW/JWZDriaw/k1kOBtsWh1z6nIFV1vPNtA==} + cpu: [arm64] + os: [freebsd] + + '@rollup/rollup-freebsd-x64@4.54.0': + resolution: {integrity: sha512-KGXIs55+b/ZfZsq9aR026tmr/+7tq6VG6MsnrvF4H8VhwflTIuYh+LFUlIsRdQSgrgmtM3fVATzEAj4hBQlaqQ==} + cpu: [x64] + os: [freebsd] + + '@rollup/rollup-linux-arm-gnueabihf@4.54.0': + resolution: {integrity: sha512-EHMUcDwhtdRGlXZsGSIuXSYwD5kOT9NVnx9sqzYiwAc91wfYOE1g1djOEDseZJKKqtHAHGwnGPQu3kytmfaXLQ==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.54.0': + resolution: {integrity: sha512-+pBrqEjaakN2ySv5RVrj/qLytYhPKEUwk+e3SFU5jTLHIcAtqh2rLrd/OkbNuHJpsBgxsD8ccJt5ga/SeG0JmA==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm64-gnu@4.54.0': + resolution: {integrity: sha512-NSqc7rE9wuUaRBsBp5ckQ5CVz5aIRKCwsoa6WMF7G01sX3/qHUw/z4pv+D+ahL1EIKy6Enpcnz1RY8pf7bjwng==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-arm64-musl@4.54.0': + resolution: {integrity: sha512-gr5vDbg3Bakga5kbdpqx81m2n9IX8M6gIMlQQIXiLTNeQW6CucvuInJ91EuCJ/JYvc+rcLLsDFcfAD1K7fMofg==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-loong64-gnu@4.54.0': + resolution: {integrity: sha512-gsrtB1NA3ZYj2vq0Rzkylo9ylCtW/PhpLEivlgWe0bpgtX5+9j9EZa0wtZiCjgu6zmSeZWyI/e2YRX1URozpIw==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-ppc64-gnu@4.54.0': + resolution: {integrity: sha512-y3qNOfTBStmFNq+t4s7Tmc9hW2ENtPg8FeUD/VShI7rKxNW7O4fFeaYbMsd3tpFlIg1Q8IapFgy7Q9i2BqeBvA==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.54.0': + resolution: {integrity: sha512-89sepv7h2lIVPsFma8iwmccN7Yjjtgz0Rj/Ou6fEqg3HDhpCa+Et+YSufy27i6b0Wav69Qv4WBNl3Rs6pwhebQ==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-riscv64-musl@4.54.0': + resolution: {integrity: sha512-ZcU77ieh0M2Q8Ur7D5X7KvK+UxbXeDHwiOt/CPSBTI1fBmeDMivW0dPkdqkT4rOgDjrDDBUed9x4EgraIKoR2A==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-s390x-gnu@4.54.0': + resolution: {integrity: sha512-2AdWy5RdDF5+4YfG/YesGDDtbyJlC9LHmL6rZw6FurBJ5n4vFGupsOBGfwMRjBYH7qRQowT8D/U4LoSvVwOhSQ==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.54.0': + resolution: {integrity: sha512-WGt5J8Ij/rvyqpFexxk3ffKqqbLf9AqrTBbWDk7ApGUzaIs6V+s2s84kAxklFwmMF/vBNGrVdYgbblCOFFezMQ==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-linux-x64-musl@4.54.0': + resolution: {integrity: sha512-JzQmb38ATzHjxlPHuTH6tE7ojnMKM2kYNzt44LO/jJi8BpceEC8QuXYA908n8r3CNuG/B3BV8VR3Hi1rYtmPiw==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-openharmony-arm64@4.54.0': + resolution: {integrity: sha512-huT3fd0iC7jigGh7n3q/+lfPcXxBi+om/Rs3yiFxjvSxbSB6aohDFXbWvlspaqjeOh+hx7DDHS+5Es5qRkWkZg==} + cpu: [arm64] + os: [openharmony] + + '@rollup/rollup-win32-arm64-msvc@4.54.0': + resolution: {integrity: sha512-c2V0W1bsKIKfbLMBu/WGBz6Yci8nJ/ZJdheE0EwB73N3MvHYKiKGs3mVilX4Gs70eGeDaMqEob25Tw2Gb9Nqyw==} + cpu: [arm64] + os: [win32] + + '@rollup/rollup-win32-ia32-msvc@4.54.0': + resolution: {integrity: sha512-woEHgqQqDCkAzrDhvDipnSirm5vxUXtSKDYTVpZG3nUdW/VVB5VdCYA2iReSj/u3yCZzXID4kuKG7OynPnB3WQ==} + cpu: [ia32] + os: [win32] + + '@rollup/rollup-win32-x64-gnu@4.54.0': + resolution: {integrity: 
sha512-dzAc53LOuFvHwbCEOS0rPbXp6SIhAf2txMP5p6mGyOXXw5mWY8NGGbPMPrs4P1WItkfApDathBj/NzMLUZ9rtQ==} + cpu: [x64] + os: [win32] + + '@rollup/rollup-win32-x64-msvc@4.54.0': + resolution: {integrity: sha512-hYT5d3YNdSh3mbCU1gwQyPgQd3T2ne0A3KG8KSBdav5TiBg6eInVmV+TeR5uHufiIgSFg0XsOWGW5/RhNcSvPg==} + cpu: [x64] + os: [win32] + + '@standard-schema/spec@1.1.0': + resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} + + '@types/chai@5.2.3': + resolution: {integrity: sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==} + + '@types/deep-eql@4.0.2': + resolution: {integrity: sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==} + + '@types/estree@1.0.8': + resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + + '@types/json-schema@7.0.15': + resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + + '@types/node@25.0.3': + resolution: {integrity: sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==} + + '@typescript-eslint/eslint-plugin@8.50.1': + resolution: {integrity: sha512-PKhLGDq3JAg0Jk/aK890knnqduuI/Qj+udH7wCf0217IGi4gt+acgCyPVe79qoT+qKUvHMDQkwJeKW9fwl8Cyw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + '@typescript-eslint/parser': ^8.50.1 + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/parser@8.50.1': + resolution: {integrity: sha512-hM5faZwg7aVNa819m/5r7D0h0c9yC4DUlWAOvHAtISdFTc8xB86VmX5Xqabrama3wIPJ/q9RbGS1worb6JfnMg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/project-service@8.50.1': + resolution: {integrity: sha512-E1ur1MCVf+YiP89+o4Les/oBAVzmSbeRB0MQLfSlYtbWU17HPxZ6Bhs5iYmKZRALvEuBoXIZMOIRRc/P++Ortg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/scope-manager@8.50.1': + resolution: {integrity: sha512-mfRx06Myt3T4vuoHaKi8ZWNTPdzKPNBhiblze5N50//TSHOAQQevl/aolqA/BcqqbJ88GUnLqjjcBc8EWdBcVw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/tsconfig-utils@8.50.1': + resolution: {integrity: sha512-ooHmotT/lCWLXi55G4mvaUF60aJa012QzvLK0Y+Mp4WdSt17QhMhWOaBWeGTFVkb2gDgBe19Cxy1elPXylslDw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/type-utils@8.50.1': + resolution: {integrity: sha512-7J3bf022QZE42tYMO6SL+6lTPKFk/WphhRPe9Tw/el+cEwzLz1Jjz2PX3GtGQVxooLDKeMVmMt7fWpYRdG5Etg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/types@8.50.1': + resolution: {integrity: sha512-v5lFIS2feTkNyMhd7AucE/9j/4V9v5iIbpVRncjk/K0sQ6Sb+Np9fgYS/63n6nwqahHQvbmujeBL7mp07Q9mlA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/typescript-estree@8.50.1': + resolution: {integrity: sha512-woHPdW+0gj53aM+cxchymJCrh0cyS7BTIdcDxWUNsclr9VDkOSbqC13juHzxOmQ22dDkMZEpZB+3X1WpUvzgVQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/utils@8.50.1': + resolution: {integrity: 
sha512-lCLp8H1T9T7gPbEuJSnHwnSuO9mDf8mfK/Nion5mZmiEaQD9sWf9W4dfeFqRyqRjF06/kBuTmAqcs9sewM2NbQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/visitor-keys@8.50.1': + resolution: {integrity: sha512-IrDKrw7pCRUR94zeuCSUWQ+w8JEf5ZX5jl/e6AHGSLi1/zIr0lgutfn/7JpfCey+urpgQEdrZVYzCaVVKiTwhQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@vitest/coverage-v8@4.0.16': + resolution: {integrity: sha512-2rNdjEIsPRzsdu6/9Eq0AYAzYdpP6Bx9cje9tL3FE5XzXRQF1fNU9pe/1yE8fCrS0HD+fBtt6gLPh6LI57tX7A==} + peerDependencies: + '@vitest/browser': 4.0.16 + vitest: 4.0.16 + peerDependenciesMeta: + '@vitest/browser': + optional: true + + '@vitest/expect@4.0.16': + resolution: {integrity: sha512-eshqULT2It7McaJkQGLkPjPjNph+uevROGuIMJdG3V+0BSR2w9u6J9Lwu+E8cK5TETlfou8GRijhafIMhXsimA==} + + '@vitest/mocker@4.0.16': + resolution: {integrity: sha512-yb6k4AZxJTB+q9ycAvsoxGn+j/po0UaPgajllBgt1PzoMAAmJGYFdDk0uCcRcxb3BrME34I6u8gHZTQlkqSZpg==} + peerDependencies: + msw: ^2.4.9 + vite: ^6.0.0 || ^7.0.0-0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + + '@vitest/pretty-format@4.0.16': + resolution: {integrity: sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==} + + '@vitest/runner@4.0.16': + resolution: {integrity: sha512-VWEDm5Wv9xEo80ctjORcTQRJ539EGPB3Pb9ApvVRAY1U/WkHXmmYISqU5E79uCwcW7xYUV38gwZD+RV755fu3Q==} + + '@vitest/snapshot@4.0.16': + resolution: {integrity: sha512-sf6NcrYhYBsSYefxnry+DR8n3UV4xWZwWxYbCJUt2YdvtqzSPR7VfGrY0zsv090DAbjFZsi7ZaMi1KnSRyK1XA==} + + '@vitest/spy@4.0.16': + resolution: {integrity: sha512-4jIOWjKP0ZUaEmJm00E0cOBLU+5WE0BpeNr3XN6TEF05ltro6NJqHWxXD0kA8/Zc8Nh23AT8WQxwNG+WeROupw==} + + '@vitest/utils@4.0.16': + resolution: {integrity: sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==} + + acorn-jsx@5.3.2: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + acorn@8.15.0: + resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} + engines: {node: '>=0.4.0'} + hasBin: true + + ajv@6.12.6: + resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + any-promise@1.3.0: + resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + assertion-error@2.0.1: + resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} + engines: {node: '>=12'} + + ast-v8-to-istanbul@0.3.10: + resolution: {integrity: sha512-p4K7vMz2ZSk3wN8l5o3y2bJAoZXT3VuJI5OLTATY/01CYWumWvwkUw0SqDBnNq6IiTO3qDa1eSQDibAV8g7XOQ==} + + asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + + axios@1.13.2: + resolution: {integrity: sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==} + + 
balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + brace-expansion@1.1.12: + resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} + + brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + + bundle-require@5.1.0: + resolution: {integrity: sha512-3WrrOuZiyaaZPWiEt4G3+IffISVC9HYlWueJEBWED4ZH4aIAC2PnkdnuRrR94M+w6yGWn4AglWtJtBI8YqvgoA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + peerDependencies: + esbuild: '>=0.18' + + cac@6.7.14: + resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} + engines: {node: '>=8'} + + call-bind-apply-helpers@1.0.2: + resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + + callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + + chai@6.2.2: + resolution: {integrity: sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==} + engines: {node: '>=18'} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + chokidar@4.0.3: + resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==} + engines: {node: '>= 14.16.0'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + + commander@4.1.1: + resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} + engines: {node: '>= 6'} + + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + confbox@0.1.8: + resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==} + + consola@3.4.2: + resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==} + engines: {node: ^14.18.0 || >=16.10.0} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + deep-is@0.1.4: + resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + + delayed-stream@1.0.0: + resolution: {integrity: 
sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + + dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + + es-module-lexer@1.7.0: + resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} + + es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + + es-set-tostringtag@2.1.0: + resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} + engines: {node: '>= 0.4'} + + esbuild@0.27.2: + resolution: {integrity: sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==} + engines: {node: '>=18'} + hasBin: true + + escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + + eslint-scope@8.4.0: + resolution: {integrity: sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint-visitor-keys@3.4.3: + resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + eslint-visitor-keys@4.2.1: + resolution: {integrity: sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint@9.39.2: + resolution: {integrity: sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + hasBin: true + peerDependencies: + jiti: '*' + peerDependenciesMeta: + jiti: + optional: true + + espree@10.4.0: + resolution: {integrity: sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + esquery@1.6.0: + resolution: {integrity: sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==} + engines: {node: '>=0.10'} + + esrecurse@4.3.0: + resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} + + estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + + estree-walker@3.0.3: + resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} + + esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + + expect-type@1.3.0: + resolution: {integrity: 
sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==} + engines: {node: '>=12.0.0'} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + + fast-levenshtein@2.0.6: + resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + + fdir@6.5.0: + resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} + engines: {node: '>=12.0.0'} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + + file-entry-cache@8.0.0: + resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} + engines: {node: '>=16.0.0'} + + find-up@5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + + fix-dts-default-cjs-exports@1.0.1: + resolution: {integrity: sha512-pVIECanWFC61Hzl2+oOCtoJ3F17kglZC/6N94eRWycFgBH35hHx0Li604ZIzhseh97mf2p0cv7vVrOZGoqhlEg==} + + flat-cache@4.0.1: + resolution: {integrity: sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==} + engines: {node: '>=16'} + + flatted@3.3.3: + resolution: {integrity: sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==} + + follow-redirects@1.15.11: + resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + form-data@4.0.5: + resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==} + engines: {node: '>= 6'} + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + + glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + + globals@14.0.0: + resolution: {integrity: sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==} + engines: {node: '>=18'} + + gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + has-symbols@1.1.0: + resolution: {integrity: 
sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + html-escaper@2.0.2: + resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + + ignore@5.3.2: + resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} + engines: {node: '>= 4'} + + ignore@7.0.5: + resolution: {integrity: sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==} + engines: {node: '>= 4'} + + import-fresh@3.3.1: + resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} + engines: {node: '>=6'} + + imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + istanbul-lib-coverage@3.2.2: + resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==} + engines: {node: '>=8'} + + istanbul-lib-report@3.0.1: + resolution: {integrity: sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==} + engines: {node: '>=10'} + + istanbul-lib-source-maps@5.0.6: + resolution: {integrity: sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==} + engines: {node: '>=10'} + + istanbul-reports@3.2.0: + resolution: {integrity: sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==} + engines: {node: '>=8'} + + joycon@3.1.1: + resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} + engines: {node: '>=10'} + + js-tokens@9.0.1: + resolution: {integrity: sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==} + + js-yaml@4.1.1: + resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} + hasBin: true + + json-buffer@3.0.1: + resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + + json-schema-traverse@0.4.1: + resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} + + json-stable-stringify-without-jsonify@1.0.1: + resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + + keyv@4.5.4: + resolution: {integrity: 
sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + + levn@0.4.1: + resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} + engines: {node: '>= 0.8.0'} + + lilconfig@3.1.3: + resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==} + engines: {node: '>=14'} + + lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + + load-tsconfig@0.2.5: + resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + + lodash.merge@4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + + magic-string@0.30.21: + resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} + + magicast@0.5.1: + resolution: {integrity: sha512-xrHS24IxaLrvuo613F719wvOIv9xPHFWQHuvGUBmPnCA/3MQxKI3b+r7n1jAoDHmsbC5bRhTZYR77invLAxVnw==} + + make-dir@4.0.0: + resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==} + engines: {node: '>=10'} + + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + + mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + + minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + + minimatch@9.0.5: + resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} + engines: {node: '>=16 || 14 >=14.17'} + + mlly@1.8.0: + resolution: {integrity: sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + mz@2.7.0: + resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} + + nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + + object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + + obug@2.1.1: + resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==} + + optionator@0.9.4: + resolution: 
{integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} + engines: {node: '>= 0.8.0'} + + p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + + p-locate@5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + + parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + + path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + pathe@2.0.3: + resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} + + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@4.0.3: + resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} + engines: {node: '>=12'} + + pirates@4.0.7: + resolution: {integrity: sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==} + engines: {node: '>= 6'} + + pkg-types@1.3.1: + resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} + + postcss-load-config@6.0.1: + resolution: {integrity: sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==} + engines: {node: '>= 18'} + peerDependencies: + jiti: '>=1.21.0' + postcss: '>=8.0.9' + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + jiti: + optional: true + postcss: + optional: true + tsx: + optional: true + yaml: + optional: true + + postcss@8.5.6: + resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} + engines: {node: ^10 || ^12 || >=14} + + prelude-ls@1.2.1: + resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} + engines: {node: '>= 0.8.0'} + + proxy-from-env@1.1.0: + resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} + + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + + readdirp@4.1.2: + resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==} + engines: {node: '>= 14.18.0'} + + resolve-from@4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + + resolve-from@5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + + rollup@4.54.0: + resolution: {integrity: sha512-3nk8Y3a9Ea8szgKhinMlGMhGMw89mqule3KWczxhIzqudyHdCIOHw8WJlj/r329fACjKLEh13ZSk7oE22kyeIw==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + + 
semver@7.7.3: + resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==} + engines: {node: '>=10'} + hasBin: true + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + siginfo@2.0.0: + resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} + + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + + source-map@0.7.6: + resolution: {integrity: sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==} + engines: {node: '>= 12'} + + stackback@0.0.2: + resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} + + std-env@3.10.0: + resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + sucrase@3.35.1: + resolution: {integrity: sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==} + engines: {node: '>=16 || 14 >=14.17'} + hasBin: true + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + thenify-all@1.6.0: + resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} + engines: {node: '>=0.8'} + + thenify@3.3.1: + resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==} + + tinybench@2.9.0: + resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} + + tinyexec@0.3.2: + resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} + + tinyexec@1.0.2: + resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==} + engines: {node: '>=18'} + + tinyglobby@0.2.15: + resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} + engines: {node: '>=12.0.0'} + + tinyrainbow@3.0.3: + resolution: {integrity: sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==} + engines: {node: '>=14.0.0'} + + tree-kill@1.2.2: + resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} + hasBin: true + + ts-api-utils@2.1.0: + resolution: {integrity: sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==} + engines: {node: '>=18.12'} + peerDependencies: + typescript: '>=4.8.4' + + ts-interface-checker@0.1.13: + resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} + + tsup@8.5.1: + resolution: 
{integrity: sha512-xtgkqwdhpKWr3tKPmCkvYmS9xnQK3m3XgxZHwSUjvfTjp7YfXe5tT3GgWi0F2N+ZSMsOeWeZFh7ZZFg5iPhing==} + engines: {node: '>=18'} + hasBin: true + peerDependencies: + '@microsoft/api-extractor': ^7.36.0 + '@swc/core': ^1 + postcss: ^8.4.12 + typescript: '>=4.5.0' + peerDependenciesMeta: + '@microsoft/api-extractor': + optional: true + '@swc/core': + optional: true + postcss: + optional: true + typescript: + optional: true + + type-check@0.4.0: + resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} + engines: {node: '>= 0.8.0'} + + typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} + engines: {node: '>=14.17'} + hasBin: true + + ufo@1.6.1: + resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==} + + undici-types@7.16.0: + resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==} + + uri-js@4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + + vite@7.3.0: + resolution: {integrity: sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==} + engines: {node: ^20.19.0 || >=22.12.0} + hasBin: true + peerDependencies: + '@types/node': ^20.19.0 || >=22.12.0 + jiti: '>=1.21.0' + less: ^4.0.0 + lightningcss: ^1.21.0 + sass: ^1.70.0 + sass-embedded: ^1.70.0 + stylus: '>=0.54.8' + sugarss: ^5.0.0 + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + '@types/node': + optional: true + jiti: + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + tsx: + optional: true + yaml: + optional: true + + vitest@4.0.16: + resolution: {integrity: sha512-E4t7DJ9pESL6E3I8nFjPa4xGUd3PmiWDLsDztS2qXSJWfHtbQnwAWylaBvSNY48I3vr8PTqIZlyK8TE3V3CA4Q==} + engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@opentelemetry/api': ^1.9.0 + '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 + '@vitest/browser-playwright': 4.0.16 + '@vitest/browser-preview': 4.0.16 + '@vitest/browser-webdriverio': 4.0.16 + '@vitest/ui': 4.0.16 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@opentelemetry/api': + optional: true + '@types/node': + optional: true + '@vitest/browser-playwright': + optional: true + '@vitest/browser-preview': + optional: true + '@vitest/browser-webdriverio': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + why-is-node-running@2.3.0: + resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==} + engines: {node: '>=8'} + hasBin: true + + word-wrap@1.2.5: + resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} + engines: {node: '>=0.10.0'} + + yocto-queue@0.1.0: + resolution: {integrity: 
sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + +snapshots: + + '@babel/helper-string-parser@7.27.1': {} + + '@babel/helper-validator-identifier@7.28.5': {} + + '@babel/parser@7.28.5': + dependencies: + '@babel/types': 7.28.5 + + '@babel/types@7.28.5': + dependencies: + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.28.5 + + '@bcoe/v8-coverage@1.0.2': {} + + '@esbuild/aix-ppc64@0.27.2': + optional: true + + '@esbuild/android-arm64@0.27.2': + optional: true + + '@esbuild/android-arm@0.27.2': + optional: true + + '@esbuild/android-x64@0.27.2': + optional: true + + '@esbuild/darwin-arm64@0.27.2': + optional: true + + '@esbuild/darwin-x64@0.27.2': + optional: true + + '@esbuild/freebsd-arm64@0.27.2': + optional: true + + '@esbuild/freebsd-x64@0.27.2': + optional: true + + '@esbuild/linux-arm64@0.27.2': + optional: true + + '@esbuild/linux-arm@0.27.2': + optional: true + + '@esbuild/linux-ia32@0.27.2': + optional: true + + '@esbuild/linux-loong64@0.27.2': + optional: true + + '@esbuild/linux-mips64el@0.27.2': + optional: true + + '@esbuild/linux-ppc64@0.27.2': + optional: true + + '@esbuild/linux-riscv64@0.27.2': + optional: true + + '@esbuild/linux-s390x@0.27.2': + optional: true + + '@esbuild/linux-x64@0.27.2': + optional: true + + '@esbuild/netbsd-arm64@0.27.2': + optional: true + + '@esbuild/netbsd-x64@0.27.2': + optional: true + + '@esbuild/openbsd-arm64@0.27.2': + optional: true + + '@esbuild/openbsd-x64@0.27.2': + optional: true + + '@esbuild/openharmony-arm64@0.27.2': + optional: true + + '@esbuild/sunos-x64@0.27.2': + optional: true + + '@esbuild/win32-arm64@0.27.2': + optional: true + + '@esbuild/win32-ia32@0.27.2': + optional: true + + '@esbuild/win32-x64@0.27.2': + optional: true + + '@eslint-community/eslint-utils@4.9.0(eslint@9.39.2)': + dependencies: + eslint: 9.39.2 + eslint-visitor-keys: 3.4.3 + + '@eslint-community/regexpp@4.12.2': {} + + '@eslint/config-array@0.21.1': + dependencies: + '@eslint/object-schema': 2.1.7 + debug: 4.4.3 + minimatch: 3.1.2 + transitivePeerDependencies: + - supports-color + + '@eslint/config-helpers@0.4.2': + dependencies: + '@eslint/core': 0.17.0 + + '@eslint/core@0.17.0': + dependencies: + '@types/json-schema': 7.0.15 + + '@eslint/eslintrc@3.3.3': + dependencies: + ajv: 6.12.6 + debug: 4.4.3 + espree: 10.4.0 + globals: 14.0.0 + ignore: 5.3.2 + import-fresh: 3.3.1 + js-yaml: 4.1.1 + minimatch: 3.1.2 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + + '@eslint/js@9.39.2': {} + + '@eslint/object-schema@2.1.7': {} + + '@eslint/plugin-kit@0.4.1': + dependencies: + '@eslint/core': 0.17.0 + levn: 0.4.1 + + '@humanfs/core@0.19.1': {} + + '@humanfs/node@0.16.7': + dependencies: + '@humanfs/core': 0.19.1 + '@humanwhocodes/retry': 0.4.3 + + '@humanwhocodes/module-importer@1.0.1': {} + + '@humanwhocodes/retry@0.4.3': {} + + '@jridgewell/gen-mapping@0.3.13': + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/resolve-uri@3.1.2': {} + + '@jridgewell/sourcemap-codec@1.5.5': {} + + '@jridgewell/trace-mapping@0.3.31': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.5 + + '@rollup/rollup-android-arm-eabi@4.54.0': + optional: true + + '@rollup/rollup-android-arm64@4.54.0': + optional: true + + '@rollup/rollup-darwin-arm64@4.54.0': + optional: true + + '@rollup/rollup-darwin-x64@4.54.0': + optional: true + + 
'@rollup/rollup-freebsd-arm64@4.54.0': + optional: true + + '@rollup/rollup-freebsd-x64@4.54.0': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.54.0': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.54.0': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.54.0': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.54.0': + optional: true + + '@rollup/rollup-linux-loong64-gnu@4.54.0': + optional: true + + '@rollup/rollup-linux-ppc64-gnu@4.54.0': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.54.0': + optional: true + + '@rollup/rollup-linux-riscv64-musl@4.54.0': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.54.0': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.54.0': + optional: true + + '@rollup/rollup-linux-x64-musl@4.54.0': + optional: true + + '@rollup/rollup-openharmony-arm64@4.54.0': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.54.0': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.54.0': + optional: true + + '@rollup/rollup-win32-x64-gnu@4.54.0': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.54.0': + optional: true + + '@standard-schema/spec@1.1.0': {} + + '@types/chai@5.2.3': + dependencies: + '@types/deep-eql': 4.0.2 + assertion-error: 2.0.1 + + '@types/deep-eql@4.0.2': {} + + '@types/estree@1.0.8': {} + + '@types/json-schema@7.0.15': {} + + '@types/node@25.0.3': + dependencies: + undici-types: 7.16.0 + + '@typescript-eslint/eslint-plugin@8.50.1(@typescript-eslint/parser@8.50.1(eslint@9.39.2)(typescript@5.9.3))(eslint@9.39.2)(typescript@5.9.3)': + dependencies: + '@eslint-community/regexpp': 4.12.2 + '@typescript-eslint/parser': 8.50.1(eslint@9.39.2)(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.50.1 + '@typescript-eslint/type-utils': 8.50.1(eslint@9.39.2)(typescript@5.9.3) + '@typescript-eslint/utils': 8.50.1(eslint@9.39.2)(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.50.1 + eslint: 9.39.2 + ignore: 7.0.5 + natural-compare: 1.4.0 + ts-api-utils: 2.1.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/parser@8.50.1(eslint@9.39.2)(typescript@5.9.3)': + dependencies: + '@typescript-eslint/scope-manager': 8.50.1 + '@typescript-eslint/types': 8.50.1 + '@typescript-eslint/typescript-estree': 8.50.1(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.50.1 + debug: 4.4.3 + eslint: 9.39.2 + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/project-service@8.50.1(typescript@5.9.3)': + dependencies: + '@typescript-eslint/tsconfig-utils': 8.50.1(typescript@5.9.3) + '@typescript-eslint/types': 8.50.1 + debug: 4.4.3 + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/scope-manager@8.50.1': + dependencies: + '@typescript-eslint/types': 8.50.1 + '@typescript-eslint/visitor-keys': 8.50.1 + + '@typescript-eslint/tsconfig-utils@8.50.1(typescript@5.9.3)': + dependencies: + typescript: 5.9.3 + + '@typescript-eslint/type-utils@8.50.1(eslint@9.39.2)(typescript@5.9.3)': + dependencies: + '@typescript-eslint/types': 8.50.1 + '@typescript-eslint/typescript-estree': 8.50.1(typescript@5.9.3) + '@typescript-eslint/utils': 8.50.1(eslint@9.39.2)(typescript@5.9.3) + debug: 4.4.3 + eslint: 9.39.2 + ts-api-utils: 2.1.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/types@8.50.1': {} + + '@typescript-eslint/typescript-estree@8.50.1(typescript@5.9.3)': + dependencies: + 
'@typescript-eslint/project-service': 8.50.1(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.50.1(typescript@5.9.3) + '@typescript-eslint/types': 8.50.1 + '@typescript-eslint/visitor-keys': 8.50.1 + debug: 4.4.3 + minimatch: 9.0.5 + semver: 7.7.3 + tinyglobby: 0.2.15 + ts-api-utils: 2.1.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/utils@8.50.1(eslint@9.39.2)(typescript@5.9.3)': + dependencies: + '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2) + '@typescript-eslint/scope-manager': 8.50.1 + '@typescript-eslint/types': 8.50.1 + '@typescript-eslint/typescript-estree': 8.50.1(typescript@5.9.3) + eslint: 9.39.2 + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/visitor-keys@8.50.1': + dependencies: + '@typescript-eslint/types': 8.50.1 + eslint-visitor-keys: 4.2.1 + + '@vitest/coverage-v8@4.0.16(vitest@4.0.16(@types/node@25.0.3))': + dependencies: + '@bcoe/v8-coverage': 1.0.2 + '@vitest/utils': 4.0.16 + ast-v8-to-istanbul: 0.3.10 + istanbul-lib-coverage: 3.2.2 + istanbul-lib-report: 3.0.1 + istanbul-lib-source-maps: 5.0.6 + istanbul-reports: 3.2.0 + magicast: 0.5.1 + obug: 2.1.1 + std-env: 3.10.0 + tinyrainbow: 3.0.3 + vitest: 4.0.16(@types/node@25.0.3) + transitivePeerDependencies: + - supports-color + + '@vitest/expect@4.0.16': + dependencies: + '@standard-schema/spec': 1.1.0 + '@types/chai': 5.2.3 + '@vitest/spy': 4.0.16 + '@vitest/utils': 4.0.16 + chai: 6.2.2 + tinyrainbow: 3.0.3 + + '@vitest/mocker@4.0.16(vite@7.3.0(@types/node@25.0.3))': + dependencies: + '@vitest/spy': 4.0.16 + estree-walker: 3.0.3 + magic-string: 0.30.21 + optionalDependencies: + vite: 7.3.0(@types/node@25.0.3) + + '@vitest/pretty-format@4.0.16': + dependencies: + tinyrainbow: 3.0.3 + + '@vitest/runner@4.0.16': + dependencies: + '@vitest/utils': 4.0.16 + pathe: 2.0.3 + + '@vitest/snapshot@4.0.16': + dependencies: + '@vitest/pretty-format': 4.0.16 + magic-string: 0.30.21 + pathe: 2.0.3 + + '@vitest/spy@4.0.16': {} + + '@vitest/utils@4.0.16': + dependencies: + '@vitest/pretty-format': 4.0.16 + tinyrainbow: 3.0.3 + + acorn-jsx@5.3.2(acorn@8.15.0): + dependencies: + acorn: 8.15.0 + + acorn@8.15.0: {} + + ajv@6.12.6: + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + any-promise@1.3.0: {} + + argparse@2.0.1: {} + + assertion-error@2.0.1: {} + + ast-v8-to-istanbul@0.3.10: + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + estree-walker: 3.0.3 + js-tokens: 9.0.1 + + asynckit@0.4.0: {} + + axios@1.13.2: + dependencies: + follow-redirects: 1.15.11 + form-data: 4.0.5 + proxy-from-env: 1.1.0 + transitivePeerDependencies: + - debug + + balanced-match@1.0.2: {} + + brace-expansion@1.1.12: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + brace-expansion@2.0.2: + dependencies: + balanced-match: 1.0.2 + + bundle-require@5.1.0(esbuild@0.27.2): + dependencies: + esbuild: 0.27.2 + load-tsconfig: 0.2.5 + + cac@6.7.14: {} + + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + + callsites@3.1.0: {} + + chai@6.2.2: {} + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + chokidar@4.0.3: + dependencies: + readdirp: 4.1.2 + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + combined-stream@1.0.8: + dependencies: + delayed-stream: 1.0.0 + + commander@4.1.1: {} + + 
concat-map@0.0.1: {} + + confbox@0.1.8: {} + + consola@3.4.2: {} + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + debug@4.4.3: + dependencies: + ms: 2.1.3 + + deep-is@0.1.4: {} + + delayed-stream@1.0.0: {} + + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + + es-module-lexer@1.7.0: {} + + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + + es-set-tostringtag@2.1.0: + dependencies: + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + + esbuild@0.27.2: + optionalDependencies: + '@esbuild/aix-ppc64': 0.27.2 + '@esbuild/android-arm': 0.27.2 + '@esbuild/android-arm64': 0.27.2 + '@esbuild/android-x64': 0.27.2 + '@esbuild/darwin-arm64': 0.27.2 + '@esbuild/darwin-x64': 0.27.2 + '@esbuild/freebsd-arm64': 0.27.2 + '@esbuild/freebsd-x64': 0.27.2 + '@esbuild/linux-arm': 0.27.2 + '@esbuild/linux-arm64': 0.27.2 + '@esbuild/linux-ia32': 0.27.2 + '@esbuild/linux-loong64': 0.27.2 + '@esbuild/linux-mips64el': 0.27.2 + '@esbuild/linux-ppc64': 0.27.2 + '@esbuild/linux-riscv64': 0.27.2 + '@esbuild/linux-s390x': 0.27.2 + '@esbuild/linux-x64': 0.27.2 + '@esbuild/netbsd-arm64': 0.27.2 + '@esbuild/netbsd-x64': 0.27.2 + '@esbuild/openbsd-arm64': 0.27.2 + '@esbuild/openbsd-x64': 0.27.2 + '@esbuild/openharmony-arm64': 0.27.2 + '@esbuild/sunos-x64': 0.27.2 + '@esbuild/win32-arm64': 0.27.2 + '@esbuild/win32-ia32': 0.27.2 + '@esbuild/win32-x64': 0.27.2 + + escape-string-regexp@4.0.0: {} + + eslint-scope@8.4.0: + dependencies: + esrecurse: 4.3.0 + estraverse: 5.3.0 + + eslint-visitor-keys@3.4.3: {} + + eslint-visitor-keys@4.2.1: {} + + eslint@9.39.2: + dependencies: + '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2) + '@eslint-community/regexpp': 4.12.2 + '@eslint/config-array': 0.21.1 + '@eslint/config-helpers': 0.4.2 + '@eslint/core': 0.17.0 + '@eslint/eslintrc': 3.3.3 + '@eslint/js': 9.39.2 + '@eslint/plugin-kit': 0.4.1 + '@humanfs/node': 0.16.7 + '@humanwhocodes/module-importer': 1.0.1 + '@humanwhocodes/retry': 0.4.3 + '@types/estree': 1.0.8 + ajv: 6.12.6 + chalk: 4.1.2 + cross-spawn: 7.0.6 + debug: 4.4.3 + escape-string-regexp: 4.0.0 + eslint-scope: 8.4.0 + eslint-visitor-keys: 4.2.1 + espree: 10.4.0 + esquery: 1.6.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 8.0.0 + find-up: 5.0.0 + glob-parent: 6.0.2 + ignore: 5.3.2 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + json-stable-stringify-without-jsonify: 1.0.1 + lodash.merge: 4.6.2 + minimatch: 3.1.2 + natural-compare: 1.4.0 + optionator: 0.9.4 + transitivePeerDependencies: + - supports-color + + espree@10.4.0: + dependencies: + acorn: 8.15.0 + acorn-jsx: 5.3.2(acorn@8.15.0) + eslint-visitor-keys: 4.2.1 + + esquery@1.6.0: + dependencies: + estraverse: 5.3.0 + + esrecurse@4.3.0: + dependencies: + estraverse: 5.3.0 + + estraverse@5.3.0: {} + + estree-walker@3.0.3: + dependencies: + '@types/estree': 1.0.8 + + esutils@2.0.3: {} + + expect-type@1.3.0: {} + + fast-deep-equal@3.1.3: {} + + fast-json-stable-stringify@2.1.0: {} + + fast-levenshtein@2.0.6: {} + + fdir@6.5.0(picomatch@4.0.3): + optionalDependencies: + picomatch: 4.0.3 + + file-entry-cache@8.0.0: + dependencies: + flat-cache: 4.0.1 + + find-up@5.0.0: + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + + fix-dts-default-cjs-exports@1.0.1: + dependencies: + magic-string: 0.30.21 + mlly: 1.8.0 + rollup: 4.54.0 + + flat-cache@4.0.1: + dependencies: + flatted: 3.3.3 + keyv: 4.5.4 + + 
flatted@3.3.3: {} + + follow-redirects@1.15.11: {} + + form-data@4.0.5: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + es-set-tostringtag: 2.1.0 + hasown: 2.0.2 + mime-types: 2.1.35 + + fsevents@2.3.3: + optional: true + + function-bind@1.1.2: {} + + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + glob-parent@6.0.2: + dependencies: + is-glob: 4.0.3 + + globals@14.0.0: {} + + gopd@1.2.0: {} + + has-flag@4.0.0: {} + + has-symbols@1.1.0: {} + + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.1.0 + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + html-escaper@2.0.2: {} + + ignore@5.3.2: {} + + ignore@7.0.5: {} + + import-fresh@3.3.1: + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + imurmurhash@0.1.4: {} + + is-extglob@2.1.1: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + isexe@2.0.0: {} + + istanbul-lib-coverage@3.2.2: {} + + istanbul-lib-report@3.0.1: + dependencies: + istanbul-lib-coverage: 3.2.2 + make-dir: 4.0.0 + supports-color: 7.2.0 + + istanbul-lib-source-maps@5.0.6: + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + debug: 4.4.3 + istanbul-lib-coverage: 3.2.2 + transitivePeerDependencies: + - supports-color + + istanbul-reports@3.2.0: + dependencies: + html-escaper: 2.0.2 + istanbul-lib-report: 3.0.1 + + joycon@3.1.1: {} + + js-tokens@9.0.1: {} + + js-yaml@4.1.1: + dependencies: + argparse: 2.0.1 + + json-buffer@3.0.1: {} + + json-schema-traverse@0.4.1: {} + + json-stable-stringify-without-jsonify@1.0.1: {} + + keyv@4.5.4: + dependencies: + json-buffer: 3.0.1 + + levn@0.4.1: + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + + lilconfig@3.1.3: {} + + lines-and-columns@1.2.4: {} + + load-tsconfig@0.2.5: {} + + locate-path@6.0.0: + dependencies: + p-locate: 5.0.0 + + lodash.merge@4.6.2: {} + + magic-string@0.30.21: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + + magicast@0.5.1: + dependencies: + '@babel/parser': 7.28.5 + '@babel/types': 7.28.5 + source-map-js: 1.2.1 + + make-dir@4.0.0: + dependencies: + semver: 7.7.3 + + math-intrinsics@1.1.0: {} + + mime-db@1.52.0: {} + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + minimatch@3.1.2: + dependencies: + brace-expansion: 1.1.12 + + minimatch@9.0.5: + dependencies: + brace-expansion: 2.0.2 + + mlly@1.8.0: + dependencies: + acorn: 8.15.0 + pathe: 2.0.3 + pkg-types: 1.3.1 + ufo: 1.6.1 + + ms@2.1.3: {} + + mz@2.7.0: + dependencies: + any-promise: 1.3.0 + object-assign: 4.1.1 + thenify-all: 1.6.0 + + nanoid@3.3.11: {} + + natural-compare@1.4.0: {} + + object-assign@4.1.1: {} + + obug@2.1.1: {} + + optionator@0.9.4: + dependencies: + deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + word-wrap: 1.2.5 + + p-limit@3.1.0: + dependencies: + yocto-queue: 0.1.0 + + p-locate@5.0.0: + dependencies: + p-limit: 3.1.0 + + parent-module@1.0.1: + dependencies: + callsites: 3.1.0 + + path-exists@4.0.0: {} + + path-key@3.1.1: {} + + pathe@2.0.3: {} + + picocolors@1.1.1: {} + + picomatch@4.0.3: {} + + pirates@4.0.7: {} + + pkg-types@1.3.1: + dependencies: + confbox: 0.1.8 + mlly: 1.8.0 + pathe: 2.0.3 + + postcss-load-config@6.0.1(postcss@8.5.6): + dependencies: + lilconfig: 3.1.3 + optionalDependencies: + postcss: 8.5.6 + + 
postcss@8.5.6: + dependencies: + nanoid: 3.3.11 + picocolors: 1.1.1 + source-map-js: 1.2.1 + + prelude-ls@1.2.1: {} + + proxy-from-env@1.1.0: {} + + punycode@2.3.1: {} + + readdirp@4.1.2: {} + + resolve-from@4.0.0: {} + + resolve-from@5.0.0: {} + + rollup@4.54.0: + dependencies: + '@types/estree': 1.0.8 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.54.0 + '@rollup/rollup-android-arm64': 4.54.0 + '@rollup/rollup-darwin-arm64': 4.54.0 + '@rollup/rollup-darwin-x64': 4.54.0 + '@rollup/rollup-freebsd-arm64': 4.54.0 + '@rollup/rollup-freebsd-x64': 4.54.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.54.0 + '@rollup/rollup-linux-arm-musleabihf': 4.54.0 + '@rollup/rollup-linux-arm64-gnu': 4.54.0 + '@rollup/rollup-linux-arm64-musl': 4.54.0 + '@rollup/rollup-linux-loong64-gnu': 4.54.0 + '@rollup/rollup-linux-ppc64-gnu': 4.54.0 + '@rollup/rollup-linux-riscv64-gnu': 4.54.0 + '@rollup/rollup-linux-riscv64-musl': 4.54.0 + '@rollup/rollup-linux-s390x-gnu': 4.54.0 + '@rollup/rollup-linux-x64-gnu': 4.54.0 + '@rollup/rollup-linux-x64-musl': 4.54.0 + '@rollup/rollup-openharmony-arm64': 4.54.0 + '@rollup/rollup-win32-arm64-msvc': 4.54.0 + '@rollup/rollup-win32-ia32-msvc': 4.54.0 + '@rollup/rollup-win32-x64-gnu': 4.54.0 + '@rollup/rollup-win32-x64-msvc': 4.54.0 + fsevents: 2.3.3 + + semver@7.7.3: {} + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + siginfo@2.0.0: {} + + source-map-js@1.2.1: {} + + source-map@0.7.6: {} + + stackback@0.0.2: {} + + std-env@3.10.0: {} + + strip-json-comments@3.1.1: {} + + sucrase@3.35.1: + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + commander: 4.1.1 + lines-and-columns: 1.2.4 + mz: 2.7.0 + pirates: 4.0.7 + tinyglobby: 0.2.15 + ts-interface-checker: 0.1.13 + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + thenify-all@1.6.0: + dependencies: + thenify: 3.3.1 + + thenify@3.3.1: + dependencies: + any-promise: 1.3.0 + + tinybench@2.9.0: {} + + tinyexec@0.3.2: {} + + tinyexec@1.0.2: {} + + tinyglobby@0.2.15: + dependencies: + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + + tinyrainbow@3.0.3: {} + + tree-kill@1.2.2: {} + + ts-api-utils@2.1.0(typescript@5.9.3): + dependencies: + typescript: 5.9.3 + + ts-interface-checker@0.1.13: {} + + tsup@8.5.1(postcss@8.5.6)(typescript@5.9.3): + dependencies: + bundle-require: 5.1.0(esbuild@0.27.2) + cac: 6.7.14 + chokidar: 4.0.3 + consola: 3.4.2 + debug: 4.4.3 + esbuild: 0.27.2 + fix-dts-default-cjs-exports: 1.0.1 + joycon: 3.1.1 + picocolors: 1.1.1 + postcss-load-config: 6.0.1(postcss@8.5.6) + resolve-from: 5.0.0 + rollup: 4.54.0 + source-map: 0.7.6 + sucrase: 3.35.1 + tinyexec: 0.3.2 + tinyglobby: 0.2.15 + tree-kill: 1.2.2 + optionalDependencies: + postcss: 8.5.6 + typescript: 5.9.3 + transitivePeerDependencies: + - jiti + - supports-color + - tsx + - yaml + + type-check@0.4.0: + dependencies: + prelude-ls: 1.2.1 + + typescript@5.9.3: {} + + ufo@1.6.1: {} + + undici-types@7.16.0: {} + + uri-js@4.4.1: + dependencies: + punycode: 2.3.1 + + vite@7.3.0(@types/node@25.0.3): + dependencies: + esbuild: 0.27.2 + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + postcss: 8.5.6 + rollup: 4.54.0 + tinyglobby: 0.2.15 + optionalDependencies: + '@types/node': 25.0.3 + fsevents: 2.3.3 + + vitest@4.0.16(@types/node@25.0.3): + dependencies: + '@vitest/expect': 4.0.16 + '@vitest/mocker': 4.0.16(vite@7.3.0(@types/node@25.0.3)) + '@vitest/pretty-format': 4.0.16 + '@vitest/runner': 4.0.16 + '@vitest/snapshot': 4.0.16 + '@vitest/spy': 4.0.16 + '@vitest/utils': 4.0.16 + es-module-lexer: 
1.7.0 + expect-type: 1.3.0 + magic-string: 0.30.21 + obug: 2.1.1 + pathe: 2.0.3 + picomatch: 4.0.3 + std-env: 3.10.0 + tinybench: 2.9.0 + tinyexec: 1.0.2 + tinyglobby: 0.2.15 + tinyrainbow: 3.0.3 + vite: 7.3.0(@types/node@25.0.3) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 25.0.3 + transitivePeerDependencies: + - jiti + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - terser + - tsx + - yaml + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + why-is-node-running@2.3.0: + dependencies: + siginfo: 2.0.0 + stackback: 0.0.2 + + word-wrap@1.2.5: {} + + yocto-queue@0.1.0: {} diff --git a/sdks/nodejs-client/pnpm-workspace.yaml b/sdks/nodejs-client/pnpm-workspace.yaml new file mode 100644 index 0000000000..efc037aa84 --- /dev/null +++ b/sdks/nodejs-client/pnpm-workspace.yaml @@ -0,0 +1,2 @@ +onlyBuiltDependencies: + - esbuild diff --git a/sdks/nodejs-client/scripts/publish.sh b/sdks/nodejs-client/scripts/publish.sh new file mode 100755 index 0000000000..043cac046d --- /dev/null +++ b/sdks/nodejs-client/scripts/publish.sh @@ -0,0 +1,261 @@ +#!/usr/bin/env bash +# +# Dify Node.js SDK Publish Script +# ================================ +# A beautiful and reliable script to publish the SDK to npm +# +# Usage: +# ./scripts/publish.sh # Normal publish +# ./scripts/publish.sh --dry-run # Test without publishing +# ./scripts/publish.sh --skip-tests # Skip tests (not recommended) +# + +set -euo pipefail + +# ============================================================================ +# Colors and Formatting +# ============================================================================ +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +MAGENTA='\033[0;35m' +CYAN='\033[0;36m' +BOLD='\033[1m' +DIM='\033[2m' +NC='\033[0m' # No Color + +# ============================================================================ +# Helper Functions +# ============================================================================ +print_banner() { + echo -e "${CYAN}" + echo "╔═══════════════════════════════════════════════════════════════╗" + echo "║ ║" + echo "║ 🚀 Dify Node.js SDK Publish Script 🚀 ║" + echo "║ ║" + echo "╚═══════════════════════════════════════════════════════════════╝" + echo -e "${NC}" +} + +info() { + echo -e "${BLUE}ℹ ${NC}$1" +} + +success() { + echo -e "${GREEN}✔ ${NC}$1" +} + +warning() { + echo -e "${YELLOW}⚠ ${NC}$1" +} + +error() { + echo -e "${RED}✖ ${NC}$1" +} + +step() { + echo -e "\n${MAGENTA}▶ ${BOLD}$1${NC}" +} + +divider() { + echo -e "${DIM}─────────────────────────────────────────────────────────────────${NC}" +} + +# ============================================================================ +# Configuration +# ============================================================================ +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_DIR="$(cd "$SCRIPT_DIR/.." 
&& pwd)" + +DRY_RUN=false +SKIP_TESTS=false + +# Parse arguments +for arg in "$@"; do + case $arg in + --dry-run) + DRY_RUN=true + ;; + --skip-tests) + SKIP_TESTS=true + ;; + --help|-h) + echo "Usage: $0 [options]" + echo "" + echo "Options:" + echo " --dry-run Run without actually publishing" + echo " --skip-tests Skip running tests (not recommended)" + echo " --help, -h Show this help message" + exit 0 + ;; + esac +done + +# ============================================================================ +# Main Script +# ============================================================================ +main() { + print_banner + cd "$PROJECT_DIR" + + # Show mode + if [[ "$DRY_RUN" == true ]]; then + warning "Running in DRY-RUN mode - no actual publish will occur" + divider + fi + + # ======================================================================== + # Step 1: Environment Check + # ======================================================================== + step "Step 1/6: Checking environment..." + + # Check Node.js + if ! command -v node &> /dev/null; then + error "Node.js is not installed" + exit 1 + fi + NODE_VERSION=$(node -v) + success "Node.js: $NODE_VERSION" + + # Check npm + if ! command -v npm &> /dev/null; then + error "npm is not installed" + exit 1 + fi + NPM_VERSION=$(npm -v) + success "npm: v$NPM_VERSION" + + # Check pnpm (optional, for local dev) + if command -v pnpm &> /dev/null; then + PNPM_VERSION=$(pnpm -v) + success "pnpm: v$PNPM_VERSION" + else + info "pnpm not found (optional)" + fi + + # Check npm login status + if ! npm whoami &> /dev/null; then + error "Not logged in to npm. Run 'npm login' first." + exit 1 + fi + NPM_USER=$(npm whoami) + success "Logged in as: ${BOLD}$NPM_USER${NC}" + + # ======================================================================== + # Step 2: Read Package Info + # ======================================================================== + step "Step 2/6: Reading package info..." + + PACKAGE_NAME=$(node -p "require('./package.json').name") + PACKAGE_VERSION=$(node -p "require('./package.json').version") + + success "Package: ${BOLD}$PACKAGE_NAME${NC}" + success "Version: ${BOLD}$PACKAGE_VERSION${NC}" + + # Check if version already exists on npm + if npm view "$PACKAGE_NAME@$PACKAGE_VERSION" version &> /dev/null; then + error "Version $PACKAGE_VERSION already exists on npm!" + echo "" + info "Current published versions:" + npm view "$PACKAGE_NAME" versions --json 2>/dev/null | tail -5 + echo "" + warning "Please update the version in package.json before publishing." + exit 1 + fi + success "Version $PACKAGE_VERSION is available" + + # ======================================================================== + # Step 3: Install Dependencies + # ======================================================================== + step "Step 3/6: Installing dependencies..." + + if command -v pnpm &> /dev/null; then + pnpm install --frozen-lockfile 2>/dev/null || pnpm install + else + npm ci 2>/dev/null || npm install + fi + success "Dependencies installed" + + # ======================================================================== + # Step 4: Run Tests + # ======================================================================== + step "Step 4/6: Running tests..." 
+ + if [[ "$SKIP_TESTS" == true ]]; then + warning "Skipping tests (--skip-tests flag)" + else + if command -v pnpm &> /dev/null; then + pnpm test + else + npm test + fi + success "All tests passed" + fi + + # ======================================================================== + # Step 5: Build + # ======================================================================== + step "Step 5/6: Building package..." + + # Clean previous build + rm -rf dist + + if command -v pnpm &> /dev/null; then + pnpm run build + else + npm run build + fi + success "Build completed" + + # Verify build output + if [[ ! -f "dist/index.js" ]]; then + error "Build failed - dist/index.js not found" + exit 1 + fi + if [[ ! -f "dist/index.d.ts" ]]; then + error "Build failed - dist/index.d.ts not found" + exit 1 + fi + success "Build output verified" + + # ======================================================================== + # Step 6: Publish + # ======================================================================== + step "Step 6/6: Publishing to npm..." + + divider + echo -e "${CYAN}Package contents:${NC}" + npm pack --dry-run 2>&1 | head -30 + divider + + if [[ "$DRY_RUN" == true ]]; then + warning "DRY-RUN: Skipping actual publish" + echo "" + info "To publish for real, run without --dry-run flag" + else + echo "" + echo -e "${YELLOW}About to publish ${BOLD}$PACKAGE_NAME@$PACKAGE_VERSION${NC}${YELLOW} to npm${NC}" + echo -e "${DIM}Press Enter to continue, or Ctrl+C to cancel...${NC}" + read -r + + npm publish --access public + + echo "" + success "🎉 Successfully published ${BOLD}$PACKAGE_NAME@$PACKAGE_VERSION${NC} to npm!" + echo "" + echo -e "${GREEN}Install with:${NC}" + echo -e " ${CYAN}npm install $PACKAGE_NAME${NC}" + echo -e " ${CYAN}pnpm add $PACKAGE_NAME${NC}" + echo -e " ${CYAN}yarn add $PACKAGE_NAME${NC}" + echo "" + echo -e "${GREEN}View on npm:${NC}" + echo -e " ${CYAN}https://www.npmjs.com/package/$PACKAGE_NAME${NC}" + fi + + divider + echo -e "${GREEN}${BOLD}✨ All done!${NC}" +} + +# Run main function +main "$@" diff --git a/sdks/nodejs-client/src/client/base.test.js b/sdks/nodejs-client/src/client/base.test.js new file mode 100644 index 0000000000..5e1b21d0f1 --- /dev/null +++ b/sdks/nodejs-client/src/client/base.test.js @@ -0,0 +1,175 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { DifyClient } from "./base"; +import { ValidationError } from "../errors/dify-error"; +import { createHttpClientWithSpies } from "../../tests/test-utils"; + +describe("DifyClient base", () => { + beforeEach(() => { + vi.restoreAllMocks(); + }); + + it("getRoot calls root endpoint", async () => { + const { client, request } = createHttpClientWithSpies(); + const dify = new DifyClient(client); + + await dify.getRoot(); + + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/", + }); + }); + + it("getApplicationParameters includes optional user", async () => { + const { client, request } = createHttpClientWithSpies(); + const dify = new DifyClient(client); + + await dify.getApplicationParameters(); + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/parameters", + query: undefined, + }); + + await dify.getApplicationParameters("user-1"); + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/parameters", + query: { user: "user-1" }, + }); + }); + + it("getMeta includes optional user", async () => { + const { client, request } = createHttpClientWithSpies(); + const dify = new DifyClient(client); + + await dify.getMeta("user-1"); + 
expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/meta", + query: { user: "user-1" }, + }); + }); + + it("getInfo and getSite support optional user", async () => { + const { client, request } = createHttpClientWithSpies(); + const dify = new DifyClient(client); + + await dify.getInfo(); + await dify.getSite("user"); + + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/info", + query: undefined, + }); + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/site", + query: { user: "user" }, + }); + }); + + it("messageFeedback builds payload from request object", async () => { + const { client, request } = createHttpClientWithSpies(); + const dify = new DifyClient(client); + + await dify.messageFeedback({ + messageId: "msg", + user: "user", + rating: "like", + content: "good", + }); + + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/messages/msg/feedbacks", + data: { user: "user", rating: "like", content: "good" }, + }); + }); + + it("fileUpload appends user to form data", async () => { + const { client, request } = createHttpClientWithSpies(); + const dify = new DifyClient(client); + const form = { append: vi.fn(), getHeaders: () => ({}) }; + + await dify.fileUpload(form, "user"); + + expect(form.append).toHaveBeenCalledWith("user", "user"); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/files/upload", + data: form, + }); + }); + + it("filePreview uses arraybuffer response", async () => { + const { client, request } = createHttpClientWithSpies(); + const dify = new DifyClient(client); + + await dify.filePreview("file", "user", true); + + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/files/file/preview", + query: { user: "user", as_attachment: "true" }, + responseType: "arraybuffer", + }); + }); + + it("audioToText appends user and sends form", async () => { + const { client, request } = createHttpClientWithSpies(); + const dify = new DifyClient(client); + const form = { append: vi.fn(), getHeaders: () => ({}) }; + + await dify.audioToText(form, "user"); + + expect(form.append).toHaveBeenCalledWith("user", "user"); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/audio-to-text", + data: form, + }); + }); + + it("textToAudio supports streaming and message id", async () => { + const { client, request, requestBinaryStream } = createHttpClientWithSpies(); + const dify = new DifyClient(client); + + await dify.textToAudio({ + user: "user", + message_id: "msg", + streaming: true, + }); + + expect(requestBinaryStream).toHaveBeenCalledWith({ + method: "POST", + path: "/text-to-audio", + data: { + user: "user", + message_id: "msg", + streaming: true, + }, + }); + + await dify.textToAudio("hello", "user", false, "voice"); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/text-to-audio", + data: { + text: "hello", + user: "user", + streaming: false, + voice: "voice", + }, + responseType: "arraybuffer", + }); + }); + + it("textToAudio requires text or message id", async () => { + const { client } = createHttpClientWithSpies(); + const dify = new DifyClient(client); + + expect(() => dify.textToAudio({ user: "user" })).toThrow(ValidationError); + }); +}); diff --git a/sdks/nodejs-client/src/client/base.ts b/sdks/nodejs-client/src/client/base.ts new file mode 100644 index 0000000000..0fa535a488 --- /dev/null +++ b/sdks/nodejs-client/src/client/base.ts @@ -0,0 +1,284 @@ +import type { + BinaryStream, + DifyClientConfig, + DifyResponse, + MessageFeedbackRequest, + 
QueryParams,
+  RequestMethod,
+  TextToAudioRequest,
+} from "../types/common";
+import { HttpClient } from "../http/client";
+import { ensureNonEmptyString, ensureRating } from "./validation";
+import { FileUploadError, ValidationError } from "../errors/dify-error";
+import { isFormData } from "../http/form-data";
+
+const toConfig = (
+  init: string | DifyClientConfig,
+  baseUrl?: string
+): DifyClientConfig => {
+  if (typeof init === "string") {
+    return {
+      apiKey: init,
+      baseUrl,
+    };
+  }
+  return init;
+};
+
+const appendUserToFormData = (form: unknown, user: string): void => {
+  if (!isFormData(form)) {
+    throw new FileUploadError("FormData is required for file uploads");
+  }
+  if (typeof form.append === "function") {
+    form.append("user", user);
+  }
+};
+
+export class DifyClient {
+  protected http: HttpClient;
+
+  constructor(config: string | DifyClientConfig | HttpClient, baseUrl?: string) {
+    if (config instanceof HttpClient) {
+      this.http = config;
+    } else {
+      this.http = new HttpClient(toConfig(config, baseUrl));
+    }
+  }
+
+  updateApiKey(apiKey: string): void {
+    ensureNonEmptyString(apiKey, "apiKey");
+    this.http.updateApiKey(apiKey);
+  }
+
+  getHttpClient(): HttpClient {
+    return this.http;
+  }
+
+  sendRequest(
+    method: RequestMethod,
+    endpoint: string,
+    data: unknown = null,
+    params: QueryParams | null = null,
+    stream = false,
+    headerParams: Record<string, string> = {}
+  ): ReturnType<HttpClient["requestRaw"]> {
+    return this.http.requestRaw({
+      method,
+      path: endpoint,
+      data,
+      query: params ?? undefined,
+      headers: headerParams,
+      responseType: stream ? "stream" : "json",
+    });
+  }
+
+  getRoot(): Promise<DifyResponse> {
+    return this.http.request({
+      method: "GET",
+      path: "/",
+    });
+  }
+
+  getApplicationParameters(user?: string): Promise<DifyResponse> {
+    if (user) {
+      ensureNonEmptyString(user, "user");
+    }
+    return this.http.request({
+      method: "GET",
+      path: "/parameters",
+      query: user ? { user } : undefined,
+    });
+  }
+
+  async getParameters(user?: string): Promise<DifyResponse> {
+    return this.getApplicationParameters(user);
+  }
+
+  getMeta(user?: string): Promise<DifyResponse> {
+    if (user) {
+      ensureNonEmptyString(user, "user");
+    }
+    return this.http.request({
+      method: "GET",
+      path: "/meta",
+      query: user ? { user } : undefined,
+    });
+  }
{ user } : undefined, + }); + } + + messageFeedback( + request: MessageFeedbackRequest + ): Promise>>; + messageFeedback( + messageId: string, + rating: "like" | "dislike" | null, + user: string, + content?: string + ): Promise>>; + messageFeedback( + messageIdOrRequest: string | MessageFeedbackRequest, + rating?: "like" | "dislike" | null, + user?: string, + content?: string + ): Promise>> { + let messageId: string; + const payload: Record = {}; + + if (typeof messageIdOrRequest === "string") { + messageId = messageIdOrRequest; + ensureNonEmptyString(messageId, "messageId"); + ensureNonEmptyString(user, "user"); + payload.user = user; + if (rating !== undefined && rating !== null) { + ensureRating(rating); + payload.rating = rating; + } + if (content !== undefined) { + payload.content = content; + } + } else { + const request = messageIdOrRequest; + messageId = request.messageId; + ensureNonEmptyString(messageId, "messageId"); + ensureNonEmptyString(request.user, "user"); + payload.user = request.user; + if (request.rating !== undefined && request.rating !== null) { + ensureRating(request.rating); + payload.rating = request.rating; + } + if (request.content !== undefined) { + payload.content = request.content; + } + } + + return this.http.request({ + method: "POST", + path: `/messages/${messageId}/feedbacks`, + data: payload, + }); + } + + getInfo(user?: string): Promise> { + if (user) { + ensureNonEmptyString(user, "user"); + } + return this.http.request({ + method: "GET", + path: "/info", + query: user ? { user } : undefined, + }); + } + + getSite(user?: string): Promise> { + if (user) { + ensureNonEmptyString(user, "user"); + } + return this.http.request({ + method: "GET", + path: "/site", + query: user ? { user } : undefined, + }); + } + + fileUpload(form: unknown, user: string): Promise> { + if (!isFormData(form)) { + throw new FileUploadError("FormData is required for file uploads"); + } + ensureNonEmptyString(user, "user"); + appendUserToFormData(form, user); + return this.http.request({ + method: "POST", + path: "/files/upload", + data: form, + }); + } + + filePreview( + fileId: string, + user: string, + asAttachment?: boolean + ): Promise> { + ensureNonEmptyString(fileId, "fileId"); + ensureNonEmptyString(user, "user"); + return this.http.request({ + method: "GET", + path: `/files/${fileId}/preview`, + query: { + user, + as_attachment: asAttachment ? 
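+/*
+ * Editor's note (usage sketch, not part of this diff): messageFeedback keeps
+ * both call shapes shown above. When rating is null or omitted, the payload
+ * simply leaves rating out, as the implementation's null checks show.
+ *
+ *   await dify.messageFeedback("msg-1", "like", "user-1", "helpful");
+ *   await dify.messageFeedback({ messageId: "msg-1", user: "user-1", rating: null });
+ */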
"true" : undefined, + }, + responseType: "arraybuffer", + }); + } + + audioToText(form: unknown, user: string): Promise> { + if (!isFormData(form)) { + throw new FileUploadError("FormData is required for audio uploads"); + } + ensureNonEmptyString(user, "user"); + appendUserToFormData(form, user); + return this.http.request({ + method: "POST", + path: "/audio-to-text", + data: form, + }); + } + + textToAudio( + request: TextToAudioRequest + ): Promise | BinaryStream>; + textToAudio( + text: string, + user: string, + streaming?: boolean, + voice?: string + ): Promise | BinaryStream>; + textToAudio( + textOrRequest: string | TextToAudioRequest, + user?: string, + streaming = false, + voice?: string + ): Promise | BinaryStream> { + let payload: TextToAudioRequest; + + if (typeof textOrRequest === "string") { + ensureNonEmptyString(textOrRequest, "text"); + ensureNonEmptyString(user, "user"); + payload = { + text: textOrRequest, + user, + streaming, + }; + if (voice) { + payload.voice = voice; + } + } else { + payload = { ...textOrRequest }; + ensureNonEmptyString(payload.user, "user"); + if (payload.text !== undefined && payload.text !== null) { + ensureNonEmptyString(payload.text, "text"); + } + if (payload.message_id !== undefined && payload.message_id !== null) { + ensureNonEmptyString(payload.message_id, "messageId"); + } + if (!payload.text && !payload.message_id) { + throw new ValidationError("text or message_id is required"); + } + payload.streaming = payload.streaming ?? false; + } + + if (payload.streaming) { + return this.http.requestBinaryStream({ + method: "POST", + path: "/text-to-audio", + data: payload, + }); + } + + return this.http.request({ + method: "POST", + path: "/text-to-audio", + data: payload, + responseType: "arraybuffer", + }); + } +} diff --git a/sdks/nodejs-client/src/client/chat.test.js b/sdks/nodejs-client/src/client/chat.test.js new file mode 100644 index 0000000000..a97c9d4a5c --- /dev/null +++ b/sdks/nodejs-client/src/client/chat.test.js @@ -0,0 +1,239 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { ChatClient } from "./chat"; +import { ValidationError } from "../errors/dify-error"; +import { createHttpClientWithSpies } from "../../tests/test-utils"; + +describe("ChatClient", () => { + beforeEach(() => { + vi.restoreAllMocks(); + }); + + it("creates chat messages in blocking mode", async () => { + const { client, request } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + await chat.createChatMessage({ input: "x" }, "hello", "user", false, null); + + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/chat-messages", + data: { + inputs: { input: "x" }, + query: "hello", + user: "user", + response_mode: "blocking", + files: undefined, + }, + }); + }); + + it("creates chat messages in streaming mode", async () => { + const { client, requestStream } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + await chat.createChatMessage({ + inputs: { input: "x" }, + query: "hello", + user: "user", + response_mode: "streaming", + }); + + expect(requestStream).toHaveBeenCalledWith({ + method: "POST", + path: "/chat-messages", + data: { + inputs: { input: "x" }, + query: "hello", + user: "user", + response_mode: "streaming", + }, + }); + }); + + it("stops chat messages", async () => { + const { client, request } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + await chat.stopChatMessage("task", "user"); + await chat.stopMessage("task", "user"); + + 
expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/chat-messages/task/stop", + data: { user: "user" }, + }); + }); + + it("gets suggested questions", async () => { + const { client, request } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + await chat.getSuggested("msg", "user"); + + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/messages/msg/suggested", + query: { user: "user" }, + }); + }); + + it("submits message feedback", async () => { + const { client, request } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + await chat.messageFeedback("msg", "like", "user", "good"); + await chat.messageFeedback({ + messageId: "msg", + user: "user", + rating: "dislike", + }); + + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/messages/msg/feedbacks", + data: { user: "user", rating: "like", content: "good" }, + }); + }); + + it("lists app feedbacks", async () => { + const { client, request } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + await chat.getAppFeedbacks(2, 5); + + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/app/feedbacks", + query: { page: 2, limit: 5 }, + }); + }); + + it("lists conversations and messages", async () => { + const { client, request } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + await chat.getConversations("user", "last", 10, "-updated_at"); + await chat.getConversationMessages("user", "conv", "first", 5); + + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/conversations", + query: { + user: "user", + last_id: "last", + limit: 10, + sort_by: "-updated_at", + }, + }); + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/messages", + query: { + user: "user", + conversation_id: "conv", + first_id: "first", + limit: 5, + }, + }); + }); + + it("renames conversations with optional auto-generate", async () => { + const { client, request } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + await chat.renameConversation("conv", "name", "user", false); + await chat.renameConversation("conv", "user", { autoGenerate: true }); + + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/conversations/conv/name", + data: { user: "user", auto_generate: false, name: "name" }, + }); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/conversations/conv/name", + data: { user: "user", auto_generate: true }, + }); + }); + + it("requires name when autoGenerate is false", async () => { + const { client } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + expect(() => + chat.renameConversation("conv", "", "user", false) + ).toThrow(ValidationError); + }); + + it("deletes conversations", async () => { + const { client, request } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + await chat.deleteConversation("conv", "user"); + + expect(request).toHaveBeenCalledWith({ + method: "DELETE", + path: "/conversations/conv", + data: { user: "user" }, + }); + }); + + it("manages conversation variables", async () => { + const { client, request } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + await chat.getConversationVariables("conv", "user", "last", 10, "name"); + await chat.updateConversationVariable("conv", "var", "user", "value"); + + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/conversations/conv/variables", + query: { + user: "user", + last_id: 
"last", + limit: 10, + variable_name: "name", + }, + }); + expect(request).toHaveBeenCalledWith({ + method: "PUT", + path: "/conversations/conv/variables/var", + data: { user: "user", value: "value" }, + }); + }); + + it("handles annotation APIs", async () => { + const { client, request } = createHttpClientWithSpies(); + const chat = new ChatClient(client); + + await chat.annotationReplyAction("enable", { + score_threshold: 0.5, + embedding_provider_name: "prov", + embedding_model_name: "model", + }); + await chat.getAnnotationReplyStatus("enable", "job"); + await chat.listAnnotations({ page: 1, limit: 10, keyword: "k" }); + await chat.createAnnotation({ question: "q", answer: "a" }); + await chat.updateAnnotation("id", { question: "q", answer: "a" }); + await chat.deleteAnnotation("id"); + + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/apps/annotation-reply/enable", + data: { + score_threshold: 0.5, + embedding_provider_name: "prov", + embedding_model_name: "model", + }, + }); + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/apps/annotation-reply/enable/status/job", + }); + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/apps/annotations", + query: { page: 1, limit: 10, keyword: "k" }, + }); + }); +}); diff --git a/sdks/nodejs-client/src/client/chat.ts b/sdks/nodejs-client/src/client/chat.ts new file mode 100644 index 0000000000..745c999552 --- /dev/null +++ b/sdks/nodejs-client/src/client/chat.ts @@ -0,0 +1,377 @@ +import { DifyClient } from "./base"; +import type { ChatMessageRequest, ChatMessageResponse } from "../types/chat"; +import type { + AnnotationCreateRequest, + AnnotationListOptions, + AnnotationReplyActionRequest, + AnnotationResponse, +} from "../types/annotation"; +import type { + DifyResponse, + DifyStream, + QueryParams, +} from "../types/common"; +import { + ensureNonEmptyString, + ensureOptionalInt, + ensureOptionalString, +} from "./validation"; + +export class ChatClient extends DifyClient { + createChatMessage( + request: ChatMessageRequest + ): Promise | DifyStream>; + createChatMessage( + inputs: Record, + query: string, + user: string, + stream?: boolean, + conversationId?: string | null, + files?: Array> | null + ): Promise | DifyStream>; + createChatMessage( + inputOrRequest: ChatMessageRequest | Record, + query?: string, + user?: string, + stream = false, + conversationId?: string | null, + files?: Array> | null + ): Promise | DifyStream> { + let payload: ChatMessageRequest; + let shouldStream = stream; + + if (query === undefined && "user" in (inputOrRequest as ChatMessageRequest)) { + payload = inputOrRequest as ChatMessageRequest; + shouldStream = payload.response_mode === "streaming"; + } else { + ensureNonEmptyString(query, "query"); + ensureNonEmptyString(user, "user"); + payload = { + inputs: inputOrRequest as Record, + query, + user, + response_mode: stream ? 
"streaming" : "blocking", + files, + }; + if (conversationId) { + payload.conversation_id = conversationId; + } + } + + ensureNonEmptyString(payload.user, "user"); + ensureNonEmptyString(payload.query, "query"); + + if (shouldStream) { + return this.http.requestStream({ + method: "POST", + path: "/chat-messages", + data: payload, + }); + } + + return this.http.request({ + method: "POST", + path: "/chat-messages", + data: payload, + }); + } + + stopChatMessage( + taskId: string, + user: string + ): Promise> { + ensureNonEmptyString(taskId, "taskId"); + ensureNonEmptyString(user, "user"); + return this.http.request({ + method: "POST", + path: `/chat-messages/${taskId}/stop`, + data: { user }, + }); + } + + stopMessage( + taskId: string, + user: string + ): Promise> { + return this.stopChatMessage(taskId, user); + } + + getSuggested( + messageId: string, + user: string + ): Promise> { + ensureNonEmptyString(messageId, "messageId"); + ensureNonEmptyString(user, "user"); + return this.http.request({ + method: "GET", + path: `/messages/${messageId}/suggested`, + query: { user }, + }); + } + + // Note: messageFeedback is inherited from DifyClient + + getAppFeedbacks( + page?: number, + limit?: number + ): Promise>> { + ensureOptionalInt(page, "page"); + ensureOptionalInt(limit, "limit"); + return this.http.request({ + method: "GET", + path: "/app/feedbacks", + query: { + page, + limit, + }, + }); + } + + getConversations( + user: string, + lastId?: string | null, + limit?: number | null, + sortByOrPinned?: string | boolean | null + ): Promise>> { + ensureNonEmptyString(user, "user"); + ensureOptionalString(lastId, "lastId"); + ensureOptionalInt(limit, "limit"); + + const params: QueryParams = { user }; + if (lastId) { + params.last_id = lastId; + } + if (limit) { + params.limit = limit; + } + if (typeof sortByOrPinned === "string") { + params.sort_by = sortByOrPinned; + } else if (typeof sortByOrPinned === "boolean") { + params.pinned = sortByOrPinned; + } + + return this.http.request({ + method: "GET", + path: "/conversations", + query: params, + }); + } + + getConversationMessages( + user: string, + conversationId: string, + firstId?: string | null, + limit?: number | null + ): Promise>> { + ensureNonEmptyString(user, "user"); + ensureNonEmptyString(conversationId, "conversationId"); + ensureOptionalString(firstId, "firstId"); + ensureOptionalInt(limit, "limit"); + + const params: QueryParams = { user }; + params.conversation_id = conversationId; + if (firstId) { + params.first_id = firstId; + } + if (limit) { + params.limit = limit; + } + + return this.http.request({ + method: "GET", + path: "/messages", + query: params, + }); + } + + renameConversation( + conversationId: string, + name: string, + user: string, + autoGenerate?: boolean + ): Promise>>; + renameConversation( + conversationId: string, + user: string, + options?: { name?: string | null; autoGenerate?: boolean } + ): Promise>>; + renameConversation( + conversationId: string, + nameOrUser: string, + userOrOptions?: string | { name?: string | null; autoGenerate?: boolean }, + autoGenerate?: boolean + ): Promise>> { + ensureNonEmptyString(conversationId, "conversationId"); + + let name: string | null | undefined; + let user: string; + let resolvedAutoGenerate: boolean; + + if (typeof userOrOptions === "string" || userOrOptions === undefined) { + name = nameOrUser; + user = userOrOptions ?? ""; + resolvedAutoGenerate = autoGenerate ?? 
false; + } else { + user = nameOrUser; + name = userOrOptions.name; + resolvedAutoGenerate = userOrOptions.autoGenerate ?? false; + } + + ensureNonEmptyString(user, "user"); + if (!resolvedAutoGenerate) { + ensureNonEmptyString(name, "name"); + } + + const payload: Record = { + user, + auto_generate: resolvedAutoGenerate, + }; + if (typeof name === "string" && name.trim().length > 0) { + payload.name = name; + } + + return this.http.request({ + method: "POST", + path: `/conversations/${conversationId}/name`, + data: payload, + }); + } + + deleteConversation( + conversationId: string, + user: string + ): Promise>> { + ensureNonEmptyString(conversationId, "conversationId"); + ensureNonEmptyString(user, "user"); + return this.http.request({ + method: "DELETE", + path: `/conversations/${conversationId}`, + data: { user }, + }); + } + + getConversationVariables( + conversationId: string, + user: string, + lastId?: string | null, + limit?: number | null, + variableName?: string | null + ): Promise>> { + ensureNonEmptyString(conversationId, "conversationId"); + ensureNonEmptyString(user, "user"); + ensureOptionalString(lastId, "lastId"); + ensureOptionalInt(limit, "limit"); + ensureOptionalString(variableName, "variableName"); + + return this.http.request({ + method: "GET", + path: `/conversations/${conversationId}/variables`, + query: { + user, + last_id: lastId ?? undefined, + limit: limit ?? undefined, + variable_name: variableName ?? undefined, + }, + }); + } + + updateConversationVariable( + conversationId: string, + variableId: string, + user: string, + value: unknown + ): Promise>> { + ensureNonEmptyString(conversationId, "conversationId"); + ensureNonEmptyString(variableId, "variableId"); + ensureNonEmptyString(user, "user"); + return this.http.request({ + method: "PUT", + path: `/conversations/${conversationId}/variables/${variableId}`, + data: { + user, + value, + }, + }); + } + + annotationReplyAction( + action: "enable" | "disable", + request: AnnotationReplyActionRequest + ): Promise> { + ensureNonEmptyString(action, "action"); + ensureNonEmptyString(request.embedding_provider_name, "embedding_provider_name"); + ensureNonEmptyString(request.embedding_model_name, "embedding_model_name"); + return this.http.request({ + method: "POST", + path: `/apps/annotation-reply/${action}`, + data: request, + }); + } + + getAnnotationReplyStatus( + action: "enable" | "disable", + jobId: string + ): Promise> { + ensureNonEmptyString(action, "action"); + ensureNonEmptyString(jobId, "jobId"); + return this.http.request({ + method: "GET", + path: `/apps/annotation-reply/${action}/status/${jobId}`, + }); + } + + listAnnotations( + options?: AnnotationListOptions + ): Promise> { + ensureOptionalInt(options?.page, "page"); + ensureOptionalInt(options?.limit, "limit"); + ensureOptionalString(options?.keyword, "keyword"); + return this.http.request({ + method: "GET", + path: "/apps/annotations", + query: { + page: options?.page, + limit: options?.limit, + keyword: options?.keyword ?? 
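+/*
+ * Editor's note (usage sketch, not part of this diff): renameConversation
+ * accepts an explicit name, or lets the server generate one via the options
+ * form; a name is only required when autoGenerate is false.
+ *
+ *   await chat.renameConversation("conv-1", "Trip planning", "user-1", false);
+ *   await chat.renameConversation("conv-1", "user-1", { autoGenerate: true });
+ */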
undefined, + }, + }); + } + + createAnnotation( + request: AnnotationCreateRequest + ): Promise> { + ensureNonEmptyString(request.question, "question"); + ensureNonEmptyString(request.answer, "answer"); + return this.http.request({ + method: "POST", + path: "/apps/annotations", + data: request, + }); + } + + updateAnnotation( + annotationId: string, + request: AnnotationCreateRequest + ): Promise> { + ensureNonEmptyString(annotationId, "annotationId"); + ensureNonEmptyString(request.question, "question"); + ensureNonEmptyString(request.answer, "answer"); + return this.http.request({ + method: "PUT", + path: `/apps/annotations/${annotationId}`, + data: request, + }); + } + + deleteAnnotation( + annotationId: string + ): Promise> { + ensureNonEmptyString(annotationId, "annotationId"); + return this.http.request({ + method: "DELETE", + path: `/apps/annotations/${annotationId}`, + }); + } + + // Note: audioToText is inherited from DifyClient +} diff --git a/sdks/nodejs-client/src/client/completion.test.js b/sdks/nodejs-client/src/client/completion.test.js new file mode 100644 index 0000000000..b79cf3fb8f --- /dev/null +++ b/sdks/nodejs-client/src/client/completion.test.js @@ -0,0 +1,83 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { CompletionClient } from "./completion"; +import { createHttpClientWithSpies } from "../../tests/test-utils"; + +describe("CompletionClient", () => { + beforeEach(() => { + vi.restoreAllMocks(); + }); + + it("creates completion messages in blocking mode", async () => { + const { client, request } = createHttpClientWithSpies(); + const completion = new CompletionClient(client); + + await completion.createCompletionMessage({ input: "x" }, "user", false); + + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/completion-messages", + data: { + inputs: { input: "x" }, + user: "user", + files: undefined, + response_mode: "blocking", + }, + }); + }); + + it("creates completion messages in streaming mode", async () => { + const { client, requestStream } = createHttpClientWithSpies(); + const completion = new CompletionClient(client); + + await completion.createCompletionMessage({ + inputs: { input: "x" }, + user: "user", + response_mode: "streaming", + }); + + expect(requestStream).toHaveBeenCalledWith({ + method: "POST", + path: "/completion-messages", + data: { + inputs: { input: "x" }, + user: "user", + response_mode: "streaming", + }, + }); + }); + + it("stops completion messages", async () => { + const { client, request } = createHttpClientWithSpies(); + const completion = new CompletionClient(client); + + await completion.stopCompletionMessage("task", "user"); + await completion.stop("task", "user"); + + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/completion-messages/task/stop", + data: { user: "user" }, + }); + }); + + it("supports deprecated runWorkflow", async () => { + const { client, request, requestStream } = createHttpClientWithSpies(); + const completion = new CompletionClient(client); + const warn = vi.spyOn(console, "warn").mockImplementation(() => {}); + + await completion.runWorkflow({ input: "x" }, "user", false); + await completion.runWorkflow({ input: "x" }, "user", true); + + expect(warn).toHaveBeenCalled(); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/workflows/run", + data: { inputs: { input: "x" }, user: "user", response_mode: "blocking" }, + }); + expect(requestStream).toHaveBeenCalledWith({ + method: "POST", + path: "/workflows/run", + data: { inputs: { 
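+/*
+ * Editor's note (usage sketch, not part of this diff): the annotation helpers
+ * above wrap the /apps/annotations and /apps/annotation-reply endpoints. The
+ * provider and model names below are placeholders.
+ *
+ *   await chat.createAnnotation({ question: "What is Dify?", answer: "An LLM app platform." });
+ *   await chat.annotationReplyAction("enable", {
+ *     score_threshold: 0.8,
+ *     embedding_provider_name: "openai",
+ *     embedding_model_name: "text-embedding-3-small",
+ *   });
+ */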
input: "x" }, user: "user", response_mode: "streaming" }, + }); + }); +}); diff --git a/sdks/nodejs-client/src/client/completion.ts b/sdks/nodejs-client/src/client/completion.ts new file mode 100644 index 0000000000..9e39898e8b --- /dev/null +++ b/sdks/nodejs-client/src/client/completion.ts @@ -0,0 +1,111 @@ +import { DifyClient } from "./base"; +import type { CompletionRequest, CompletionResponse } from "../types/completion"; +import type { DifyResponse, DifyStream } from "../types/common"; +import { ensureNonEmptyString } from "./validation"; + +const warned = new Set(); +const warnOnce = (message: string): void => { + if (warned.has(message)) { + return; + } + warned.add(message); + console.warn(message); +}; + +export class CompletionClient extends DifyClient { + createCompletionMessage( + request: CompletionRequest + ): Promise | DifyStream>; + createCompletionMessage( + inputs: Record, + user: string, + stream?: boolean, + files?: Array> | null + ): Promise | DifyStream>; + createCompletionMessage( + inputOrRequest: CompletionRequest | Record, + user?: string, + stream = false, + files?: Array> | null + ): Promise | DifyStream> { + let payload: CompletionRequest; + let shouldStream = stream; + + if (user === undefined && "user" in (inputOrRequest as CompletionRequest)) { + payload = inputOrRequest as CompletionRequest; + shouldStream = payload.response_mode === "streaming"; + } else { + ensureNonEmptyString(user, "user"); + payload = { + inputs: inputOrRequest as Record, + user, + files, + response_mode: stream ? "streaming" : "blocking", + }; + } + + ensureNonEmptyString(payload.user, "user"); + + if (shouldStream) { + return this.http.requestStream({ + method: "POST", + path: "/completion-messages", + data: payload, + }); + } + + return this.http.request({ + method: "POST", + path: "/completion-messages", + data: payload, + }); + } + + stopCompletionMessage( + taskId: string, + user: string + ): Promise> { + ensureNonEmptyString(taskId, "taskId"); + ensureNonEmptyString(user, "user"); + return this.http.request({ + method: "POST", + path: `/completion-messages/${taskId}/stop`, + data: { user }, + }); + } + + stop( + taskId: string, + user: string + ): Promise> { + return this.stopCompletionMessage(taskId, user); + } + + runWorkflow( + inputs: Record, + user: string, + stream = false + ): Promise> | DifyStream>> { + warnOnce( + "CompletionClient.runWorkflow is deprecated. Use WorkflowClient.run instead." + ); + ensureNonEmptyString(user, "user"); + const payload = { + inputs, + user, + response_mode: stream ? 
"streaming" : "blocking", + }; + if (stream) { + return this.http.requestStream>({ + method: "POST", + path: "/workflows/run", + data: payload, + }); + } + return this.http.request>({ + method: "POST", + path: "/workflows/run", + data: payload, + }); + } +} diff --git a/sdks/nodejs-client/src/client/knowledge-base.test.js b/sdks/nodejs-client/src/client/knowledge-base.test.js new file mode 100644 index 0000000000..4381b39e56 --- /dev/null +++ b/sdks/nodejs-client/src/client/knowledge-base.test.js @@ -0,0 +1,249 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { KnowledgeBaseClient } from "./knowledge-base"; +import { createHttpClientWithSpies } from "../../tests/test-utils"; + +describe("KnowledgeBaseClient", () => { + beforeEach(() => { + vi.restoreAllMocks(); + }); + + it("handles dataset and tag operations", async () => { + const { client, request } = createHttpClientWithSpies(); + const kb = new KnowledgeBaseClient(client); + + await kb.listDatasets({ + page: 1, + limit: 2, + keyword: "k", + includeAll: true, + tagIds: ["t1"], + }); + await kb.createDataset({ name: "dataset" }); + await kb.getDataset("ds"); + await kb.updateDataset("ds", { name: "new" }); + await kb.deleteDataset("ds"); + await kb.updateDocumentStatus("ds", "enable", ["doc1"]); + + await kb.listTags(); + await kb.createTag({ name: "tag" }); + await kb.updateTag({ tag_id: "tag", name: "name" }); + await kb.deleteTag({ tag_id: "tag" }); + await kb.bindTags({ tag_ids: ["tag"], target_id: "doc" }); + await kb.unbindTags({ tag_id: "tag", target_id: "doc" }); + await kb.getDatasetTags("ds"); + + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/datasets", + query: { + page: 1, + limit: 2, + keyword: "k", + include_all: true, + tag_ids: ["t1"], + }, + }); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets", + data: { name: "dataset" }, + }); + expect(request).toHaveBeenCalledWith({ + method: "PATCH", + path: "/datasets/ds", + data: { name: "new" }, + }); + expect(request).toHaveBeenCalledWith({ + method: "PATCH", + path: "/datasets/ds/documents/status/enable", + data: { document_ids: ["doc1"] }, + }); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets/tags/binding", + data: { tag_ids: ["tag"], target_id: "doc" }, + }); + }); + + it("handles document operations", async () => { + const { client, request } = createHttpClientWithSpies(); + const kb = new KnowledgeBaseClient(client); + const form = { append: vi.fn(), getHeaders: () => ({}) }; + + await kb.createDocumentByText("ds", { name: "doc", text: "text" }); + await kb.updateDocumentByText("ds", "doc", { name: "doc2" }); + await kb.createDocumentByFile("ds", form); + await kb.updateDocumentByFile("ds", "doc", form); + await kb.listDocuments("ds", { page: 1, limit: 20, keyword: "k" }); + await kb.getDocument("ds", "doc", { metadata: "all" }); + await kb.deleteDocument("ds", "doc"); + await kb.getDocumentIndexingStatus("ds", "batch"); + + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets/ds/document/create_by_text", + data: { name: "doc", text: "text" }, + }); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets/ds/documents/doc/update_by_text", + data: { name: "doc2" }, + }); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets/ds/document/create_by_file", + data: form, + }); + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/datasets/ds/documents", + query: { page: 1, limit: 20, keyword: 
"k", status: undefined }, + }); + }); + + it("handles segments and child chunks", async () => { + const { client, request } = createHttpClientWithSpies(); + const kb = new KnowledgeBaseClient(client); + + await kb.createSegments("ds", "doc", { segments: [{ content: "x" }] }); + await kb.listSegments("ds", "doc", { page: 1, limit: 10, keyword: "k" }); + await kb.getSegment("ds", "doc", "seg"); + await kb.updateSegment("ds", "doc", "seg", { + segment: { content: "y" }, + }); + await kb.deleteSegment("ds", "doc", "seg"); + + await kb.createChildChunk("ds", "doc", "seg", { content: "c" }); + await kb.listChildChunks("ds", "doc", "seg", { page: 1, limit: 10 }); + await kb.updateChildChunk("ds", "doc", "seg", "child", { + content: "c2", + }); + await kb.deleteChildChunk("ds", "doc", "seg", "child"); + + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets/ds/documents/doc/segments", + data: { segments: [{ content: "x" }] }, + }); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets/ds/documents/doc/segments/seg", + data: { segment: { content: "y" } }, + }); + expect(request).toHaveBeenCalledWith({ + method: "PATCH", + path: "/datasets/ds/documents/doc/segments/seg/child_chunks/child", + data: { content: "c2" }, + }); + }); + + it("handles metadata and retrieval", async () => { + const { client, request } = createHttpClientWithSpies(); + const kb = new KnowledgeBaseClient(client); + + await kb.listMetadata("ds"); + await kb.createMetadata("ds", { name: "m", type: "string" }); + await kb.updateMetadata("ds", "mid", { name: "m2" }); + await kb.deleteMetadata("ds", "mid"); + await kb.listBuiltInMetadata("ds"); + await kb.updateBuiltInMetadata("ds", "enable"); + await kb.updateDocumentsMetadata("ds", { + operation_data: [ + { document_id: "doc", metadata_list: [{ id: "m", name: "n" }] }, + ], + }); + await kb.hitTesting("ds", { query: "q" }); + await kb.retrieve("ds", { query: "q" }); + + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/datasets/ds/metadata", + }); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets/ds/metadata", + data: { name: "m", type: "string" }, + }); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets/ds/hit-testing", + data: { query: "q" }, + }); + }); + + it("handles pipeline operations", async () => { + const { client, request, requestStream } = createHttpClientWithSpies(); + const kb = new KnowledgeBaseClient(client); + const warn = vi.spyOn(console, "warn").mockImplementation(() => {}); + const form = { append: vi.fn(), getHeaders: () => ({}) }; + + await kb.listDatasourcePlugins("ds", { isPublished: true }); + await kb.runDatasourceNode("ds", "node", { + inputs: { input: "x" }, + datasource_type: "custom", + is_published: true, + }); + await kb.runPipeline("ds", { + inputs: { input: "x" }, + datasource_type: "custom", + datasource_info_list: [], + start_node_id: "start", + is_published: true, + response_mode: "streaming", + }); + await kb.runPipeline("ds", { + inputs: { input: "x" }, + datasource_type: "custom", + datasource_info_list: [], + start_node_id: "start", + is_published: true, + response_mode: "blocking", + }); + await kb.uploadPipelineFile(form); + + expect(warn).toHaveBeenCalled(); + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/datasets/ds/pipeline/datasource-plugins", + query: { is_published: true }, + }); + expect(requestStream).toHaveBeenCalledWith({ + method: "POST", + path: 
"/datasets/ds/pipeline/datasource/nodes/node/run", + data: { + inputs: { input: "x" }, + datasource_type: "custom", + is_published: true, + }, + }); + expect(requestStream).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets/ds/pipeline/run", + data: { + inputs: { input: "x" }, + datasource_type: "custom", + datasource_info_list: [], + start_node_id: "start", + is_published: true, + response_mode: "streaming", + }, + }); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets/ds/pipeline/run", + data: { + inputs: { input: "x" }, + datasource_type: "custom", + datasource_info_list: [], + start_node_id: "start", + is_published: true, + response_mode: "blocking", + }, + }); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/datasets/pipeline/file-upload", + data: form, + }); + }); +}); diff --git a/sdks/nodejs-client/src/client/knowledge-base.ts b/sdks/nodejs-client/src/client/knowledge-base.ts new file mode 100644 index 0000000000..7a0e39898b --- /dev/null +++ b/sdks/nodejs-client/src/client/knowledge-base.ts @@ -0,0 +1,706 @@ +import { DifyClient } from "./base"; +import type { + DatasetCreateRequest, + DatasetListOptions, + DatasetTagBindingRequest, + DatasetTagCreateRequest, + DatasetTagDeleteRequest, + DatasetTagUnbindingRequest, + DatasetTagUpdateRequest, + DatasetUpdateRequest, + DocumentGetOptions, + DocumentListOptions, + DocumentStatusAction, + DocumentTextCreateRequest, + DocumentTextUpdateRequest, + SegmentCreateRequest, + SegmentListOptions, + SegmentUpdateRequest, + ChildChunkCreateRequest, + ChildChunkListOptions, + ChildChunkUpdateRequest, + MetadataCreateRequest, + MetadataOperationRequest, + MetadataUpdateRequest, + HitTestingRequest, + DatasourcePluginListOptions, + DatasourceNodeRunRequest, + PipelineRunRequest, + KnowledgeBaseResponse, + PipelineStreamEvent, +} from "../types/knowledge-base"; +import type { DifyResponse, DifyStream, QueryParams } from "../types/common"; +import { + ensureNonEmptyString, + ensureOptionalBoolean, + ensureOptionalInt, + ensureOptionalString, + ensureStringArray, +} from "./validation"; +import { FileUploadError, ValidationError } from "../errors/dify-error"; +import { isFormData } from "../http/form-data"; + +const warned = new Set(); +const warnOnce = (message: string): void => { + if (warned.has(message)) { + return; + } + warned.add(message); + console.warn(message); +}; + +const ensureFormData = (form: unknown, context: string): void => { + if (!isFormData(form)) { + throw new FileUploadError(`${context} requires FormData`); + } +}; + +const ensureNonEmptyArray = (value: unknown, name: string): void => { + if (!Array.isArray(value) || value.length === 0) { + throw new ValidationError(`${name} must be a non-empty array`); + } +}; + +const warnPipelineRoutes = (): void => { + warnOnce( + "RAG pipeline endpoints may be unavailable unless the service API registers dataset/rag_pipeline routes." + ); +}; + +export class KnowledgeBaseClient extends DifyClient { + async listDatasets( + options?: DatasetListOptions + ): Promise> { + ensureOptionalInt(options?.page, "page"); + ensureOptionalInt(options?.limit, "limit"); + ensureOptionalString(options?.keyword, "keyword"); + ensureOptionalBoolean(options?.includeAll, "includeAll"); + + const query: QueryParams = { + page: options?.page, + limit: options?.limit, + keyword: options?.keyword ?? undefined, + include_all: options?.includeAll ?? 
undefined, + }; + + if (options?.tagIds && options.tagIds.length > 0) { + ensureStringArray(options.tagIds, "tagIds"); + query.tag_ids = options.tagIds; + } + + return this.http.request({ + method: "GET", + path: "/datasets", + query, + }); + } + + async createDataset( + request: DatasetCreateRequest + ): Promise> { + ensureNonEmptyString(request.name, "name"); + return this.http.request({ + method: "POST", + path: "/datasets", + data: request, + }); + } + + async getDataset(datasetId: string): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + return this.http.request({ + method: "GET", + path: `/datasets/${datasetId}`, + }); + } + + async updateDataset( + datasetId: string, + request: DatasetUpdateRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + if (request.name !== undefined && request.name !== null) { + ensureNonEmptyString(request.name, "name"); + } + return this.http.request({ + method: "PATCH", + path: `/datasets/${datasetId}`, + data: request, + }); + } + + async deleteDataset(datasetId: string): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + return this.http.request({ + method: "DELETE", + path: `/datasets/${datasetId}`, + }); + } + + async updateDocumentStatus( + datasetId: string, + action: DocumentStatusAction, + documentIds: string[] + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(action, "action"); + ensureStringArray(documentIds, "documentIds"); + return this.http.request({ + method: "PATCH", + path: `/datasets/${datasetId}/documents/status/${action}`, + data: { + document_ids: documentIds, + }, + }); + } + + async listTags(): Promise> { + return this.http.request({ + method: "GET", + path: "/datasets/tags", + }); + } + + async createTag( + request: DatasetTagCreateRequest + ): Promise> { + ensureNonEmptyString(request.name, "name"); + return this.http.request({ + method: "POST", + path: "/datasets/tags", + data: request, + }); + } + + async updateTag( + request: DatasetTagUpdateRequest + ): Promise> { + ensureNonEmptyString(request.tag_id, "tag_id"); + ensureNonEmptyString(request.name, "name"); + return this.http.request({ + method: "PATCH", + path: "/datasets/tags", + data: request, + }); + } + + async deleteTag( + request: DatasetTagDeleteRequest + ): Promise> { + ensureNonEmptyString(request.tag_id, "tag_id"); + return this.http.request({ + method: "DELETE", + path: "/datasets/tags", + data: request, + }); + } + + async bindTags( + request: DatasetTagBindingRequest + ): Promise> { + ensureStringArray(request.tag_ids, "tag_ids"); + ensureNonEmptyString(request.target_id, "target_id"); + return this.http.request({ + method: "POST", + path: "/datasets/tags/binding", + data: request, + }); + } + + async unbindTags( + request: DatasetTagUnbindingRequest + ): Promise> { + ensureNonEmptyString(request.tag_id, "tag_id"); + ensureNonEmptyString(request.target_id, "target_id"); + return this.http.request({ + method: "POST", + path: "/datasets/tags/unbinding", + data: request, + }); + } + + async getDatasetTags( + datasetId: string + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + return this.http.request({ + method: "GET", + path: `/datasets/${datasetId}/tags`, + }); + } + + async createDocumentByText( + datasetId: string, + request: DocumentTextCreateRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(request.name, "name"); + ensureNonEmptyString(request.text, "text"); + return this.http.request({ + method: "POST", + path: 
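+/*
+ * Editor's note (usage sketch, not part of this diff): dataset listing takes
+ * camelCase options and maps them to the API's snake_case query parameters
+ * (includeAll -> include_all, tagIds -> tag_ids), as the code above shows.
+ * The ids below are placeholders.
+ *
+ *   const kb = new KnowledgeBaseClient("app-xxxxxxxx");
+ *   await kb.listDatasets({ page: 1, limit: 20, includeAll: true, tagIds: ["tag-1"] });
+ *   await kb.bindTags({ tag_ids: ["tag-1"], target_id: "dataset-1" });
+ */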
`/datasets/${datasetId}/document/create_by_text`, + data: request, + }); + } + + async updateDocumentByText( + datasetId: string, + documentId: string, + request: DocumentTextUpdateRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + if (request.name !== undefined && request.name !== null) { + ensureNonEmptyString(request.name, "name"); + } + return this.http.request({ + method: "POST", + path: `/datasets/${datasetId}/documents/${documentId}/update_by_text`, + data: request, + }); + } + + async createDocumentByFile( + datasetId: string, + form: unknown + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureFormData(form, "createDocumentByFile"); + return this.http.request({ + method: "POST", + path: `/datasets/${datasetId}/document/create_by_file`, + data: form, + }); + } + + async updateDocumentByFile( + datasetId: string, + documentId: string, + form: unknown + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + ensureFormData(form, "updateDocumentByFile"); + return this.http.request({ + method: "POST", + path: `/datasets/${datasetId}/documents/${documentId}/update_by_file`, + data: form, + }); + } + + async listDocuments( + datasetId: string, + options?: DocumentListOptions + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureOptionalInt(options?.page, "page"); + ensureOptionalInt(options?.limit, "limit"); + ensureOptionalString(options?.keyword, "keyword"); + ensureOptionalString(options?.status, "status"); + + return this.http.request({ + method: "GET", + path: `/datasets/${datasetId}/documents`, + query: { + page: options?.page, + limit: options?.limit, + keyword: options?.keyword ?? undefined, + status: options?.status ?? undefined, + }, + }); + } + + async getDocument( + datasetId: string, + documentId: string, + options?: DocumentGetOptions + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + if (options?.metadata) { + const allowed = new Set(["all", "only", "without"]); + if (!allowed.has(options.metadata)) { + throw new ValidationError("metadata must be one of all, only, without"); + } + } + return this.http.request({ + method: "GET", + path: `/datasets/${datasetId}/documents/${documentId}`, + query: { + metadata: options?.metadata ?? 
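+/*
+ * Editor's note (usage sketch, not part of this diff): documents can be
+ * created from raw text or from multipart form data; the form-data path is
+ * guarded by ensureFormData above. Ids are placeholders.
+ *
+ *   await kb.createDocumentByText("dataset-1", { name: "notes", text: "document body" });
+ *   await kb.listDocuments("dataset-1", { page: 1, limit: 20, keyword: "notes" });
+ *   await kb.getDocument("dataset-1", "doc-1", { metadata: "only" });
+ */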
undefined, + }, + }); + } + + async deleteDocument( + datasetId: string, + documentId: string + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + return this.http.request({ + method: "DELETE", + path: `/datasets/${datasetId}/documents/${documentId}`, + }); + } + + async getDocumentIndexingStatus( + datasetId: string, + batch: string + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(batch, "batch"); + return this.http.request({ + method: "GET", + path: `/datasets/${datasetId}/documents/${batch}/indexing-status`, + }); + } + + async createSegments( + datasetId: string, + documentId: string, + request: SegmentCreateRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + ensureNonEmptyArray(request.segments, "segments"); + return this.http.request({ + method: "POST", + path: `/datasets/${datasetId}/documents/${documentId}/segments`, + data: request, + }); + } + + async listSegments( + datasetId: string, + documentId: string, + options?: SegmentListOptions + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + ensureOptionalInt(options?.page, "page"); + ensureOptionalInt(options?.limit, "limit"); + ensureOptionalString(options?.keyword, "keyword"); + if (options?.status && options.status.length > 0) { + ensureStringArray(options.status, "status"); + } + + const query: QueryParams = { + page: options?.page, + limit: options?.limit, + keyword: options?.keyword ?? undefined, + }; + if (options?.status && options.status.length > 0) { + query.status = options.status; + } + + return this.http.request({ + method: "GET", + path: `/datasets/${datasetId}/documents/${documentId}/segments`, + query, + }); + } + + async getSegment( + datasetId: string, + documentId: string, + segmentId: string + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + ensureNonEmptyString(segmentId, "segmentId"); + return this.http.request({ + method: "GET", + path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}`, + }); + } + + async updateSegment( + datasetId: string, + documentId: string, + segmentId: string, + request: SegmentUpdateRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + ensureNonEmptyString(segmentId, "segmentId"); + return this.http.request({ + method: "POST", + path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}`, + data: request, + }); + } + + async deleteSegment( + datasetId: string, + documentId: string, + segmentId: string + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + ensureNonEmptyString(segmentId, "segmentId"); + return this.http.request({ + method: "DELETE", + path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}`, + }); + } + + async createChildChunk( + datasetId: string, + documentId: string, + segmentId: string, + request: ChildChunkCreateRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + ensureNonEmptyString(segmentId, "segmentId"); + ensureNonEmptyString(request.content, "content"); + return this.http.request({ + method: "POST", + path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks`, + data: 
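+/*
+ * Editor's note (usage sketch, not part of this diff): segments live under a
+ * document and child chunks live under a segment; the nested request paths
+ * above mirror that hierarchy. Ids are placeholders.
+ *
+ *   await kb.createSegments("dataset-1", "doc-1", { segments: [{ content: "chunk text" }] });
+ *   await kb.updateSegment("dataset-1", "doc-1", "seg-1", { segment: { content: "edited" } });
+ */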
request, + }); + } + + async listChildChunks( + datasetId: string, + documentId: string, + segmentId: string, + options?: ChildChunkListOptions + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + ensureNonEmptyString(segmentId, "segmentId"); + ensureOptionalInt(options?.page, "page"); + ensureOptionalInt(options?.limit, "limit"); + ensureOptionalString(options?.keyword, "keyword"); + + return this.http.request({ + method: "GET", + path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks`, + query: { + page: options?.page, + limit: options?.limit, + keyword: options?.keyword ?? undefined, + }, + }); + } + + async updateChildChunk( + datasetId: string, + documentId: string, + segmentId: string, + childChunkId: string, + request: ChildChunkUpdateRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + ensureNonEmptyString(segmentId, "segmentId"); + ensureNonEmptyString(childChunkId, "childChunkId"); + ensureNonEmptyString(request.content, "content"); + return this.http.request({ + method: "PATCH", + path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks/${childChunkId}`, + data: request, + }); + } + + async deleteChildChunk( + datasetId: string, + documentId: string, + segmentId: string, + childChunkId: string + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(documentId, "documentId"); + ensureNonEmptyString(segmentId, "segmentId"); + ensureNonEmptyString(childChunkId, "childChunkId"); + return this.http.request({ + method: "DELETE", + path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks/${childChunkId}`, + }); + } + + async listMetadata( + datasetId: string + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + return this.http.request({ + method: "GET", + path: `/datasets/${datasetId}/metadata`, + }); + } + + async createMetadata( + datasetId: string, + request: MetadataCreateRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(request.name, "name"); + ensureNonEmptyString(request.type, "type"); + return this.http.request({ + method: "POST", + path: `/datasets/${datasetId}/metadata`, + data: request, + }); + } + + async updateMetadata( + datasetId: string, + metadataId: string, + request: MetadataUpdateRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(metadataId, "metadataId"); + ensureNonEmptyString(request.name, "name"); + return this.http.request({ + method: "PATCH", + path: `/datasets/${datasetId}/metadata/${metadataId}`, + data: request, + }); + } + + async deleteMetadata( + datasetId: string, + metadataId: string + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(metadataId, "metadataId"); + return this.http.request({ + method: "DELETE", + path: `/datasets/${datasetId}/metadata/${metadataId}`, + }); + } + + async listBuiltInMetadata( + datasetId: string + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + return this.http.request({ + method: "GET", + path: `/datasets/${datasetId}/metadata/built-in`, + }); + } + + async updateBuiltInMetadata( + datasetId: string, + action: "enable" | "disable" + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(action, "action"); + return this.http.request({ + method: "POST", + path: 
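+/*
+ * Editor's note (usage sketch, not part of this diff): metadata fields are
+ * managed per dataset, and built-in metadata is toggled with an
+ * enable/disable path segment, as the methods above show.
+ *
+ *   await kb.createMetadata("dataset-1", { name: "source", type: "string" });
+ *   await kb.updateBuiltInMetadata("dataset-1", "enable");
+ */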
`/datasets/${datasetId}/metadata/built-in/${action}`, + }); + } + + async updateDocumentsMetadata( + datasetId: string, + request: MetadataOperationRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyArray(request.operation_data, "operation_data"); + return this.http.request({ + method: "POST", + path: `/datasets/${datasetId}/documents/metadata`, + data: request, + }); + } + + async hitTesting( + datasetId: string, + request: HitTestingRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + if (request.query !== undefined && request.query !== null) { + ensureOptionalString(request.query, "query"); + } + if (request.attachment_ids && request.attachment_ids.length > 0) { + ensureStringArray(request.attachment_ids, "attachment_ids"); + } + return this.http.request({ + method: "POST", + path: `/datasets/${datasetId}/hit-testing`, + data: request, + }); + } + + async retrieve( + datasetId: string, + request: HitTestingRequest + ): Promise> { + ensureNonEmptyString(datasetId, "datasetId"); + return this.http.request({ + method: "POST", + path: `/datasets/${datasetId}/retrieve`, + data: request, + }); + } + + async listDatasourcePlugins( + datasetId: string, + options?: DatasourcePluginListOptions + ): Promise> { + warnPipelineRoutes(); + ensureNonEmptyString(datasetId, "datasetId"); + ensureOptionalBoolean(options?.isPublished, "isPublished"); + return this.http.request({ + method: "GET", + path: `/datasets/${datasetId}/pipeline/datasource-plugins`, + query: { + is_published: options?.isPublished ?? undefined, + }, + }); + } + + async runDatasourceNode( + datasetId: string, + nodeId: string, + request: DatasourceNodeRunRequest + ): Promise> { + warnPipelineRoutes(); + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(nodeId, "nodeId"); + ensureNonEmptyString(request.datasource_type, "datasource_type"); + return this.http.requestStream({ + method: "POST", + path: `/datasets/${datasetId}/pipeline/datasource/nodes/${nodeId}/run`, + data: request, + }); + } + + async runPipeline( + datasetId: string, + request: PipelineRunRequest + ): Promise | DifyStream> { + warnPipelineRoutes(); + ensureNonEmptyString(datasetId, "datasetId"); + ensureNonEmptyString(request.datasource_type, "datasource_type"); + ensureNonEmptyString(request.start_node_id, "start_node_id"); + const shouldStream = request.response_mode === "streaming"; + if (shouldStream) { + return this.http.requestStream({ + method: "POST", + path: `/datasets/${datasetId}/pipeline/run`, + data: request, + }); + } + return this.http.request({ + method: "POST", + path: `/datasets/${datasetId}/pipeline/run`, + data: request, + }); + } + + async uploadPipelineFile( + form: unknown + ): Promise> { + warnPipelineRoutes(); + ensureFormData(form, "uploadPipelineFile"); + return this.http.request({ + method: "POST", + path: "/datasets/pipeline/file-upload", + data: form, + }); + } +} diff --git a/sdks/nodejs-client/src/client/validation.test.js b/sdks/nodejs-client/src/client/validation.test.js new file mode 100644 index 0000000000..65bfa471a6 --- /dev/null +++ b/sdks/nodejs-client/src/client/validation.test.js @@ -0,0 +1,91 @@ +import { describe, expect, it } from "vitest"; +import { + ensureNonEmptyString, + ensureOptionalBoolean, + ensureOptionalInt, + ensureOptionalString, + ensureOptionalStringArray, + ensureRating, + ensureStringArray, + validateParams, +} from "./validation"; + +const makeLongString = (length) => "a".repeat(length); + +describe("validation utilities", () => { + 
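+/*
+ * Editor's note (usage sketch, not part of this diff): runPipeline in the
+ * knowledge-base client above streams when response_mode is "streaming" and
+ * returns a plain response otherwise; every pipeline helper warns once per
+ * process that the routes may not be registered on the service API.
+ *
+ *   await kb.runPipeline("dataset-1", {
+ *     inputs: {},
+ *     datasource_type: "custom",
+ *     datasource_info_list: [],
+ *     start_node_id: "start",
+ *     is_published: true,
+ *     response_mode: "streaming",
+ *   });
+ */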
it("ensureNonEmptyString throws on empty or whitespace", () => { + expect(() => ensureNonEmptyString("", "name")).toThrow(); + expect(() => ensureNonEmptyString(" ", "name")).toThrow(); + }); + + it("ensureNonEmptyString throws on overly long strings", () => { + expect(() => + ensureNonEmptyString(makeLongString(10001), "name") + ).toThrow(); + }); + + it("ensureOptionalString ignores undefined and validates when set", () => { + expect(() => ensureOptionalString(undefined, "opt")).not.toThrow(); + expect(() => ensureOptionalString("", "opt")).toThrow(); + }); + + it("ensureOptionalString throws on overly long strings", () => { + expect(() => ensureOptionalString(makeLongString(10001), "opt")).toThrow(); + }); + + it("ensureOptionalInt validates integer", () => { + expect(() => ensureOptionalInt(undefined, "limit")).not.toThrow(); + expect(() => ensureOptionalInt(1.2, "limit")).toThrow(); + }); + + it("ensureOptionalBoolean validates boolean", () => { + expect(() => ensureOptionalBoolean(undefined, "flag")).not.toThrow(); + expect(() => ensureOptionalBoolean("yes", "flag")).toThrow(); + }); + + it("ensureStringArray enforces size and content", () => { + expect(() => ensureStringArray([], "items")).toThrow(); + expect(() => ensureStringArray([""], "items")).toThrow(); + expect(() => + ensureStringArray(Array.from({ length: 1001 }, () => "a"), "items") + ).toThrow(); + expect(() => ensureStringArray(["ok"], "items")).not.toThrow(); + }); + + it("ensureOptionalStringArray ignores undefined", () => { + expect(() => ensureOptionalStringArray(undefined, "tags")).not.toThrow(); + }); + + it("ensureOptionalStringArray validates when set", () => { + expect(() => ensureOptionalStringArray(["valid"], "tags")).not.toThrow(); + expect(() => ensureOptionalStringArray([], "tags")).toThrow(); + expect(() => ensureOptionalStringArray([""], "tags")).toThrow(); + }); + + it("ensureRating validates allowed values", () => { + expect(() => ensureRating(undefined)).not.toThrow(); + expect(() => ensureRating("like")).not.toThrow(); + expect(() => ensureRating("bad")).toThrow(); + }); + + it("validateParams enforces generic rules", () => { + expect(() => validateParams({ user: 123 })).toThrow(); + expect(() => validateParams({ rating: "bad" })).toThrow(); + expect(() => validateParams({ page: 1.1 })).toThrow(); + expect(() => validateParams({ files: "bad" })).toThrow(); + // Empty strings are allowed for optional params (e.g., keyword: "" means no filter) + expect(() => validateParams({ keyword: "" })).not.toThrow(); + expect(() => validateParams({ name: makeLongString(10001) })).toThrow(); + expect(() => + validateParams({ items: Array.from({ length: 1001 }, () => "a") }) + ).toThrow(); + expect(() => + validateParams({ + data: Object.fromEntries( + Array.from({ length: 101 }, (_, i) => [String(i), i]) + ), + }) + ).toThrow(); + expect(() => validateParams({ user: "u", page: 1 })).not.toThrow(); + }); +}); diff --git a/sdks/nodejs-client/src/client/validation.ts b/sdks/nodejs-client/src/client/validation.ts new file mode 100644 index 0000000000..6aeec36bdc --- /dev/null +++ b/sdks/nodejs-client/src/client/validation.ts @@ -0,0 +1,136 @@ +import { ValidationError } from "../errors/dify-error"; + +const MAX_STRING_LENGTH = 10000; +const MAX_LIST_LENGTH = 1000; +const MAX_DICT_LENGTH = 100; + +export function ensureNonEmptyString( + value: unknown, + name: string +): asserts value is string { + if (typeof value !== "string" || value.trim().length === 0) { + throw new ValidationError(`${name} must be a non-empty 
string`);
+  }
+  if (value.length > MAX_STRING_LENGTH) {
+    throw new ValidationError(
+      `${name} exceeds maximum length of ${MAX_STRING_LENGTH} characters`
+    );
+  }
+}
+
+/**
+ * Validates optional string fields that must be non-empty when provided.
+ * Use this for fields like `name` that are optional but should not be empty strings.
+ *
+ * For filter parameters that accept empty strings (e.g., `keyword: ""`),
+ * use `validateParams` which allows empty strings for optional params.
+ */
+export function ensureOptionalString(value: unknown, name: string): void {
+  if (value === undefined || value === null) {
+    return;
+  }
+  if (typeof value !== "string" || value.trim().length === 0) {
+    throw new ValidationError(`${name} must be a non-empty string when set`);
+  }
+  if (value.length > MAX_STRING_LENGTH) {
+    throw new ValidationError(
+      `${name} exceeds maximum length of ${MAX_STRING_LENGTH} characters`
+    );
+  }
+}
+
+export function ensureOptionalInt(value: unknown, name: string): void {
+  if (value === undefined || value === null) {
+    return;
+  }
+  if (!Number.isInteger(value)) {
+    throw new ValidationError(`${name} must be an integer when set`);
+  }
+}
+
+export function ensureOptionalBoolean(value: unknown, name: string): void {
+  if (value === undefined || value === null) {
+    return;
+  }
+  if (typeof value !== "boolean") {
+    throw new ValidationError(`${name} must be a boolean when set`);
+  }
+}
+
+export function ensureStringArray(value: unknown, name: string): void {
+  if (!Array.isArray(value) || value.length === 0) {
+    throw new ValidationError(`${name} must be a non-empty string array`);
+  }
+  if (value.length > MAX_LIST_LENGTH) {
+    throw new ValidationError(
+      `${name} exceeds maximum size of ${MAX_LIST_LENGTH} items`
+    );
+  }
+  value.forEach((item) => {
+    if (typeof item !== "string" || item.trim().length === 0) {
+      throw new ValidationError(`${name} must contain non-empty strings`);
+    }
+  });
+}
+
+export function ensureOptionalStringArray(value: unknown, name: string): void {
+  if (value === undefined || value === null) {
+    return;
+  }
+  ensureStringArray(value, name);
+}
+
+export function ensureRating(value: unknown): void {
+  if (value === undefined || value === null) {
+    return;
+  }
+  if (value !== "like" && value !== "dislike") {
+    throw new ValidationError("rating must be either 'like' or 'dislike'");
+  }
+}
+
+export function validateParams(params: Record<string, unknown>): void {
+  Object.entries(params).forEach(([key, value]) => {
+    if (value === undefined || value === null) {
+      return;
+    }
+
+    // Only check max length for strings; empty strings are allowed for optional params
+    // Required fields are validated at method level via ensureNonEmptyString
+    if (typeof value === "string") {
+      if (value.length > MAX_STRING_LENGTH) {
+        throw new ValidationError(
+          `Parameter '${key}' exceeds maximum length of ${MAX_STRING_LENGTH} characters`
+        );
+      }
+    } else if (Array.isArray(value)) {
+      if (value.length > MAX_LIST_LENGTH) {
+        throw new ValidationError(
+          `Parameter '${key}' exceeds maximum size of ${MAX_LIST_LENGTH} items`
+        );
+      }
+    } else if (typeof value === "object") {
+      if (Object.keys(value as Record<string, unknown>).length > MAX_DICT_LENGTH) {
+        throw new ValidationError(
+          `Parameter '${key}' exceeds maximum size of ${MAX_DICT_LENGTH} items`
+        );
+      }
+    }
+
+    if (key === "user" && typeof value !== "string") {
+      throw new ValidationError(`Parameter '${key}' must be a string`);
+    }
+    if (
+      (key === "page" || key === "limit" || key === "page_size") &&
+      !Number.isInteger(value)
+    ) {
+      throw new 
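+/*
+ * Editor's note (usage sketch, not part of this diff): these validators throw
+ * synchronously, before any HTTP request is issued, so bad input surfaces as
+ * a ValidationError rather than a failed network call.
+ *
+ *   ensureNonEmptyString("user-1", "user");      // passes
+ *   ensureOptionalInt(20, "limit");              // passes
+ *   validateParams({ user: "user-1", page: 1 }); // passes
+ *   ensureRating("bad");                         // throws ValidationError
+ */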
ValidationError(`Parameter '${key}' must be an integer`); + } + if (key === "files" && !Array.isArray(value) && typeof value !== "object") { + throw new ValidationError(`Parameter '${key}' must be a list or dict`); + } + if (key === "rating" && value !== "like" && value !== "dislike") { + throw new ValidationError(`Parameter '${key}' must be 'like' or 'dislike'`); + } + }); +} diff --git a/sdks/nodejs-client/src/client/workflow.test.js b/sdks/nodejs-client/src/client/workflow.test.js new file mode 100644 index 0000000000..79c419b55a --- /dev/null +++ b/sdks/nodejs-client/src/client/workflow.test.js @@ -0,0 +1,119 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { WorkflowClient } from "./workflow"; +import { createHttpClientWithSpies } from "../../tests/test-utils"; + +describe("WorkflowClient", () => { + beforeEach(() => { + vi.restoreAllMocks(); + }); + + it("runs workflows with blocking and streaming modes", async () => { + const { client, request, requestStream } = createHttpClientWithSpies(); + const workflow = new WorkflowClient(client); + + await workflow.run({ inputs: { input: "x" }, user: "user" }); + await workflow.run({ input: "x" }, "user", true); + + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/workflows/run", + data: { + inputs: { input: "x" }, + user: "user", + }, + }); + expect(requestStream).toHaveBeenCalledWith({ + method: "POST", + path: "/workflows/run", + data: { + inputs: { input: "x" }, + user: "user", + response_mode: "streaming", + }, + }); + }); + + it("runs workflow by id", async () => { + const { client, request, requestStream } = createHttpClientWithSpies(); + const workflow = new WorkflowClient(client); + + await workflow.runById("wf", { + inputs: { input: "x" }, + user: "user", + response_mode: "blocking", + }); + await workflow.runById("wf", { + inputs: { input: "x" }, + user: "user", + response_mode: "streaming", + }); + + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/workflows/wf/run", + data: { + inputs: { input: "x" }, + user: "user", + response_mode: "blocking", + }, + }); + expect(requestStream).toHaveBeenCalledWith({ + method: "POST", + path: "/workflows/wf/run", + data: { + inputs: { input: "x" }, + user: "user", + response_mode: "streaming", + }, + }); + }); + + it("gets run details and stops workflow", async () => { + const { client, request } = createHttpClientWithSpies(); + const workflow = new WorkflowClient(client); + + await workflow.getRun("run"); + await workflow.stop("task", "user"); + + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/workflows/run/run", + }); + expect(request).toHaveBeenCalledWith({ + method: "POST", + path: "/workflows/tasks/task/stop", + data: { user: "user" }, + }); + }); + + it("fetches workflow logs", async () => { + const { client, request } = createHttpClientWithSpies(); + const workflow = new WorkflowClient(client); + + // Use createdByEndUserSessionId to filter by user session (backend API parameter) + await workflow.getLogs({ + keyword: "k", + status: "succeeded", + startTime: "2024-01-01", + endTime: "2024-01-02", + createdByEndUserSessionId: "session-123", + page: 1, + limit: 20, + }); + + expect(request).toHaveBeenCalledWith({ + method: "GET", + path: "/workflows/logs", + query: { + keyword: "k", + status: "succeeded", + created_at__before: "2024-01-02", + created_at__after: "2024-01-01", + created_by_end_user_session_id: "session-123", + created_by_account: undefined, + page: 1, + limit: 20, + }, + }); + }); +}); diff --git 
a/sdks/nodejs-client/src/client/workflow.ts b/sdks/nodejs-client/src/client/workflow.ts
new file mode 100644
index 0000000000..ae4d5861fa
--- /dev/null
+++ b/sdks/nodejs-client/src/client/workflow.ts
@@ -0,0 +1,165 @@
+import { DifyClient } from "./base";
+import type { WorkflowRunRequest, WorkflowRunResponse } from "../types/workflow";
+import type { DifyResponse, DifyStream, QueryParams } from "../types/common";
+import {
+  ensureNonEmptyString,
+  ensureOptionalInt,
+  ensureOptionalString,
+} from "./validation";
+
+export class WorkflowClient extends DifyClient {
+  run(
+    request: WorkflowRunRequest
+  ): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<Record<string, unknown>>>;
+  run(
+    inputs: Record<string, unknown>,
+    user: string,
+    stream?: boolean
+  ): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<Record<string, unknown>>>;
+  run(
+    inputOrRequest: WorkflowRunRequest | Record<string, unknown>,
+    user?: string,
+    stream = false
+  ): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<Record<string, unknown>>> {
+    let payload: WorkflowRunRequest;
+    let shouldStream = stream;
+
+    if (user === undefined && "user" in (inputOrRequest as WorkflowRunRequest)) {
+      payload = inputOrRequest as WorkflowRunRequest;
+      shouldStream = payload.response_mode === "streaming";
+    } else {
+      ensureNonEmptyString(user, "user");
+      payload = {
+        inputs: inputOrRequest as Record<string, unknown>,
+        user,
+        response_mode: stream ? "streaming" : "blocking",
+      };
+    }
+
+    ensureNonEmptyString(payload.user, "user");
+
+    if (shouldStream) {
+      return this.http.requestStream<Record<string, unknown>>({
+        method: "POST",
+        path: "/workflows/run",
+        data: payload,
+      });
+    }
+
+    return this.http.request<WorkflowRunResponse>({
+      method: "POST",
+      path: "/workflows/run",
+      data: payload,
+    });
+  }
+
+  runById(
+    workflowId: string,
+    request: WorkflowRunRequest
+  ): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<Record<string, unknown>>> {
+    ensureNonEmptyString(workflowId, "workflowId");
+    ensureNonEmptyString(request.user, "user");
+    if (request.response_mode === "streaming") {
+      return this.http.requestStream<Record<string, unknown>>({
+        method: "POST",
+        path: `/workflows/${workflowId}/run`,
+        data: request,
+      });
+    }
+    return this.http.request<WorkflowRunResponse>({
+      method: "POST",
+      path: `/workflows/${workflowId}/run`,
+      data: request,
+    });
+  }
+
+  getRun(workflowRunId: string): Promise<DifyResponse<WorkflowRunResponse>> {
+    ensureNonEmptyString(workflowRunId, "workflowRunId");
+    return this.http.request<WorkflowRunResponse>({
+      method: "GET",
+      path: `/workflows/run/${workflowRunId}`,
+    });
+  }
+
+  stop(
+    taskId: string,
+    user: string
+  ): Promise<DifyResponse<Record<string, unknown>>> {
+    ensureNonEmptyString(taskId, "taskId");
+    ensureNonEmptyString(user, "user");
+    return this.http.request<Record<string, unknown>>({
+      method: "POST",
+      path: `/workflows/tasks/${taskId}/stop`,
+      data: { user },
+    });
+  }
+
+  /**
+   * Get workflow execution logs with filtering options.
+   *
+   * Note: The backend API filters by `createdByEndUserSessionId` (end user session ID)
+   * or `createdByAccount` (account ID), not by a generic `user` parameter.
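+   *
+   * @example
+   * // illustrative sketch only; assumes `workflow` is an already-constructed WorkflowClient
+   * const logs = await workflow.getLogs({
+   *   status: "succeeded",
+   *   createdByEndUserSessionId: "session-123",
+   *   page: 1,
+   *   limit: 20,
+   * });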
+   */
+  getLogs(options?: {
+    keyword?: string;
+    status?: string;
+    createdAtBefore?: string;
+    createdAtAfter?: string;
+    createdByEndUserSessionId?: string;
+    createdByAccount?: string;
+    page?: number;
+    limit?: number;
+    startTime?: string;
+    endTime?: string;
+  }): Promise<DifyResponse<Record<string, unknown>>> {
+    if (options?.keyword) {
+      ensureOptionalString(options.keyword, "keyword");
+    }
+    if (options?.status) {
+      ensureOptionalString(options.status, "status");
+    }
+    if (options?.createdAtBefore) {
+      ensureOptionalString(options.createdAtBefore, "createdAtBefore");
+    }
+    if (options?.createdAtAfter) {
+      ensureOptionalString(options.createdAtAfter, "createdAtAfter");
+    }
+    if (options?.createdByEndUserSessionId) {
+      ensureOptionalString(
+        options.createdByEndUserSessionId,
+        "createdByEndUserSessionId"
+      );
+    }
+    if (options?.createdByAccount) {
+      ensureOptionalString(options.createdByAccount, "createdByAccount");
+    }
+    if (options?.startTime) {
+      ensureOptionalString(options.startTime, "startTime");
+    }
+    if (options?.endTime) {
+      ensureOptionalString(options.endTime, "endTime");
+    }
+    ensureOptionalInt(options?.page, "page");
+    ensureOptionalInt(options?.limit, "limit");
+
+    const createdAtAfter = options?.createdAtAfter ?? options?.startTime;
+    const createdAtBefore = options?.createdAtBefore ?? options?.endTime;
+
+    const query: QueryParams = {
+      keyword: options?.keyword,
+      status: options?.status,
+      created_at__before: createdAtBefore,
+      created_at__after: createdAtAfter,
+      created_by_end_user_session_id: options?.createdByEndUserSessionId,
+      created_by_account: options?.createdByAccount,
+      page: options?.page,
+      limit: options?.limit,
+    };
+
+    return this.http.request<Record<string, unknown>>({
+      method: "GET",
+      path: "/workflows/logs",
+      query,
+    });
+  }
+}
diff --git a/sdks/nodejs-client/src/client/workspace.test.js b/sdks/nodejs-client/src/client/workspace.test.js
new file mode 100644
index 0000000000..f8fb6e375a
--- /dev/null
+++ b/sdks/nodejs-client/src/client/workspace.test.js
@@ -0,0 +1,21 @@
+import { beforeEach, describe, expect, it, vi } from "vitest";
+import { WorkspaceClient } from "./workspace";
+import { createHttpClientWithSpies } from "../../tests/test-utils";
+
+describe("WorkspaceClient", () => {
+  beforeEach(() => {
+    vi.restoreAllMocks();
+  });
+
+  it("gets models by type", async () => {
+    const { client, request } = createHttpClientWithSpies();
+    const workspace = new WorkspaceClient(client);
+
+    await workspace.getModelsByType("llm");
+
+    expect(request).toHaveBeenCalledWith({
+      method: "GET",
+      path: "/workspaces/current/models/model-types/llm",
+    });
+  });
+});
diff --git a/sdks/nodejs-client/src/client/workspace.ts b/sdks/nodejs-client/src/client/workspace.ts
new file mode 100644
index 0000000000..daee540c99
--- /dev/null
+++ b/sdks/nodejs-client/src/client/workspace.ts
@@ -0,0 +1,16 @@
+import { DifyClient } from "./base";
+import type { WorkspaceModelType, WorkspaceModelsResponse } from "../types/workspace";
+import type { DifyResponse } from "../types/common";
+import { ensureNonEmptyString } from "./validation";
+
+export class WorkspaceClient extends DifyClient {
+  async getModelsByType(
+    modelType: WorkspaceModelType
+  ): Promise<DifyResponse<WorkspaceModelsResponse>> {
+    ensureNonEmptyString(modelType, "modelType");
+    return this.http.request<WorkspaceModelsResponse>({
+      method: "GET",
+      path: `/workspaces/current/models/model-types/${modelType}`,
+    });
+  }
+}
diff --git a/sdks/nodejs-client/src/errors/dify-error.test.js b/sdks/nodejs-client/src/errors/dify-error.test.js
new file mode 100644
index 0000000000..d151eca391
--- /dev/null
+++
b/sdks/nodejs-client/src/errors/dify-error.test.js @@ -0,0 +1,37 @@ +import { describe, expect, it } from "vitest"; +import { + APIError, + AuthenticationError, + DifyError, + FileUploadError, + NetworkError, + RateLimitError, + TimeoutError, + ValidationError, +} from "./dify-error"; + +describe("Dify errors", () => { + it("sets base error fields", () => { + const err = new DifyError("base", { + statusCode: 400, + responseBody: { message: "bad" }, + requestId: "req", + retryAfter: 1, + }); + expect(err.name).toBe("DifyError"); + expect(err.statusCode).toBe(400); + expect(err.responseBody).toEqual({ message: "bad" }); + expect(err.requestId).toBe("req"); + expect(err.retryAfter).toBe(1); + }); + + it("creates specific error types", () => { + expect(new APIError("api").name).toBe("APIError"); + expect(new AuthenticationError("auth").name).toBe("AuthenticationError"); + expect(new RateLimitError("rate").name).toBe("RateLimitError"); + expect(new ValidationError("val").name).toBe("ValidationError"); + expect(new NetworkError("net").name).toBe("NetworkError"); + expect(new TimeoutError("timeout").name).toBe("TimeoutError"); + expect(new FileUploadError("upload").name).toBe("FileUploadError"); + }); +}); diff --git a/sdks/nodejs-client/src/errors/dify-error.ts b/sdks/nodejs-client/src/errors/dify-error.ts new file mode 100644 index 0000000000..e393e1b132 --- /dev/null +++ b/sdks/nodejs-client/src/errors/dify-error.ts @@ -0,0 +1,75 @@ +export type DifyErrorOptions = { + statusCode?: number; + responseBody?: unknown; + requestId?: string; + retryAfter?: number; + cause?: unknown; +}; + +export class DifyError extends Error { + statusCode?: number; + responseBody?: unknown; + requestId?: string; + retryAfter?: number; + + constructor(message: string, options: DifyErrorOptions = {}) { + super(message); + this.name = "DifyError"; + this.statusCode = options.statusCode; + this.responseBody = options.responseBody; + this.requestId = options.requestId; + this.retryAfter = options.retryAfter; + if (options.cause) { + (this as { cause?: unknown }).cause = options.cause; + } + } +} + +export class APIError extends DifyError { + constructor(message: string, options: DifyErrorOptions = {}) { + super(message, options); + this.name = "APIError"; + } +} + +export class AuthenticationError extends APIError { + constructor(message: string, options: DifyErrorOptions = {}) { + super(message, options); + this.name = "AuthenticationError"; + } +} + +export class RateLimitError extends APIError { + constructor(message: string, options: DifyErrorOptions = {}) { + super(message, options); + this.name = "RateLimitError"; + } +} + +export class ValidationError extends APIError { + constructor(message: string, options: DifyErrorOptions = {}) { + super(message, options); + this.name = "ValidationError"; + } +} + +export class NetworkError extends DifyError { + constructor(message: string, options: DifyErrorOptions = {}) { + super(message, options); + this.name = "NetworkError"; + } +} + +export class TimeoutError extends DifyError { + constructor(message: string, options: DifyErrorOptions = {}) { + super(message, options); + this.name = "TimeoutError"; + } +} + +export class FileUploadError extends DifyError { + constructor(message: string, options: DifyErrorOptions = {}) { + super(message, options); + this.name = "FileUploadError"; + } +} diff --git a/sdks/nodejs-client/src/http/client.test.js b/sdks/nodejs-client/src/http/client.test.js new file mode 100644 index 0000000000..05892547ed --- /dev/null +++ 
b/sdks/nodejs-client/src/http/client.test.js @@ -0,0 +1,304 @@ +import axios from "axios"; +import { Readable } from "node:stream"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { + APIError, + AuthenticationError, + FileUploadError, + NetworkError, + RateLimitError, + TimeoutError, + ValidationError, +} from "../errors/dify-error"; +import { HttpClient } from "./client"; + +describe("HttpClient", () => { + beforeEach(() => { + vi.restoreAllMocks(); + }); + it("builds requests with auth headers and JSON content type", async () => { + const mockRequest = vi.fn().mockResolvedValue({ + status: 200, + data: { ok: true }, + headers: { "x-request-id": "req" }, + }); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + + const client = new HttpClient({ apiKey: "test" }); + const response = await client.request({ + method: "POST", + path: "/chat-messages", + data: { user: "u" }, + }); + + expect(response.requestId).toBe("req"); + const config = mockRequest.mock.calls[0][0]; + expect(config.headers.Authorization).toBe("Bearer test"); + expect(config.headers["Content-Type"]).toBe("application/json"); + expect(config.responseType).toBe("json"); + }); + + it("serializes array query params", async () => { + const mockRequest = vi.fn().mockResolvedValue({ + status: 200, + data: "ok", + headers: {}, + }); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + + const client = new HttpClient({ apiKey: "test" }); + await client.requestRaw({ + method: "GET", + path: "/datasets", + query: { tag_ids: ["a", "b"], limit: 2 }, + }); + + const config = mockRequest.mock.calls[0][0]; + const queryString = config.paramsSerializer.serialize({ + tag_ids: ["a", "b"], + limit: 2, + }); + expect(queryString).toBe("tag_ids=a&tag_ids=b&limit=2"); + }); + + it("returns SSE stream helpers", async () => { + const mockRequest = vi.fn().mockResolvedValue({ + status: 200, + data: Readable.from(["data: {\"text\":\"hi\"}\n\n"]), + headers: { "x-request-id": "req" }, + }); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + + const client = new HttpClient({ apiKey: "test" }); + const stream = await client.requestStream({ + method: "POST", + path: "/chat-messages", + data: { user: "u" }, + }); + + expect(stream.status).toBe(200); + expect(stream.requestId).toBe("req"); + await expect(stream.toText()).resolves.toBe("hi"); + }); + + it("returns binary stream helpers", async () => { + const mockRequest = vi.fn().mockResolvedValue({ + status: 200, + data: Readable.from(["chunk"]), + headers: { "x-request-id": "req" }, + }); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + + const client = new HttpClient({ apiKey: "test" }); + const stream = await client.requestBinaryStream({ + method: "POST", + path: "/text-to-audio", + data: { user: "u", text: "hi" }, + }); + + expect(stream.status).toBe(200); + expect(stream.requestId).toBe("req"); + }); + + it("respects form-data headers", async () => { + const mockRequest = vi.fn().mockResolvedValue({ + status: 200, + data: "ok", + headers: {}, + }); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + + const client = new HttpClient({ apiKey: "test" }); + const form = { + append: () => {}, + getHeaders: () => ({ "content-type": "multipart/form-data; boundary=abc" }), + }; + + await client.requestRaw({ + method: "POST", + path: "/files/upload", + data: form, + }); + + const config = mockRequest.mock.calls[0][0]; + expect(config.headers["content-type"]).toBe( + 
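+      // the boundary chosen by the FormData object must survive; the client's JSON default would corrupt the multipart body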
"multipart/form-data; boundary=abc" + ); + expect(config.headers["Content-Type"]).toBeUndefined(); + }); + + it("maps 401 and 429 errors", async () => { + const mockRequest = vi.fn(); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + const client = new HttpClient({ apiKey: "test", maxRetries: 0 }); + + mockRequest.mockRejectedValueOnce({ + isAxiosError: true, + response: { + status: 401, + data: { message: "unauthorized" }, + headers: {}, + }, + }); + await expect( + client.requestRaw({ method: "GET", path: "/meta" }) + ).rejects.toBeInstanceOf(AuthenticationError); + + mockRequest.mockRejectedValueOnce({ + isAxiosError: true, + response: { + status: 429, + data: { message: "rate" }, + headers: { "retry-after": "2" }, + }, + }); + const error = await client + .requestRaw({ method: "GET", path: "/meta" }) + .catch((err) => err); + expect(error).toBeInstanceOf(RateLimitError); + expect(error.retryAfter).toBe(2); + }); + + it("maps validation and upload errors", async () => { + const mockRequest = vi.fn(); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + const client = new HttpClient({ apiKey: "test", maxRetries: 0 }); + + mockRequest.mockRejectedValueOnce({ + isAxiosError: true, + response: { + status: 422, + data: { message: "invalid" }, + headers: {}, + }, + }); + await expect( + client.requestRaw({ method: "POST", path: "/chat-messages", data: { user: "u" } }) + ).rejects.toBeInstanceOf(ValidationError); + + mockRequest.mockRejectedValueOnce({ + isAxiosError: true, + config: { url: "/files/upload" }, + response: { + status: 400, + data: { message: "bad upload" }, + headers: {}, + }, + }); + await expect( + client.requestRaw({ method: "POST", path: "/files/upload", data: { user: "u" } }) + ).rejects.toBeInstanceOf(FileUploadError); + }); + + it("maps timeout and network errors", async () => { + const mockRequest = vi.fn(); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + const client = new HttpClient({ apiKey: "test", maxRetries: 0 }); + + mockRequest.mockRejectedValueOnce({ + isAxiosError: true, + code: "ECONNABORTED", + message: "timeout", + }); + await expect( + client.requestRaw({ method: "GET", path: "/meta" }) + ).rejects.toBeInstanceOf(TimeoutError); + + mockRequest.mockRejectedValueOnce({ + isAxiosError: true, + message: "network", + }); + await expect( + client.requestRaw({ method: "GET", path: "/meta" }) + ).rejects.toBeInstanceOf(NetworkError); + }); + + it("retries on timeout errors", async () => { + const mockRequest = vi.fn(); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + const client = new HttpClient({ apiKey: "test", maxRetries: 1, retryDelay: 0 }); + + mockRequest + .mockRejectedValueOnce({ + isAxiosError: true, + code: "ECONNABORTED", + message: "timeout", + }) + .mockResolvedValueOnce({ status: 200, data: "ok", headers: {} }); + + await client.requestRaw({ method: "GET", path: "/meta" }); + expect(mockRequest).toHaveBeenCalledTimes(2); + }); + + it("validates query parameters before request", async () => { + const mockRequest = vi.fn(); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + const client = new HttpClient({ apiKey: "test" }); + + await expect( + client.requestRaw({ method: "GET", path: "/meta", query: { user: 1 } }) + ).rejects.toBeInstanceOf(ValidationError); + expect(mockRequest).not.toHaveBeenCalled(); + }); + + it("returns APIError for other http failures", async () => { + const mockRequest = vi.fn(); + vi.spyOn(axios, 
"create").mockReturnValue({ request: mockRequest }); + const client = new HttpClient({ apiKey: "test", maxRetries: 0 }); + + mockRequest.mockRejectedValueOnce({ + isAxiosError: true, + response: { status: 500, data: { message: "server" }, headers: {} }, + }); + + await expect( + client.requestRaw({ method: "GET", path: "/meta" }) + ).rejects.toBeInstanceOf(APIError); + }); + + it("logs requests and responses when enableLogging is true", async () => { + const mockRequest = vi.fn().mockResolvedValue({ + status: 200, + data: { ok: true }, + headers: {}, + }); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {}); + + const client = new HttpClient({ apiKey: "test", enableLogging: true }); + await client.requestRaw({ method: "GET", path: "/meta" }); + + expect(consoleInfo).toHaveBeenCalledWith( + expect.stringContaining("dify-client-node response 200 GET") + ); + consoleInfo.mockRestore(); + }); + + it("logs retry attempts when enableLogging is true", async () => { + const mockRequest = vi.fn(); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {}); + + const client = new HttpClient({ + apiKey: "test", + maxRetries: 1, + retryDelay: 0, + enableLogging: true, + }); + + mockRequest + .mockRejectedValueOnce({ + isAxiosError: true, + code: "ECONNABORTED", + message: "timeout", + }) + .mockResolvedValueOnce({ status: 200, data: "ok", headers: {} }); + + await client.requestRaw({ method: "GET", path: "/meta" }); + + expect(consoleInfo).toHaveBeenCalledWith( + expect.stringContaining("dify-client-node retry") + ); + consoleInfo.mockRestore(); + }); +}); diff --git a/sdks/nodejs-client/src/http/client.ts b/sdks/nodejs-client/src/http/client.ts new file mode 100644 index 0000000000..44b63c9903 --- /dev/null +++ b/sdks/nodejs-client/src/http/client.ts @@ -0,0 +1,368 @@ +import axios from "axios"; +import type { + AxiosError, + AxiosInstance, + AxiosRequestConfig, + AxiosResponse, +} from "axios"; +import type { Readable } from "node:stream"; +import { + DEFAULT_BASE_URL, + DEFAULT_MAX_RETRIES, + DEFAULT_RETRY_DELAY_SECONDS, + DEFAULT_TIMEOUT_SECONDS, +} from "../types/common"; +import type { + DifyClientConfig, + DifyResponse, + Headers, + QueryParams, + RequestMethod, +} from "../types/common"; +import type { DifyError } from "../errors/dify-error"; +import { + APIError, + AuthenticationError, + FileUploadError, + NetworkError, + RateLimitError, + TimeoutError, + ValidationError, +} from "../errors/dify-error"; +import { getFormDataHeaders, isFormData } from "./form-data"; +import { createBinaryStream, createSseStream } from "./sse"; +import { getRetryDelayMs, shouldRetry, sleep } from "./retry"; +import { validateParams } from "../client/validation"; + +const DEFAULT_USER_AGENT = "dify-client-node"; + +export type RequestOptions = { + method: RequestMethod; + path: string; + query?: QueryParams; + data?: unknown; + headers?: Headers; + responseType?: AxiosRequestConfig["responseType"]; +}; + +export type HttpClientSettings = Required< + Omit +> & { + apiKey: string; +}; + +const normalizeSettings = (config: DifyClientConfig): HttpClientSettings => ({ + apiKey: config.apiKey, + baseUrl: config.baseUrl ?? DEFAULT_BASE_URL, + timeout: config.timeout ?? DEFAULT_TIMEOUT_SECONDS, + maxRetries: config.maxRetries ?? DEFAULT_MAX_RETRIES, + retryDelay: config.retryDelay ?? 
DEFAULT_RETRY_DELAY_SECONDS, + enableLogging: config.enableLogging ?? false, +}); + +const normalizeHeaders = (headers: AxiosResponse["headers"]): Headers => { + const result: Headers = {}; + if (!headers) { + return result; + } + Object.entries(headers).forEach(([key, value]) => { + if (Array.isArray(value)) { + result[key.toLowerCase()] = value.join(", "); + } else if (typeof value === "string") { + result[key.toLowerCase()] = value; + } else if (typeof value === "number") { + result[key.toLowerCase()] = value.toString(); + } + }); + return result; +}; + +const resolveRequestId = (headers: Headers): string | undefined => + headers["x-request-id"] ?? headers["x-requestid"]; + +const buildRequestUrl = (baseUrl: string, path: string): string => { + const trimmed = baseUrl.replace(/\/+$/, ""); + return `${trimmed}${path}`; +}; + +const buildQueryString = (params?: QueryParams): string => { + if (!params) { + return ""; + } + const searchParams = new URLSearchParams(); + Object.entries(params).forEach(([key, value]) => { + if (value === undefined || value === null) { + return; + } + if (Array.isArray(value)) { + value.forEach((item) => { + searchParams.append(key, String(item)); + }); + return; + } + searchParams.append(key, String(value)); + }); + return searchParams.toString(); +}; + +const parseRetryAfterSeconds = (headerValue?: string): number | undefined => { + if (!headerValue) { + return undefined; + } + const asNumber = Number.parseInt(headerValue, 10); + if (!Number.isNaN(asNumber)) { + return asNumber; + } + const asDate = Date.parse(headerValue); + if (!Number.isNaN(asDate)) { + const diff = asDate - Date.now(); + return diff > 0 ? Math.ceil(diff / 1000) : 0; + } + return undefined; +}; + +const isReadableStream = (value: unknown): value is Readable => { + if (!value || typeof value !== "object") { + return false; + } + return typeof (value as { pipe?: unknown }).pipe === "function"; +}; + +const isUploadLikeRequest = (config?: AxiosRequestConfig): boolean => { + const url = (config?.url ?? 
"").toLowerCase(); + if (!url) { + return false; + } + return ( + url.includes("upload") || + url.includes("/files/") || + url.includes("audio-to-text") || + url.includes("create_by_file") || + url.includes("update_by_file") + ); +}; + +const resolveErrorMessage = (status: number, responseBody: unknown): string => { + if (typeof responseBody === "string" && responseBody.trim().length > 0) { + return responseBody; + } + if ( + responseBody && + typeof responseBody === "object" && + "message" in responseBody + ) { + const message = (responseBody as Record).message; + if (typeof message === "string" && message.trim().length > 0) { + return message; + } + } + return `Request failed with status code ${status}`; +}; + +const mapAxiosError = (error: unknown): DifyError => { + if (axios.isAxiosError(error)) { + const axiosError = error as AxiosError; + if (axiosError.response) { + const status = axiosError.response.status; + const headers = normalizeHeaders(axiosError.response.headers); + const requestId = resolveRequestId(headers); + const responseBody = axiosError.response.data; + const message = resolveErrorMessage(status, responseBody); + + if (status === 401) { + return new AuthenticationError(message, { + statusCode: status, + responseBody, + requestId, + }); + } + if (status === 429) { + const retryAfter = parseRetryAfterSeconds(headers["retry-after"]); + return new RateLimitError(message, { + statusCode: status, + responseBody, + requestId, + retryAfter, + }); + } + if (status === 422) { + return new ValidationError(message, { + statusCode: status, + responseBody, + requestId, + }); + } + if (status === 400) { + if (isUploadLikeRequest(axiosError.config)) { + return new FileUploadError(message, { + statusCode: status, + responseBody, + requestId, + }); + } + } + return new APIError(message, { + statusCode: status, + responseBody, + requestId, + }); + } + if (axiosError.code === "ECONNABORTED") { + return new TimeoutError("Request timed out", { cause: axiosError }); + } + return new NetworkError(axiosError.message, { cause: axiosError }); + } + if (error instanceof Error) { + return new NetworkError(error.message, { cause: error }); + } + return new NetworkError("Unexpected network error", { cause: error }); +}; + +export class HttpClient { + private axios: AxiosInstance; + private settings: HttpClientSettings; + + constructor(config: DifyClientConfig) { + this.settings = normalizeSettings(config); + this.axios = axios.create({ + baseURL: this.settings.baseUrl, + timeout: this.settings.timeout * 1000, + }); + } + + updateApiKey(apiKey: string): void { + this.settings.apiKey = apiKey; + } + + getSettings(): HttpClientSettings { + return { ...this.settings }; + } + + async request(options: RequestOptions): Promise> { + const response = await this.requestRaw(options); + const headers = normalizeHeaders(response.headers); + return { + data: response.data as T, + status: response.status, + headers, + requestId: resolveRequestId(headers), + }; + } + + async requestStream(options: RequestOptions) { + const response = await this.requestRaw({ + ...options, + responseType: "stream", + }); + const headers = normalizeHeaders(response.headers); + return createSseStream(response.data as Readable, { + status: response.status, + headers, + requestId: resolveRequestId(headers), + }); + } + + async requestBinaryStream(options: RequestOptions) { + const response = await this.requestRaw({ + ...options, + responseType: "stream", + }); + const headers = normalizeHeaders(response.headers); + return 
createBinaryStream(response.data as Readable, {
+      status: response.status,
+      headers,
+      requestId: resolveRequestId(headers),
+    });
+  }
+
+  async requestRaw(options: RequestOptions): Promise<AxiosResponse> {
+    const { method, path, query, data, headers, responseType } = options;
+    const { apiKey, enableLogging, maxRetries, retryDelay, timeout } =
+      this.settings;
+
+    if (query) {
+      validateParams(query as Record<string, unknown>);
+    }
+    if (
+      data &&
+      typeof data === "object" &&
+      !Array.isArray(data) &&
+      !isFormData(data) &&
+      !isReadableStream(data)
+    ) {
+      validateParams(data as Record<string, unknown>);
+    }
+
+    const requestHeaders: Headers = {
+      Authorization: `Bearer ${apiKey}`,
+      ...headers,
+    };
+    if (
+      typeof process !== "undefined" &&
+      !!process.versions?.node &&
+      !requestHeaders["User-Agent"] &&
+      !requestHeaders["user-agent"]
+    ) {
+      requestHeaders["User-Agent"] = DEFAULT_USER_AGENT;
+    }
+
+    if (isFormData(data)) {
+      Object.assign(requestHeaders, getFormDataHeaders(data));
+    } else if (data && method !== "GET") {
+      requestHeaders["Content-Type"] = "application/json";
+    }
+
+    const url = buildRequestUrl(this.settings.baseUrl, path);
+
+    if (enableLogging) {
+      console.info(`dify-client-node request ${method} ${url}`);
+    }
+
+    const axiosConfig: AxiosRequestConfig = {
+      method,
+      url: path,
+      params: query,
+      paramsSerializer: {
+        serialize: (params) => buildQueryString(params as QueryParams),
+      },
+      headers: requestHeaders,
+      responseType: responseType ?? "json",
+      timeout: timeout * 1000,
+    };
+
+    if (method !== "GET" && data !== undefined) {
+      axiosConfig.data = data;
+    }
+
+    let attempt = 0;
+    // `attempt` is a zero-based retry counter
+    // Total attempts = 1 (initial) + maxRetries
+    // e.g., maxRetries=3 means: attempt 0 (initial), then retries at 1, 2, 3
+    while (true) {
+      try {
+        const response = await this.axios.request(axiosConfig);
+        if (enableLogging) {
+          console.info(
+            `dify-client-node response ${response.status} ${method} ${url}`
+          );
+        }
+        return response;
+      } catch (error) {
+        const mapped = mapAxiosError(error);
+        if (!shouldRetry(mapped, attempt, maxRetries)) {
+          throw mapped;
+        }
+        const retryAfterSeconds =
+          mapped instanceof RateLimitError ?
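+          // a server-advertised Retry-After on a 429 takes precedence over exponential backoff in getRetryDelayMs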
mapped.retryAfter : undefined; + const delay = getRetryDelayMs(attempt + 1, retryDelay, retryAfterSeconds); + if (enableLogging) { + console.info( + `dify-client-node retry ${attempt + 1} in ${delay}ms for ${method} ${url}` + ); + } + attempt += 1; + await sleep(delay); + } + } + } +} diff --git a/sdks/nodejs-client/src/http/form-data.test.js b/sdks/nodejs-client/src/http/form-data.test.js new file mode 100644 index 0000000000..2938e41435 --- /dev/null +++ b/sdks/nodejs-client/src/http/form-data.test.js @@ -0,0 +1,23 @@ +import { describe, expect, it } from "vitest"; +import { getFormDataHeaders, isFormData } from "./form-data"; + +describe("form-data helpers", () => { + it("detects form-data like objects", () => { + const formLike = { + append: () => {}, + getHeaders: () => ({ "content-type": "multipart/form-data" }), + }; + expect(isFormData(formLike)).toBe(true); + expect(isFormData({})).toBe(false); + }); + + it("returns headers from form-data", () => { + const formLike = { + append: () => {}, + getHeaders: () => ({ "content-type": "multipart/form-data" }), + }; + expect(getFormDataHeaders(formLike)).toEqual({ + "content-type": "multipart/form-data", + }); + }); +}); diff --git a/sdks/nodejs-client/src/http/form-data.ts b/sdks/nodejs-client/src/http/form-data.ts new file mode 100644 index 0000000000..2efa23e54e --- /dev/null +++ b/sdks/nodejs-client/src/http/form-data.ts @@ -0,0 +1,31 @@ +import type { Headers } from "../types/common"; + +export type FormDataLike = { + append: (...args: unknown[]) => void; + getHeaders?: () => Headers; + constructor?: { name?: string }; +}; + +export const isFormData = (value: unknown): value is FormDataLike => { + if (!value || typeof value !== "object") { + return false; + } + if (typeof FormData !== "undefined" && value instanceof FormData) { + return true; + } + const candidate = value as FormDataLike; + if (typeof candidate.append !== "function") { + return false; + } + if (typeof candidate.getHeaders === "function") { + return true; + } + return candidate.constructor?.name === "FormData"; +}; + +export const getFormDataHeaders = (form: FormDataLike): Headers => { + if (typeof form.getHeaders === "function") { + return form.getHeaders(); + } + return {}; +}; diff --git a/sdks/nodejs-client/src/http/retry.test.js b/sdks/nodejs-client/src/http/retry.test.js new file mode 100644 index 0000000000..fc017f631b --- /dev/null +++ b/sdks/nodejs-client/src/http/retry.test.js @@ -0,0 +1,38 @@ +import { describe, expect, it } from "vitest"; +import { getRetryDelayMs, shouldRetry } from "./retry"; +import { NetworkError, RateLimitError, TimeoutError } from "../errors/dify-error"; + +const withMockedRandom = (value, fn) => { + const original = Math.random; + Math.random = () => value; + try { + fn(); + } finally { + Math.random = original; + } +}; + +describe("retry helpers", () => { + it("getRetryDelayMs honors retry-after header", () => { + expect(getRetryDelayMs(1, 1, 3)).toBe(3000); + }); + + it("getRetryDelayMs uses exponential backoff with jitter", () => { + withMockedRandom(0, () => { + expect(getRetryDelayMs(1, 1)).toBe(1000); + expect(getRetryDelayMs(2, 1)).toBe(2000); + expect(getRetryDelayMs(3, 1)).toBe(4000); + }); + }); + + it("shouldRetry respects max retries", () => { + expect(shouldRetry(new TimeoutError("timeout"), 3, 3)).toBe(false); + }); + + it("shouldRetry retries on network, timeout, and rate limit", () => { + expect(shouldRetry(new TimeoutError("timeout"), 0, 3)).toBe(true); + expect(shouldRetry(new NetworkError("network"), 0, 
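+    // timeouts, network failures, and rate limits are transient, so they are the only retryable errors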
3)).toBe(true);
+    expect(shouldRetry(new RateLimitError("limit"), 0, 3)).toBe(true);
+    expect(shouldRetry(new Error("other"), 0, 3)).toBe(false);
+  });
+});
diff --git a/sdks/nodejs-client/src/http/retry.ts b/sdks/nodejs-client/src/http/retry.ts
new file mode 100644
index 0000000000..3776b78d5f
--- /dev/null
+++ b/sdks/nodejs-client/src/http/retry.ts
@@ -0,0 +1,40 @@
+import { RateLimitError, NetworkError, TimeoutError } from "../errors/dify-error";
+
+export const sleep = (ms: number): Promise<void> =>
+  new Promise((resolve) => {
+    setTimeout(resolve, ms);
+  });
+
+export const getRetryDelayMs = (
+  attempt: number,
+  retryDelaySeconds: number,
+  retryAfterSeconds?: number
+): number => {
+  if (retryAfterSeconds && retryAfterSeconds > 0) {
+    return retryAfterSeconds * 1000;
+  }
+  const base = retryDelaySeconds * 1000;
+  const exponential = base * Math.pow(2, Math.max(0, attempt - 1));
+  const jitter = Math.random() * base;
+  return exponential + jitter;
+};
+
+export const shouldRetry = (
+  error: unknown,
+  attempt: number,
+  maxRetries: number
+): boolean => {
+  if (attempt >= maxRetries) {
+    return false;
+  }
+  if (error instanceof TimeoutError) {
+    return true;
+  }
+  if (error instanceof NetworkError) {
+    return true;
+  }
+  if (error instanceof RateLimitError) {
+    return true;
+  }
+  return false;
+};
diff --git a/sdks/nodejs-client/src/http/sse.test.js b/sdks/nodejs-client/src/http/sse.test.js
new file mode 100644
index 0000000000..fff85fd29b
--- /dev/null
+++ b/sdks/nodejs-client/src/http/sse.test.js
@@ -0,0 +1,76 @@
+import { Readable } from "node:stream";
+import { describe, expect, it } from "vitest";
+import { createBinaryStream, createSseStream, parseSseStream } from "./sse";
+
+describe("sse parsing", () => {
+  it("parses event and data lines", async () => {
+    const stream = Readable.from([
+      "event: message\n",
+      "data: {\"answer\":\"hi\"}\n",
+      "\n",
+    ]);
+    const events = [];
+    for await (const event of parseSseStream(stream)) {
+      events.push(event);
+    }
+    expect(events).toHaveLength(1);
+    expect(events[0].event).toBe("message");
+    expect(events[0].data).toEqual({ answer: "hi" });
+  });
+
+  it("handles multi-line data payloads", async () => {
+    const stream = Readable.from(["data: line1\n", "data: line2\n", "\n"]);
+    const events = [];
+    for await (const event of parseSseStream(stream)) {
+      events.push(event);
+    }
+    expect(events[0].raw).toBe("line1\nline2");
+    expect(events[0].data).toBe("line1\nline2");
+  });
+
+  it("createSseStream exposes toText", async () => {
+    const stream = Readable.from([
+      "data: {\"answer\":\"hello\"}\n\n",
+      "data: {\"delta\":\" world\"}\n\n",
+    ]);
+    const sseStream = createSseStream(stream, {
+      status: 200,
+      headers: {},
+      requestId: "req",
+    });
+    const text = await sseStream.toText();
+    expect(text).toBe("hello world");
+  });
+
+  it("toText extracts text from string data", async () => {
+    const stream = Readable.from(["data: plain text\n\n"]);
+    const sseStream = createSseStream(stream, { status: 200, headers: {} });
+    const text = await sseStream.toText();
+    expect(text).toBe("plain text");
+  });
+
+  it("toText extracts text field from object", async () => {
+    const stream = Readable.from(['data: {"text":"hello"}\n\n']);
+    const sseStream = createSseStream(stream, { status: 200, headers: {} });
+    const text = await sseStream.toText();
+    expect(text).toBe("hello");
+  });
+
+  it("toText returns empty for invalid data", async () => {
+    const stream = Readable.from(["data: null\n\n", "data: 123\n\n"]);
+    const sseStream = createSseStream(stream, {
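+      // neither `null` nor a bare number carries extractable text, so toText() should fall back to ""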
status: 200, headers: {} });
+    const text = await sseStream.toText();
+    expect(text).toBe("");
+  });
+
+  it("createBinaryStream exposes metadata", () => {
+    const stream = Readable.from(["chunk"]);
+    const binary = createBinaryStream(stream, {
+      status: 200,
+      headers: { "content-type": "audio/mpeg" },
+      requestId: "req",
+    });
+    expect(binary.status).toBe(200);
+    expect(binary.headers["content-type"]).toBe("audio/mpeg");
+  });
+});
diff --git a/sdks/nodejs-client/src/http/sse.ts b/sdks/nodejs-client/src/http/sse.ts
new file mode 100644
index 0000000000..ed5a17fe39
--- /dev/null
+++ b/sdks/nodejs-client/src/http/sse.ts
@@ -0,0 +1,133 @@
+import type { Readable } from "node:stream";
+import { StringDecoder } from "node:string_decoder";
+import type { BinaryStream, DifyStream, Headers, StreamEvent } from "../types/common";
+
+const readLines = async function* (stream: Readable): AsyncIterable<string> {
+  const decoder = new StringDecoder("utf8");
+  let buffered = "";
+  for await (const chunk of stream) {
+    buffered += decoder.write(chunk as Buffer);
+    let index = buffered.indexOf("\n");
+    while (index >= 0) {
+      let line = buffered.slice(0, index);
+      buffered = buffered.slice(index + 1);
+      if (line.endsWith("\r")) {
+        line = line.slice(0, -1);
+      }
+      yield line;
+      index = buffered.indexOf("\n");
+    }
+  }
+  buffered += decoder.end();
+  if (buffered) {
+    yield buffered;
+  }
+};
+
+const parseMaybeJson = (value: string): unknown => {
+  if (!value) {
+    return null;
+  }
+  try {
+    return JSON.parse(value);
+  } catch {
+    return value;
+  }
+};
+
+export const parseSseStream = async function* <T = unknown>(
+  stream: Readable
+): AsyncIterable<StreamEvent<T>> {
+  let eventName: string | undefined;
+  const dataLines: string[] = [];
+
+  const emitEvent = function* (): Iterable<StreamEvent<T>> {
+    if (!eventName && dataLines.length === 0) {
+      return;
+    }
+    const raw = dataLines.join("\n");
+    const parsed = parseMaybeJson(raw) as T | string | null;
+    yield {
+      event: eventName,
+      data: parsed,
+      raw,
+    };
+    eventName = undefined;
+    dataLines.length = 0;
+  };
+
+  for await (const line of readLines(stream)) {
+    if (!line) {
+      yield* emitEvent();
+      continue;
+    }
+    if (line.startsWith(":")) {
+      continue;
+    }
+    if (line.startsWith("event:")) {
+      eventName = line.slice("event:".length).trim();
+      continue;
+    }
+    if (line.startsWith("data:")) {
+      dataLines.push(line.slice("data:".length).trimStart());
+      continue;
+    }
+  }
+
+  yield* emitEvent();
+};
+
+const extractTextFromEvent = (data: unknown): string => {
+  if (typeof data === "string") {
+    return data;
+  }
+  if (!data || typeof data !== "object") {
+    return "";
+  }
+  const record = data as Record<string, unknown>;
+  if (typeof record.answer === "string") {
+    return record.answer;
+  }
+  if (typeof record.text === "string") {
+    return record.text;
+  }
+  if (typeof record.delta === "string") {
+    return record.delta;
+  }
+  return "";
+};
+
+export const createSseStream = <T = unknown>(
+  stream: Readable,
+  meta: { status: number; headers: Headers; requestId?: string }
+): DifyStream<T> => {
+  const iterator = parseSseStream<T>(stream)[Symbol.asyncIterator]();
+  const iterable = {
+    [Symbol.asyncIterator]: () => iterator,
+    data: stream,
+    status: meta.status,
+    headers: meta.headers,
+    requestId: meta.requestId,
+    toReadable: () => stream,
+    toText: async () => {
+      let text = "";
+      for await (const event of iterable) {
+        text += extractTextFromEvent(event.data);
+      }
+      return text;
+    },
+  } satisfies DifyStream<T>;
+
+  return iterable;
+};
+
+export const createBinaryStream = (
+  stream: Readable,
+  meta: { status: number; headers: Headers; requestId?:
string } +): BinaryStream => ({ + data: stream, + status: meta.status, + headers: meta.headers, + requestId: meta.requestId, + toReadable: () => stream, +}); diff --git a/sdks/nodejs-client/src/index.test.js b/sdks/nodejs-client/src/index.test.js new file mode 100644 index 0000000000..289f4d9b1b --- /dev/null +++ b/sdks/nodejs-client/src/index.test.js @@ -0,0 +1,227 @@ +import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from "vitest"; +import { ChatClient, DifyClient, WorkflowClient, BASE_URL, routes } from "./index"; +import axios from "axios"; + +const mockRequest = vi.fn(); + +const setupAxiosMock = () => { + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); +}; + +beforeEach(() => { + vi.restoreAllMocks(); + mockRequest.mockReset(); + setupAxiosMock(); +}); + +describe("Client", () => { + it("should create a client", () => { + new DifyClient("test"); + + expect(axios.create).toHaveBeenCalledWith({ + baseURL: BASE_URL, + timeout: 60000, + }); + }); + + it("should update the api key", () => { + const difyClient = new DifyClient("test"); + difyClient.updateApiKey("test2"); + + expect(difyClient.getHttpClient().getSettings().apiKey).toBe("test2"); + }); +}); + +describe("Send Requests", () => { + it("should make a successful request to the application parameter", async () => { + const difyClient = new DifyClient("test"); + const method = "GET"; + const endpoint = routes.application.url(); + mockRequest.mockResolvedValue({ + status: 200, + data: "response", + headers: {}, + }); + + await difyClient.sendRequest(method, endpoint); + + const requestConfig = mockRequest.mock.calls[0][0]; + expect(requestConfig).toMatchObject({ + method, + url: endpoint, + params: undefined, + responseType: "json", + timeout: 60000, + }); + expect(requestConfig.headers.Authorization).toBe("Bearer test"); + }); + + it("uses the getMeta route configuration", async () => { + const difyClient = new DifyClient("test"); + mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} }); + + await difyClient.getMeta("end-user"); + + expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({ + method: routes.getMeta.method, + url: routes.getMeta.url(), + params: { user: "end-user" }, + headers: expect.objectContaining({ + Authorization: "Bearer test", + }), + responseType: "json", + timeout: 60000, + })); + }); +}); + +describe("File uploads", () => { + const OriginalFormData = globalThis.FormData; + + beforeAll(() => { + globalThis.FormData = class FormDataMock { + append() {} + + getHeaders() { + return { + "content-type": "multipart/form-data; boundary=test", + }; + } + }; + }); + + afterAll(() => { + globalThis.FormData = OriginalFormData; + }); + + it("does not override multipart boundary headers for FormData", async () => { + const difyClient = new DifyClient("test"); + const form = new globalThis.FormData(); + mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} }); + + await difyClient.fileUpload(form, "end-user"); + + expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({ + method: routes.fileUpload.method, + url: routes.fileUpload.url(), + params: undefined, + headers: expect.objectContaining({ + Authorization: "Bearer test", + "content-type": "multipart/form-data; boundary=test", + }), + responseType: "json", + timeout: 60000, + data: form, + })); + }); +}); + +describe("Workflow client", () => { + it("uses tasks stop path for workflow stop", async () => { + const workflowClient = new WorkflowClient("test"); + 
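+    // note: stop() addresses the task id (POST /workflows/tasks/:taskId/stop), not the workflow run id used by getRun()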
mockRequest.mockResolvedValue({ status: 200, data: "stopped", headers: {} }); + + await workflowClient.stop("task-1", "end-user"); + + expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({ + method: routes.stopWorkflow.method, + url: routes.stopWorkflow.url("task-1"), + params: undefined, + headers: expect.objectContaining({ + Authorization: "Bearer test", + "Content-Type": "application/json", + }), + responseType: "json", + timeout: 60000, + data: { user: "end-user" }, + })); + }); + + it("maps workflow log filters to service api params", async () => { + const workflowClient = new WorkflowClient("test"); + mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} }); + + await workflowClient.getLogs({ + createdAtAfter: "2024-01-01T00:00:00Z", + createdAtBefore: "2024-01-02T00:00:00Z", + createdByEndUserSessionId: "sess-1", + createdByAccount: "acc-1", + page: 2, + limit: 10, + }); + + expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({ + method: "GET", + url: "/workflows/logs", + params: { + created_at__after: "2024-01-01T00:00:00Z", + created_at__before: "2024-01-02T00:00:00Z", + created_by_end_user_session_id: "sess-1", + created_by_account: "acc-1", + page: 2, + limit: 10, + }, + headers: expect.objectContaining({ + Authorization: "Bearer test", + }), + responseType: "json", + timeout: 60000, + })); + }); +}); + +describe("Chat client", () => { + it("places user in query for suggested messages", async () => { + const chatClient = new ChatClient("test"); + mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} }); + + await chatClient.getSuggested("msg-1", "end-user"); + + expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({ + method: routes.getSuggested.method, + url: routes.getSuggested.url("msg-1"), + params: { user: "end-user" }, + headers: expect.objectContaining({ + Authorization: "Bearer test", + }), + responseType: "json", + timeout: 60000, + })); + }); + + it("uses last_id when listing conversations", async () => { + const chatClient = new ChatClient("test"); + mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} }); + + await chatClient.getConversations("end-user", "last-1", 10); + + expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({ + method: routes.getConversations.method, + url: routes.getConversations.url(), + params: { user: "end-user", last_id: "last-1", limit: 10 }, + headers: expect.objectContaining({ + Authorization: "Bearer test", + }), + responseType: "json", + timeout: 60000, + })); + }); + + it("lists app feedbacks without user params", async () => { + const chatClient = new ChatClient("test"); + mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} }); + + await chatClient.getAppFeedbacks(1, 20); + + expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({ + method: "GET", + url: "/app/feedbacks", + params: { page: 1, limit: 20 }, + headers: expect.objectContaining({ + Authorization: "Bearer test", + }), + responseType: "json", + timeout: 60000, + })); + }); +}); diff --git a/sdks/nodejs-client/src/index.ts b/sdks/nodejs-client/src/index.ts new file mode 100644 index 0000000000..3ba3757baa --- /dev/null +++ b/sdks/nodejs-client/src/index.ts @@ -0,0 +1,103 @@ +import { DEFAULT_BASE_URL } from "./types/common"; + +export const BASE_URL = DEFAULT_BASE_URL; + +export const routes = { + feedback: { + method: "POST", + url: (messageId: string) => `/messages/${messageId}/feedbacks`, + }, + application: { + method: "GET", + url: () => "/parameters", 
+  },
+  fileUpload: {
+    method: "POST",
+    url: () => "/files/upload",
+  },
+  filePreview: {
+    method: "GET",
+    url: (fileId: string) => `/files/${fileId}/preview`,
+  },
+  textToAudio: {
+    method: "POST",
+    url: () => "/text-to-audio",
+  },
+  audioToText: {
+    method: "POST",
+    url: () => "/audio-to-text",
+  },
+  getMeta: {
+    method: "GET",
+    url: () => "/meta",
+  },
+  getInfo: {
+    method: "GET",
+    url: () => "/info",
+  },
+  getSite: {
+    method: "GET",
+    url: () => "/site",
+  },
+  createCompletionMessage: {
+    method: "POST",
+    url: () => "/completion-messages",
+  },
+  stopCompletionMessage: {
+    method: "POST",
+    url: (taskId: string) => `/completion-messages/${taskId}/stop`,
+  },
+  createChatMessage: {
+    method: "POST",
+    url: () => "/chat-messages",
+  },
+  getSuggested: {
+    method: "GET",
+    url: (messageId: string) => `/messages/${messageId}/suggested`,
+  },
+  stopChatMessage: {
+    method: "POST",
+    url: (taskId: string) => `/chat-messages/${taskId}/stop`,
+  },
+  getConversations: {
+    method: "GET",
+    url: () => "/conversations",
+  },
+  getConversationMessages: {
+    method: "GET",
+    url: () => "/messages",
+  },
+  renameConversation: {
+    method: "POST",
+    url: (conversationId: string) => `/conversations/${conversationId}/name`,
+  },
+  deleteConversation: {
+    method: "DELETE",
+    url: (conversationId: string) => `/conversations/${conversationId}`,
+  },
+  runWorkflow: {
+    method: "POST",
+    url: () => "/workflows/run",
+  },
+  stopWorkflow: {
+    method: "POST",
+    url: (taskId: string) => `/workflows/tasks/${taskId}/stop`,
+  },
+};
+
+export { DifyClient } from "./client/base";
+export { ChatClient } from "./client/chat";
+export { CompletionClient } from "./client/completion";
+export { WorkflowClient } from "./client/workflow";
+export { KnowledgeBaseClient } from "./client/knowledge-base";
+export { WorkspaceClient } from "./client/workspace";
+
+export * from "./errors/dify-error";
+export * from "./types/common";
+export * from "./types/annotation";
+export * from "./types/chat";
+export * from "./types/completion";
+export * from "./types/knowledge-base";
+export * from "./types/workflow";
+export * from "./types/workspace";
+export { HttpClient } from "./http/client";
diff --git a/sdks/nodejs-client/src/types/annotation.ts b/sdks/nodejs-client/src/types/annotation.ts
new file mode 100644
index 0000000000..dcbd644dab
--- /dev/null
+++ b/sdks/nodejs-client/src/types/annotation.ts
@@ -0,0 +1,18 @@
+export type AnnotationCreateRequest = {
+  question: string;
+  answer: string;
+};
+
+export type AnnotationReplyActionRequest = {
+  score_threshold: number;
+  embedding_provider_name: string;
+  embedding_model_name: string;
+};
+
+export type AnnotationListOptions = {
+  page?: number;
+  limit?: number;
+  keyword?: string;
+};
+
+export type AnnotationResponse = Record<string, unknown>;
diff --git a/sdks/nodejs-client/src/types/chat.ts b/sdks/nodejs-client/src/types/chat.ts
new file mode 100644
index 0000000000..5b627f6cf6
--- /dev/null
+++ b/sdks/nodejs-client/src/types/chat.ts
@@ -0,0 +1,17 @@
+import type { StreamEvent } from "./common";
+
+export type ChatMessageRequest = {
+  inputs?: Record<string, unknown>;
+  query: string;
+  user: string;
+  response_mode?: "blocking" | "streaming";
+  files?: Array<Record<string, unknown>> | null;
+  conversation_id?: string;
+  auto_generate_name?: boolean;
+  workflow_id?: string;
+  retriever_from?: "app" | "dataset";
+};
+
+export type ChatMessageResponse = Record<string, unknown>;
+
+export type ChatStreamEvent = StreamEvent<Record<string, unknown>>;
diff --git a/sdks/nodejs-client/src/types/common.ts b/sdks/nodejs-client/src/types/common.ts
new file mode
100644
index 0000000000..00b0fcc756
--- /dev/null
+++ b/sdks/nodejs-client/src/types/common.ts
@@ -0,0 +1,71 @@
+export const DEFAULT_BASE_URL = "https://api.dify.ai/v1";
+export const DEFAULT_TIMEOUT_SECONDS = 60;
+export const DEFAULT_MAX_RETRIES = 3;
+export const DEFAULT_RETRY_DELAY_SECONDS = 1;
+
+export type RequestMethod = "GET" | "POST" | "PATCH" | "PUT" | "DELETE";
+
+export type QueryParamValue =
+  | string
+  | number
+  | boolean
+  | Array<string | number | boolean>
+  | undefined;
+
+export type QueryParams = Record<string, QueryParamValue>;
+
+export type Headers = Record<string, string>;
+
+export type DifyClientConfig = {
+  apiKey: string;
+  baseUrl?: string;
+  timeout?: number;
+  maxRetries?: number;
+  retryDelay?: number;
+  enableLogging?: boolean;
+};
+
+export type DifyResponse<T> = {
+  data: T;
+  status: number;
+  headers: Headers;
+  requestId?: string;
+};
+
+export type MessageFeedbackRequest = {
+  messageId: string;
+  user: string;
+  rating?: "like" | "dislike" | null;
+  content?: string | null;
+};
+
+export type TextToAudioRequest = {
+  user: string;
+  text?: string;
+  message_id?: string;
+  streaming?: boolean;
+  voice?: string;
+};
+
+export type StreamEvent<T = unknown> = {
+  event?: string;
+  data: T | string | null;
+  raw: string;
+};
+
+export type DifyStream<T = unknown> = AsyncIterable<StreamEvent<T>> & {
+  data: NodeJS.ReadableStream;
+  status: number;
+  headers: Headers;
+  requestId?: string;
+  toText(): Promise<string>;
+  toReadable(): NodeJS.ReadableStream;
+};
+
+export type BinaryStream = {
+  data: NodeJS.ReadableStream;
+  status: number;
+  headers: Headers;
+  requestId?: string;
+  toReadable(): NodeJS.ReadableStream;
+};
diff --git a/sdks/nodejs-client/src/types/completion.ts b/sdks/nodejs-client/src/types/completion.ts
new file mode 100644
index 0000000000..4074137c5d
--- /dev/null
+++ b/sdks/nodejs-client/src/types/completion.ts
@@ -0,0 +1,13 @@
+import type { StreamEvent } from "./common";
+
+export type CompletionRequest = {
+  inputs?: Record<string, unknown>;
+  response_mode?: "blocking" | "streaming";
+  user: string;
+  files?: Array<Record<string, unknown>> | null;
+  retriever_from?: "app" | "dataset";
+};
+
+export type CompletionResponse = Record<string, unknown>;
+
+export type CompletionStreamEvent = StreamEvent<Record<string, unknown>>;
diff --git a/sdks/nodejs-client/src/types/knowledge-base.ts b/sdks/nodejs-client/src/types/knowledge-base.ts
new file mode 100644
index 0000000000..a4ddef50ea
--- /dev/null
+++ b/sdks/nodejs-client/src/types/knowledge-base.ts
@@ -0,0 +1,184 @@
+export type DatasetListOptions = {
+  page?: number;
+  limit?: number;
+  keyword?: string | null;
+  tagIds?: string[];
+  includeAll?: boolean;
+};
+
+export type DatasetCreateRequest = {
+  name: string;
+  description?: string;
+  indexing_technique?: "high_quality" | "economy";
+  permission?: string | null;
+  external_knowledge_api_id?: string | null;
+  provider?: string;
+  external_knowledge_id?: string | null;
+  retrieval_model?: Record<string, unknown> | null;
+  embedding_model?: string | null;
+  embedding_model_provider?: string | null;
+};
+
+export type DatasetUpdateRequest = {
+  name?: string;
+  description?: string | null;
+  indexing_technique?: "high_quality" | "economy" | null;
+  permission?: string | null;
+  embedding_model?: string | null;
+  embedding_model_provider?: string | null;
+  retrieval_model?: Record<string, unknown> | null;
+  partial_member_list?: Array<Record<string, unknown>> | null;
+  external_retrieval_model?: Record<string, unknown> | null;
+  external_knowledge_id?: string | null;
+  external_knowledge_api_id?: string | null;
+};
+
+export type DocumentStatusAction = "enable" | "disable" | "archive" | "un_archive";
+
+export type DatasetTagCreateRequest = {
+  name: string;
+};
+
+export type DatasetTagUpdateRequest =
{
+  tag_id: string;
+  name: string;
+};
+
+export type DatasetTagDeleteRequest = {
+  tag_id: string;
+};
+
+export type DatasetTagBindingRequest = {
+  tag_ids: string[];
+  target_id: string;
+};
+
+export type DatasetTagUnbindingRequest = {
+  tag_id: string;
+  target_id: string;
+};
+
+export type DocumentTextCreateRequest = {
+  name: string;
+  text: string;
+  process_rule?: Record<string, unknown> | null;
+  original_document_id?: string | null;
+  doc_form?: string;
+  doc_language?: string;
+  indexing_technique?: string | null;
+  retrieval_model?: Record<string, unknown> | null;
+  embedding_model?: string | null;
+  embedding_model_provider?: string | null;
+};
+
+export type DocumentTextUpdateRequest = {
+  name?: string | null;
+  text?: string | null;
+  process_rule?: Record<string, unknown> | null;
+  doc_form?: string;
+  doc_language?: string;
+  retrieval_model?: Record<string, unknown> | null;
+};
+
+export type DocumentListOptions = {
+  page?: number;
+  limit?: number;
+  keyword?: string | null;
+  status?: string | null;
+};
+
+export type DocumentGetOptions = {
+  metadata?: "all" | "only" | "without";
+};
+
+export type SegmentCreateRequest = {
+  segments: Array<Record<string, unknown>>;
+};
+
+export type SegmentUpdateRequest = {
+  segment: {
+    content?: string | null;
+    answer?: string | null;
+    keywords?: string[] | null;
+    regenerate_child_chunks?: boolean;
+    enabled?: boolean | null;
+    attachment_ids?: string[] | null;
+  };
+};
+
+export type SegmentListOptions = {
+  page?: number;
+  limit?: number;
+  status?: string[];
+  keyword?: string | null;
+};
+
+export type ChildChunkCreateRequest = {
+  content: string;
+};
+
+export type ChildChunkUpdateRequest = {
+  content: string;
+};
+
+export type ChildChunkListOptions = {
+  page?: number;
+  limit?: number;
+  keyword?: string | null;
+};
+
+export type MetadataCreateRequest = {
+  type: "string" | "number" | "time";
+  name: string;
+};
+
+export type MetadataUpdateRequest = {
+  name: string;
+  value?: string | number | null;
+};
+
+export type DocumentMetadataDetail = {
+  id: string;
+  name: string;
+  value?: string | number | null;
+};
+
+export type DocumentMetadataOperation = {
+  document_id: string;
+  metadata_list: DocumentMetadataDetail[];
+  partial_update?: boolean;
+};
+
+export type MetadataOperationRequest = {
+  operation_data: DocumentMetadataOperation[];
+};
+
+export type HitTestingRequest = {
+  query?: string | null;
+  retrieval_model?: Record<string, unknown> | null;
+  external_retrieval_model?: Record<string, unknown> | null;
+  attachment_ids?: string[] | null;
+};
+
+export type DatasourcePluginListOptions = {
+  isPublished?: boolean;
+};
+
+export type DatasourceNodeRunRequest = {
+  inputs: Record<string, unknown>;
+  datasource_type: string;
+  credential_id?: string | null;
+  is_published: boolean;
+};
+
+export type PipelineRunRequest = {
+  inputs: Record<string, unknown>;
+  datasource_type: string;
+  datasource_info_list: Array<Record<string, unknown>>;
+  start_node_id: string;
+  is_published: boolean;
+  response_mode: "streaming" | "blocking";
+};
+
+export type KnowledgeBaseResponse = Record<string, unknown>;
+export type PipelineStreamEvent = Record<string, unknown>;
diff --git a/sdks/nodejs-client/src/types/workflow.ts b/sdks/nodejs-client/src/types/workflow.ts
new file mode 100644
index 0000000000..2b507c7352
--- /dev/null
+++ b/sdks/nodejs-client/src/types/workflow.ts
@@ -0,0 +1,12 @@
+import type { StreamEvent } from "./common";
+
+export type WorkflowRunRequest = {
+  inputs?: Record<string, unknown>;
+  user: string;
+  response_mode?: "blocking" | "streaming";
+  files?: Array<Record<string, unknown>> | null;
+};
+
+export type WorkflowRunResponse = Record<string, unknown>;
+
+export type WorkflowStreamEvent = StreamEvent<Record<string, unknown>>;
diff --git a/sdks/nodejs-client/src/types/workspace.ts
b/sdks/nodejs-client/src/types/workspace.ts new file mode 100644 index 0000000000..0ab6743063 --- /dev/null +++ b/sdks/nodejs-client/src/types/workspace.ts @@ -0,0 +1,2 @@ +export type WorkspaceModelType = string; +export type WorkspaceModelsResponse = Record<string, unknown>; diff --git a/sdks/nodejs-client/tests/test-utils.js b/sdks/nodejs-client/tests/test-utils.js new file mode 100644 index 0000000000..0d42514e9a --- /dev/null +++ b/sdks/nodejs-client/tests/test-utils.js @@ -0,0 +1,30 @@ +import axios from "axios"; +import { vi } from "vitest"; +import { HttpClient } from "../src/http/client"; + +export const createHttpClient = (configOverrides = {}) => { + const mockRequest = vi.fn(); + vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest }); + const client = new HttpClient({ apiKey: "test", ...configOverrides }); + return { client, mockRequest }; +}; + +export const createHttpClientWithSpies = (configOverrides = {}) => { + const { client, mockRequest } = createHttpClient(configOverrides); + const request = vi + .spyOn(client, "request") + .mockResolvedValue({ data: "ok", status: 200, headers: {} }); + const requestStream = vi + .spyOn(client, "requestStream") + .mockResolvedValue({ data: null }); + const requestBinaryStream = vi + .spyOn(client, "requestBinaryStream") + .mockResolvedValue({ data: null }); + return { + client, + mockRequest, + request, + requestStream, + requestBinaryStream, + }; +}; diff --git a/sdks/nodejs-client/tsconfig.json b/sdks/nodejs-client/tsconfig.json new file mode 100644 index 0000000000..d2da9a2a59 --- /dev/null +++ b/sdks/nodejs-client/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "Bundler", + "rootDir": "src", + "outDir": "dist", + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "strict": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "skipLibCheck": true + }, + "include": ["src/**/*.ts"] +} diff --git a/sdks/nodejs-client/tsup.config.ts b/sdks/nodejs-client/tsup.config.ts new file mode 100644 index 0000000000..522382c2a5 --- /dev/null +++ b/sdks/nodejs-client/tsup.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: ["src/index.ts"], + format: ["esm"], + dts: true, + clean: true, + sourcemap: true, + splitting: false, + treeshake: true, + outDir: "dist", +}); diff --git a/sdks/nodejs-client/vitest.config.ts b/sdks/nodejs-client/vitest.config.ts new file mode 100644 index 0000000000..5a0a8637a2 --- /dev/null +++ b/sdks/nodejs-client/vitest.config.ts @@ -0,0 +1,14 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + environment: "node", + include: ["**/*.test.js"], + coverage: { + provider: "v8", + reporter: ["text", "text-summary"], + include: ["src/**/*.ts"], + exclude: ["src/**/*.test.*", "src/**/*.spec.*"], + }, + }, +}); diff --git a/web/.dockerignore b/web/.dockerignore index 31eb66c210..91437a2259 100644 --- a/web/.dockerignore +++ b/web/.dockerignore @@ -7,8 +7,12 @@ logs # node node_modules +dist +build +coverage .husky .next +.pnpm-store # vscode .vscode @@ -22,3 +26,7 @@ node_modules # Jetbrains .idea + +# git +.git +.gitignore \ No newline at end of file diff --git a/web/.env.example b/web/.env.example index b488c31057..f2f25454cb 100644 --- a/web/.env.example +++ b/web/.env.example @@ -47,6 +47,8 @@ NEXT_PUBLIC_TOP_K_MAX_VALUE=10 # The maximum number of tokens for segmentation 
NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000 +# Used by web/docker/entrypoint.sh to overwrite/export NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH at container startup (Docker only) +INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000 # Maximum loop count in the workflow NEXT_PUBLIC_LOOP_NODE_MAX_COUNT=100 @@ -73,3 +75,6 @@ NEXT_PUBLIC_MAX_TREE_DEPTH=50 # The API key of amplitude NEXT_PUBLIC_AMPLITUDE_API_KEY= + +# Number of concurrent batch operations +NEXT_PUBLIC_BATCH_CONCURRENCY=5 diff --git a/web/.gitignore b/web/.gitignore index 048c5f6485..9de3dc83f9 100644 --- a/web/.gitignore +++ b/web/.gitignore @@ -54,3 +54,13 @@ package-lock.json # mise mise.toml + +# PWA generated files +public/sw.js +public/sw.js.map +public/workbox-*.js +public/workbox-*.js.map +public/fallback-*.js + +.vscode/settings.json +.vscode/mcp.json diff --git a/web/.nvmrc b/web/.nvmrc new file mode 100644 index 0000000000..5767036af0 --- /dev/null +++ b/web/.nvmrc @@ -0,0 +1 @@ +22.21.1 diff --git a/web/.oxlintrc.json b/web/.oxlintrc.json deleted file mode 100644 index 57eddd34fb..0000000000 --- a/web/.oxlintrc.json +++ /dev/null @@ -1,144 +0,0 @@ -{ - "plugins": [ - "unicorn", - "typescript", - "oxc" - ], - "categories": {}, - "rules": { - "for-direction": "error", - "no-async-promise-executor": "error", - "no-caller": "error", - "no-class-assign": "error", - "no-compare-neg-zero": "error", - "no-cond-assign": "warn", - "no-const-assign": "warn", - "no-constant-binary-expression": "error", - "no-constant-condition": "warn", - "no-control-regex": "warn", - "no-debugger": "warn", - "no-delete-var": "warn", - "no-dupe-class-members": "warn", - "no-dupe-else-if": "warn", - "no-dupe-keys": "warn", - "no-duplicate-case": "warn", - "no-empty-character-class": "warn", - "no-empty-pattern": "warn", - "no-empty-static-block": "warn", - "no-eval": "warn", - "no-ex-assign": "warn", - "no-extra-boolean-cast": "warn", - "no-func-assign": "warn", - "no-global-assign": "warn", - "no-import-assign": "warn", - "no-invalid-regexp": "warn", - "no-irregular-whitespace": "warn", - "no-loss-of-precision": "warn", - "no-new-native-nonconstructor": "warn", - "no-nonoctal-decimal-escape": "warn", - "no-obj-calls": "warn", - "no-self-assign": "warn", - "no-setter-return": "warn", - "no-shadow-restricted-names": "warn", - "no-sparse-arrays": "warn", - "no-this-before-super": "warn", - "no-unassigned-vars": "warn", - "no-unsafe-finally": "warn", - "no-unsafe-negation": "warn", - "no-unsafe-optional-chaining": "error", - "no-unused-labels": "warn", - "no-unused-private-class-members": "warn", - "no-unused-vars": "warn", - "no-useless-backreference": "warn", - "no-useless-catch": "error", - "no-useless-escape": "warn", - "no-useless-rename": "warn", - "no-with": "warn", - "require-yield": "warn", - "use-isnan": "warn", - "valid-typeof": "warn", - "oxc/bad-array-method-on-arguments": "warn", - "oxc/bad-char-at-comparison": "warn", - "oxc/bad-comparison-sequence": "warn", - "oxc/bad-min-max-func": "warn", - "oxc/bad-object-literal-comparison": "warn", - "oxc/bad-replace-all-arg": "warn", - "oxc/const-comparisons": "warn", - "oxc/double-comparisons": "warn", - "oxc/erasing-op": "warn", - "oxc/missing-throw": "warn", - "oxc/number-arg-out-of-range": "warn", - "oxc/only-used-in-recursion": "warn", - "oxc/uninvoked-array-callback": "warn", - "typescript/await-thenable": "warn", - "typescript/no-array-delete": "warn", - "typescript/no-base-to-string": "warn", - "typescript/no-confusing-void-expression": "warn", - "typescript/no-duplicate-enum-values": 
"warn", - "typescript/no-duplicate-type-constituents": "warn", - "typescript/no-extra-non-null-assertion": "warn", - "typescript/no-floating-promises": "warn", - "typescript/no-for-in-array": "warn", - "typescript/no-implied-eval": "warn", - "typescript/no-meaningless-void-operator": "warn", - "typescript/no-misused-new": "warn", - "typescript/no-misused-spread": "warn", - "typescript/no-non-null-asserted-optional-chain": "warn", - "typescript/no-redundant-type-constituents": "warn", - "typescript/no-this-alias": "warn", - "typescript/no-unnecessary-parameter-property-assignment": "warn", - "typescript/no-unsafe-declaration-merging": "warn", - "typescript/no-unsafe-unary-minus": "warn", - "typescript/no-useless-empty-export": "warn", - "typescript/no-wrapper-object-types": "warn", - "typescript/prefer-as-const": "warn", - "typescript/require-array-sort-compare": "warn", - "typescript/restrict-template-expressions": "warn", - "typescript/triple-slash-reference": "warn", - "typescript/unbound-method": "warn", - "unicorn/no-await-in-promise-methods": "warn", - "unicorn/no-empty-file": "warn", - "unicorn/no-invalid-fetch-options": "warn", - "unicorn/no-invalid-remove-event-listener": "warn", - "unicorn/no-new-array": "warn", - "unicorn/no-single-promise-in-promise-methods": "warn", - "unicorn/no-thenable": "warn", - "unicorn/no-unnecessary-await": "warn", - "unicorn/no-useless-fallback-in-spread": "warn", - "unicorn/no-useless-length-check": "warn", - "unicorn/no-useless-spread": "warn", - "unicorn/prefer-set-size": "warn", - "unicorn/prefer-string-starts-ends-with": "warn" - }, - "settings": { - "jsx-a11y": { - "polymorphicPropName": null, - "components": {}, - "attributes": {} - }, - "next": { - "rootDir": [] - }, - "react": { - "formComponents": [], - "linkComponents": [] - }, - "jsdoc": { - "ignorePrivate": false, - "ignoreInternal": false, - "ignoreReplacesDocs": true, - "overrideReplacesDocs": true, - "augmentsExtendsReplacesDocs": false, - "implementsReplacesDocs": false, - "exemptDestructuredRootsFromChecks": false, - "tagNamePreference": {} - } - }, - "env": { - "builtin": true - }, - "globals": {}, - "ignorePatterns": [ - "**/*.js" - ] -} \ No newline at end of file diff --git a/web/.storybook/preview.tsx b/web/.storybook/preview.tsx index 1f5726de34..5b38424776 100644 --- a/web/.storybook/preview.tsx +++ b/web/.storybook/preview.tsx @@ -1,8 +1,10 @@ import type { Preview } from '@storybook/react' +import type { Resource } from 'i18next' import { withThemeByDataAttribute } from '@storybook/addon-themes' import { QueryClient, QueryClientProvider } from '@tanstack/react-query' -import I18N from '../app/components/i18n' import { ToastProvider } from '../app/components/base/toast' +import { I18nClientProvider as I18N } from '../app/components/provider/i18n' +import commonEnUS from '../i18n/en-US/common.json' import '../app/styles/globals.css' import '../app/styles/markdown.scss' @@ -16,6 +18,14 @@ const queryClient = new QueryClient({ }, }) +const storyResources: Resource = { + 'en-US': { + // Preload the most common namespace to avoid missing keys during initial render; + // other namespaces will be loaded on demand via resourcesToBackend. 
+ common: commonEnUS as unknown as Record<string, unknown>, + }, +} + export const decorators = [ withThemeByDataAttribute({ themes: { @@ -28,7 +38,7 @@ export const decorators = [ (Story) => { return ( - + diff --git a/web/.storybook/utils/form-story-wrapper.tsx b/web/.storybook/utils/form-story-wrapper.tsx index 689c3a20ff..90349a0325 100644 --- a/web/.storybook/utils/form-story-wrapper.tsx +++ b/web/.storybook/utils/form-story-wrapper.tsx @@ -1,6 +1,6 @@ -import { useState } from 'react' import type { ReactNode } from 'react' import { useStore } from '@tanstack/react-form' +import { useState } from 'react' import { useAppForm } from '@/app/components/base/form' type UseAppFormOptions = Parameters<typeof useAppForm>[0] @@ -49,7 +49,12 @@ export const FormStoryWrapper = ({