Compare commits

...

1347 Commits

Author SHA1 Message Date
-LAN- fae6d4f2dd
chore: add default value for FILES_URL
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 21:30:12 +08:00
twwu 495324b85b Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-04 20:52:02 +08:00
twwu 65df8f6c57 refactor(DocumentList): Optimize dataset configuration handling and improve data source type checks 2025-09-04 20:51:56 +08:00
jyong d4ee915058 Merge branch 'feat/queue-based-graph-engine' into feat/rag-2 2025-09-04 20:51:48 +08:00
-LAN- 81e9d6f63a
fix: correct type checking for None values in code node output validation
- Fixed isinstance() checks to properly handle None values by checking None separately
- Fixed typo in STRING type validation where 'output_name' was hardcoded as string instead of variable
- Updated error message format to be consistent and more informative
- Updated test assertion to match new error message format
2025-09-04 20:39:37 +08:00
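The separate-None check described in this commit follows a common Python validation pattern; a minimal sketch of the idea (the function name, parameter names, and error wording below are illustrative, not the actual Dify code):

```python
def check_string_output(output_name: str, output_value: object) -> None:
    """Validate that a declared STRING output really is a string."""
    # None must be handled before isinstance(): isinstance(None, str) is simply
    # False, which would otherwise surface as a confusing "wrong type" error.
    if output_value is None:
        raise ValueError(f"Output '{output_name}' is None, expected a string.")
    if not isinstance(output_value, str):
        # Interpolate the variable rather than hardcoding a literal name,
        # so the message points at the offending output.
        raise ValueError(
            f"Output '{output_name}' must be a string, got {type(output_value).__name__}."
        )
```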
twwu 637afe1ab0 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-04 20:36:38 +08:00
twwu df70e28481 chore(docker): Update Docker images to version 2.0.0-beta.1 for API, worker, web, and plugin daemon services 2025-09-04 20:36:23 +08:00
jyong b9394d542c Merge branch 'feat/queue-based-graph-engine' into feat/rag-2
# Conflicts:
#	api/core/app/apps/advanced_chat/generate_task_pipeline.py
#	api/pyproject.toml
#	api/uv.lock
#	docker/docker-compose-template.yaml
#	docker/docker-compose.yaml
#	web/package.json
2025-09-04 20:30:08 +08:00
twwu 1fda319ecb chore(docker): Update Docker images to version 2.0.0-beta1 for API, worker, web, and plugin daemon services 2025-09-04 20:24:17 +08:00
-LAN- 9c2943183e
test: fix code node
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 20:17:28 +08:00
twwu 814cbee28e chore: Update GitHub Actions workflow to include 'feat/rag-2' path for build triggers 2025-09-04 20:11:16 +08:00
twwu 443e1fad32 chore: Bump version to 2.0.0-beta1 2025-09-04 20:06:15 +08:00
-LAN- f6a2a09815
test: fix code node
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 20:04:29 +08:00
jyong e21c133e1d change migration 2025-09-04 19:38:51 +08:00
-LAN- e229510e73
perf: eliminate lock contention in worker pool by removing callbacks
Remove worker idle/active callbacks that caused severe lock contention.
Instead, use sampling-based monitoring where worker states are queried
on-demand during scaling decisions. This eliminates the performance
bottleneck caused by workers acquiring locks 10+ times per second.

Changes:
- Remove callback parameters from Worker class
- Add properties to expose worker idle state directly
- Update WorkerPool to query worker states without callbacks
- Maintain scaling functionality with better performance
2025-09-04 19:37:31 +08:00
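A rough sketch of the sampling-based approach this commit describes, assuming each worker exposes its idle state as a property and the pool reads those properties only when making a scaling decision (all names below are illustrative, not the actual Dify classes):

```python
import threading
from queue import Queue


class Worker(threading.Thread):
    def __init__(self, task_queue: Queue):
        super().__init__(daemon=True)
        self._task_queue = task_queue
        self._idle = True  # written only by this worker, read by the pool

    @property
    def is_idle(self) -> bool:
        # Exposed directly; no idle/active callbacks firing under a lock
        # on every state transition.
        return self._idle

    def run(self) -> None:
        while True:
            task = self._task_queue.get()
            self._idle = False
            try:
                task()
            finally:
                self._idle = True


class WorkerPool:
    def __init__(self, workers: list[Worker]):
        self._workers = workers

    def idle_count(self) -> int:
        # Sampling-based monitoring: worker states are queried on demand
        # during scaling decisions instead of being pushed via callbacks.
        return sum(1 for w in self._workers if w.is_idle)
```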
-LAN- 36048d1526
feat(graph_engine): allow scaling down without a lock
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 19:32:07 +08:00
-LAN- aff7ca12b8
fix(code_node): type checking bypass
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 19:25:08 +08:00
twwu 8506288180 fix(firecrawl): map markdown content to content field in crawl results 2025-09-04 19:24:54 +08:00
-LAN- ad9eed2551
fix: disable scale for performance
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 19:11:22 +08:00
twwu ef64729771 refactor(notion-page-preview): update fetchNotionPagePreview to include pageType parameter for improved API request 2025-09-04 19:02:35 +08:00
jyong 771ea54a0b change migration 2025-09-04 18:37:58 +08:00
jyong 35ec0d25e8 change migration 2025-09-04 18:06:45 +08:00
-LAN- 07109846e0
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-04 17:48:08 +08:00
-LAN- 2aeaefccec
test: fix test 2025-09-04 17:47:36 +08:00
-LAN- 4d63bd2083
refactor(graph_engine): rename SimpleWorkerPool to WorkerPool 2025-09-04 17:47:13 +08:00
jyong 952fb16b91 change migration 2025-09-04 17:37:21 +08:00
jyong e9f11b4706 change migration 2025-09-04 17:29:09 +08:00
jyong 41b5596441 fix file name 2025-09-04 17:11:13 +08:00
twwu 55b936003f refactor(context-menu): conditionally render export option based on pipeline ID 2025-09-04 16:47:21 +08:00
WTW0313 a6b0071ca0 feat: add datasource category handling in marketplace list condition 2025-09-04 16:05:29 +08:00
-LAN- 226f14a20f
feat(graph_engine): implement scale down worker
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 15:35:20 +08:00
twwu ebeb17ec96 refactor(form-input-item): enhance type switch logic to include select input handling 2025-09-04 15:34:21 +08:00
twwu 313069a63e refactor(workflow): improve loading state rendering and enhance control prompt editor re-rendering logic 2025-09-04 15:27:55 +08:00
jyong 64fc0c9073 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-04 14:52:34 +08:00
jyong 94f2956e3a fix file name 2025-09-04 14:52:23 +08:00
zxhlyh 1bf04b7b74 fix: export workflow image 2025-09-04 14:43:14 +08:00
jyong 1e5f6c1475 fix file name 2025-09-04 14:07:58 +08:00
Novice 9f3c996be2 fix(pipeline-publish): publish not update dataset 2025-09-04 14:07:42 +08:00
twwu 0bcdd0db99 fix(field): add z-index to sticky header for improved layering in workflow nodes 2025-09-04 13:18:14 +08:00
twwu 5ecf382180 refactor(chunk-preview): improve key assignment for ChunkContainer components and enhance localFileList handling in Preparation component 2025-09-04 13:03:49 +08:00
autofix-ci[bot] 2b28aed4e2
[autofix.ci] apply automated fixes 2025-09-04 04:50:21 +00:00
-LAN- 938a845852
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-04 12:48:58 +08:00
twwu 04d01c8409 refactor(icons): remove unused Globe01 icon and related files 2025-09-04 12:37:01 +08:00
jyong 7efe215d2b fix file name 2025-09-04 12:28:52 +08:00
-LAN- ead8568bfc
fix: some errors reported by basedpyright
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 11:58:54 +08:00
Novice 25e3f4da04 fix(pipeline-tracing-log): datasource icon error 2025-09-04 11:44:10 +08:00
Novice 68f4d4b97c fix(rag-pipeline-dsl): dsl import session error 2025-09-04 11:03:45 +08:00
twwu c40dfe7060 fix(pipeline): correct API endpoint for stopping workflow runs 2025-09-04 10:39:23 +08:00
jyong 647af6fc69 fix file name 2025-09-04 10:36:44 +08:00
twwu 5114569017 refactor(document-picker): enhance chunking mode handling and improve parent mode label logic 2025-09-04 09:59:55 +08:00
-LAN- ed22d04ea0
test: remove outdated test case 2025-09-04 02:42:36 +08:00
-LAN- 04bbf540d9
chore: code format
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 02:33:53 +08:00
-LAN- 657c27ec75
feat(graph_engine): make runtime state read-only in layer
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 02:30:40 +08:00
-LAN- 16e9cd5ac5
feat(graph_runtime_state): prevent setting the variable pool after initialization. 2025-09-04 02:20:19 +08:00
-LAN- 61c79b0013
test: correct imported name 2025-09-04 02:15:46 +08:00
-LAN- 8332472944
refactor(graph_engine): rename Layer to GraphEngineLayer
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 02:11:31 +08:00
-LAN- fe3f03e50a
feat: add property-based access control to GraphRuntimeState
- Replace direct field access with private attributes and property decorators
- Implement deep copy protection for mutable objects (dict, LLMUsage)
- Add helper methods: set_output(), get_output(), update_outputs()
- Add increment_node_run_steps() and add_tokens() convenience methods
- Update loop_node and event_handlers to use new accessor methods
- Add comprehensive unit tests for immutability and validation
- Ensure backward compatibility with existing property access patterns
2025-09-04 02:08:58 +08:00
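A condensed sketch of what property-based access control with deep-copy protection can look like, using the helper names mentioned in this commit (set_output, get_output, update_outputs, add_tokens); the class and implementation details are illustrative, not the actual GraphRuntimeState:

```python
from copy import deepcopy
from typing import Any


class RuntimeStateSketch:
    def __init__(self) -> None:
        self._outputs: dict[str, Any] = {}  # private attribute behind a property
        self._total_tokens = 0

    @property
    def outputs(self) -> dict[str, Any]:
        # Hand back a deep copy so callers cannot mutate internal state
        # by editing the returned dict in place.
        return deepcopy(self._outputs)

    def set_output(self, key: str, value: Any) -> None:
        self._outputs[key] = deepcopy(value)

    def get_output(self, key: str, default: Any = None) -> Any:
        return deepcopy(self._outputs.get(key, default))

    def update_outputs(self, updates: dict[str, Any]) -> None:
        for key, value in updates.items():
            self.set_output(key, value)

    def add_tokens(self, tokens: int) -> None:
        if tokens < 0:
            raise ValueError("tokens must be non-negative")
        self._total_tokens += tokens
```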
-LAN- 9c96b23d55
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-04 00:27:08 +08:00
twwu f811855f79 fix(workflow): ensure variable updates only occur for matching selectors in updateNodeVars 2025-09-03 23:28:32 +08:00
twwu c7510d3f54 refactor(panel): simplify outputSchema mapping for improved readability 2025-09-03 23:06:12 +08:00
jyong e3092837e4 fix file name 2025-09-03 21:26:28 +08:00
twwu 46731dc8a1 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-03 20:33:01 +08:00
twwu 1214942eb7 refactor(datasets): rename 'markdown' to 'content' for consistency across components 2025-09-03 20:32:48 +08:00
jyong dd89c9aa21 fix preview and recommend plugins 2025-09-03 19:51:07 +08:00
jyong 5ab3cd7213 fix preview and recommend plugins 2025-09-03 19:42:20 +08:00
jyong 49d268d4a3 fix preview and recommend plugins 2025-09-03 19:20:14 +08:00
Novice aa670c8982 fix(datasource): add datasource icon in tracing panel 2025-09-03 18:23:23 +08:00
Joel c8d60f372d fix: in pipeline not show the node config right 2025-09-03 18:21:51 +08:00
twwu c4f0691454 refactor(workflow): update fetchInspectVars calls to accept empty parameters 2025-09-03 18:15:14 +08:00
jyong 451948d49c Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-03 18:10:14 +08:00
jyong fd3b55cc16 fix preview and recommend plugins 2025-09-03 18:09:56 +08:00
QuantumGhost 0212f0de9f fix(api): fix workflow execution 2025-09-03 17:11:43 +08:00
twwu 9eb1c15906 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-03 16:51:57 +08:00
twwu 110211d966 refactor(uploader): replace YAML icon with pipeline icon and update label text 2025-09-03 16:51:49 +08:00
Joel 08d6fcfd52 fix: large data markdown ui problems 2025-09-03 16:48:41 +08:00
twwu d755b2885e refactor(workflow): enhance handleExportDSL function and clean up code structure 2025-09-03 16:22:59 +08:00
twwu b4c98daa8d refactor(workflow): update RAG tool suggestions and improve filtering logic 2025-09-03 16:05:55 +08:00
jyong 58e598dac8 Merge branch 'main' into feat/rag-2 2025-09-03 15:01:41 +08:00
jyong d4aed3df5c Merge branch 'feat/queue-based-graph-engine' into feat/rag-2
# Conflicts:
#	api/core/memory/token_buffer_memory.py
#	api/core/rag/extractor/notion_extractor.py
#	api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
#	api/core/variables/variables.py
#	api/core/workflow/graph/graph.py
#	api/core/workflow/graph_engine/entities/event.py
#	api/services/dataset_service.py
#	web/app/components/app-sidebar/index.tsx
#	web/app/components/base/tag-management/selector.tsx
#	web/app/components/base/toast/index.tsx
#	web/app/components/datasets/create/website/index.tsx
#	web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx
#	web/app/components/workflow/header/version-history-button.tsx
#	web/app/components/workflow/hooks/use-inspect-vars-crud-common.ts
#	web/app/components/workflow/hooks/use-workflow-interactions.ts
#	web/app/components/workflow/panel/version-history-panel/index.tsx
#	web/service/base.ts
2025-09-03 15:01:06 +08:00
Joel c422d732d2 fix: tool bool input can choose file 2025-09-03 14:41:29 +08:00
Joel df0fe49fcc fix: one step run schema type file not support 2025-09-03 14:28:13 +08:00
zxhlyh 97875d2b55 plugin dependencies select all 2025-09-03 14:27:29 +08:00
-LAN- 8c97937cae
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-03 13:53:43 +08:00
twwu 9a79d8941e Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-03 12:59:34 +08:00
twwu 9d3772f7d6 feat(workflow): add DisplayContent component for improved variable inspection and integrate schema type handling 2025-09-03 12:59:22 +08:00
QuantumGhost d2e341367e fix(api): fix relations of WorkflowNodeExecutionModel not preloaded
`WorkflowNodeExecutionModel.offload_data` should be preloaded to
provide information about the offloading of the execution record.

`RagPipelineService.get_node_last_run` does not use
`DifyAPISQLAlchemyWorkflowNodeExecutionRepository`, so its loading
logic is unchanged.

In this commit we migrate to calling
`DifyAPISQLAlchemyWorkflowNodeExecutionRepository` to avoid this issue.
2025-09-03 12:30:20 +08:00
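For background, eager-loading a relationship in SQLAlchemy so it is available outside the originating query usually takes this shape (a generic sketch, assuming `WorkflowNodeExecutionModel` and its `offload_data` relationship are importable from the project's models; the function itself is hypothetical, not the repository's actual query):

```python
from sqlalchemy import select
from sqlalchemy.orm import Session, selectinload


def load_with_offload_data(session: Session, execution_id: str):
    # selectinload() preloads the offload_data relationship up front, so later
    # attribute access does not trigger a lazy load on a detached instance.
    stmt = (
        select(WorkflowNodeExecutionModel)  # assumed import from the app's models
        .where(WorkflowNodeExecutionModel.id == execution_id)
        .options(selectinload(WorkflowNodeExecutionModel.offload_data))
    )
    return session.scalars(stmt).first()
```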
QuantumGhost e761f38d26 fix(api): adjust gevent patching 2025-09-03 12:29:39 +08:00
QuantumGhost e2ff7fac77 fix(api): fix variable truncation related field not returned. 2025-09-03 12:29:38 +08:00
jyong 617dc247f5 fix preview and recommend plugins 2025-09-03 12:15:03 +08:00
-LAN- f6acff4cce
chore: remove unused variables
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-03 12:12:27 +08:00
-LAN- 3fa48cb5cf
chore: remove ty-check from Python style check.
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-03 12:05:41 +08:00
-LAN- b81745aed8
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-03 11:56:05 +08:00
jyong e1b6da21f4 fix preview and recommend plugins 2025-09-03 11:35:41 +08:00
Joel b10d7d5b22 fix: datasource file can not be chosen 2025-09-03 11:31:53 +08:00
-LAN- 8c41d95d03
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-03 11:06:42 +08:00
Joel 885c7d26e5 feat: handle choose select 2025-09-03 11:05:15 +08:00
Novice 7896eeec5b fix: correct run history tracing sequence 2025-09-03 10:56:23 +08:00
Novice 38e24922e1 Merge branch 'feat/tool-rag-tag' into feat/rag-2 2025-09-03 10:27:02 +08:00
Novice d2787dc925 feat: tools add rag tag 2025-09-03 10:25:08 +08:00
twwu 706969d812 fix(workflow): enhance variable inspection by integrating schema type handling and refactoring logic 2025-09-03 10:17:38 +08:00
-LAN- 9d004a0971
test: fix test
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-03 02:11:37 +08:00
autofix-ci[bot] 02fcd08c08
[autofix.ci] apply automated fixes 2025-09-02 17:34:07 +00:00
-LAN- 77a9a73d0d
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-03 01:33:17 +08:00
jyong 7623dc14bb fix preview and recommend plugins 2025-09-02 20:12:45 +08:00
twwu 1ad46d0962 fix(pipeline): add handling for RAG pipeline variables in workflow state 2025-09-02 20:11:38 +08:00
twwu 5e854238b0 fix(workflow): improve formatting and add checkbox type to input variable type mapping 2025-09-02 19:57:18 +08:00
jyong 343f1a375f fix preview and recommend plugins 2025-09-02 19:41:29 +08:00
twwu fc4bc08796 feat(workflow): enhance RAG recommended plugins structure and update related components 2025-09-02 19:11:03 +08:00
Joel 60da4c9048 feat: set var inspect schema type 2025-09-02 18:44:49 +08:00
twwu 32a009654f feat(input-field): add isEditMode prop to InputFieldForm and update handling of variable changes 2025-09-02 18:19:40 +08:00
twwu 1522fd50df Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-02 17:25:54 +08:00
twwu 00b5772012 feat(portal): add customContainer prop to PortalToFollowElem for flexible rendering 2025-09-02 17:25:47 +08:00
jyong 8467494706 fix preview and recommend plugins 2025-09-02 17:02:25 +08:00
jyong a8cd1e2483 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-02 16:44:33 +08:00
jyong 7ace7e65e1 fix preview and recommend plugins 2025-09-02 16:44:18 +08:00
twwu 273dae6738 fix(header): update boolean form type to checkbox and improve JSX formatting in form input component 2025-09-02 16:26:41 +08:00
-LAN- 1770b93e5b
chore(graph_engine): Add a TODO comment in `_update_response_outputs` in event_handlers
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-02 15:20:03 +08:00
-LAN- d8ff4aa9ba
feat(graph_engine): Handle NodeRunAgentLogEvent
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-02 15:02:07 +08:00
-LAN- 9f8f21bf87
chore: remove backup files
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-02 15:01:58 +08:00
jyong dfb967773b Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-02 11:56:13 +08:00
jyong 56fc9088dd add recommended rag plugin endpoint 2025-09-02 11:56:05 +08:00
-LAN- 0b0dc63f29
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-02 11:52:25 +08:00
twwu bf09c3eb10 feat(rag): integrate RAG recommended plugins into workflow component 2025-09-02 10:43:57 +08:00
twwu e5e52ab4b9 refactor(value-content): improve JSX formatting for readability in variable inspect component 2025-09-02 10:19:01 +08:00
jyong 826b9d5b21 add recommended rag plugin endpoint 2025-09-01 20:41:14 +08:00
jyong 633de2cb47 add recommended rag plugin endpoint 2025-09-01 20:12:27 +08:00
jyong d94e03c72b add recommended rag plugin endpoint 2025-09-01 19:38:28 +08:00
QuantumGhost 89ec13ec67 fix(api): fix ToolNode._extract_variable_selector_to_variable_mapping
The original selector syntax does not match our current implementation
for injecting user inputs into VariablePool.
2025-09-01 18:11:33 +08:00
jyong 1103130f81 add recommended rag plugin endpoint 2025-09-01 18:03:35 +08:00
jyong 493dae239f add recommended rag plugin endpoint 2025-09-01 17:08:37 +08:00
QuantumGhost eb7b21c7f1 Merge remote-tracking branch 'upstream/feat/rag-2' into feat/rag-2 2025-09-01 17:06:53 +08:00
QuantumGhost ad5a2c77c1 Merge remote-tracking branch 'upstream/feat/big-var-value-show-fe' into feat/rag-2 2025-09-01 17:06:23 +08:00
QuantumGhost acbf27eb65 chore(api): adjust migration order and date 2025-09-01 15:20:57 +08:00
QuantumGhost e2ae89e08d fix(tests): fix broken tests and linter issues 2025-09-01 14:55:35 +08:00
jyong 1676207776 add recommended rag plugin endpoint 2025-09-01 14:50:02 +08:00
jyong 8c780df8fd Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-01 14:46:08 +08:00
jyong 7920714f49 add recommended rag plugin endpoint 2025-09-01 14:45:56 +08:00
-LAN- 8433cf4437
refactor(graph_engine): Merge event_collector and event_emitter into event_manager
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 13:15:58 +08:00
-LAN- bb5d52539c
refactor(graph_engine): Merge branch_handler into edge_processor
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 12:53:06 +08:00
-LAN- 88622f70fb
refactor(graph_engine): Move setup methods into `__init__`
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 12:08:03 +08:00
twwu a41af6f322 refactor: update file upload and download handling for pipeline format 2025-09-01 11:13:11 +08:00
zxhlyh ce3eb0fcbb fix: rag pipeline template 2025-09-01 10:23:08 +08:00
-LAN- 0fdb1b2bc9
refactor(graph_engine): Correct private attributes and private methods naming
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 04:37:23 +08:00
-LAN- a5cb9d2b73
refactor(graph_engine): inline output_registry into response_coordinator
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 03:59:53 +08:00
-LAN- 64c1234724
refactor(graph_engine): Merge worker management into one WorkerPool
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 03:23:47 +08:00
-LAN- 202fdfcb81
refactor(graph_engine): Remove backward compatibility code
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 02:41:16 +08:00
-LAN- e2f4c9ba8d
refactor(graph_engine): Merge state managers into unified_state_manager
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 02:08:08 +08:00
-LAN- 546d75d84d
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-01 00:29:28 +08:00
QuantumGhost 63c035d8a2 chore(api): migrate import for db 2025-08-31 16:46:25 +08:00
QuantumGhost e9e7d4f3cd Merge branch 'feat/rag-2' into feat/workflow-draft-var-optimize 2025-08-31 15:17:23 +08:00
QuantumGhost c72d847ac7 feat(api): support array[boolean] truncation 2025-08-31 14:32:53 +08:00
QuantumGhost 982fd9170c WIP: test(api): tests for truncation logic 2025-08-31 13:45:16 +08:00
QuantumGhost 91fac9b720 WIP: feat(api): draft var cleanup task 2025-08-31 13:45:16 +08:00
QuantumGhost a527bd42b5 chore(api): optimize file url signing 2025-08-31 13:45:16 +08:00
QuantumGhost 621b75b343 feat(api): implement truncation for SSE events 2025-08-31 13:45:15 +08:00
QuantumGhost 6b9d2e98b9 feat(api): Implement truncation for WorkflowNodeExecution 2025-08-31 13:44:39 +08:00
QuantumGhost 2fd337e610 feat(api): add WorkflowNodeExecutionOffload model 2025-08-31 13:44:39 +08:00
QuantumGhost a7aa17e361 chore(api): remove orphan files in draft var cleanup script 2025-08-31 13:44:38 +08:00
QuantumGhost 13eb9f7d7d feat(api): implement truncation for draft var 2025-08-31 13:44:38 +08:00
QuantumGhost d7db58cabd feat(api): implement VariableTruncator 2025-08-31 13:44:38 +08:00
QuantumGhost 0cf8a80bdd chore(api): Introduce variable truncation configuration 2025-08-31 13:44:38 +08:00
QuantumGhost a10586c8ea fixup! feat(api): Add migration for WorkflowDraftVariableFile 2025-08-31 13:44:38 +08:00
QuantumGhost 40faa9ce16 refactor(api): Inject db dependency to FileService 2025-08-31 13:44:38 +08:00
QuantumGhost 58dfae60f0 feat(api): Add migration for WorkflowDraftVariableFile 2025-08-31 13:44:38 +08:00
QuantumGhost de08b2310c feat(api): Introduce WorkflowDraftVariableFile model
This model is used to track offloaded variable values for
`WorkflowDraftVariable`.
2025-08-31 13:44:38 +08:00
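The tracking model described here could be pictured roughly as below; the columns are purely illustrative guesses at what an offload record needs (a pointer from the draft variable to the stored file and its size), not the actual schema:

```python
from sqlalchemy import String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class WorkflowDraftVariableFileSketch(Base):
    """Illustrative shape only: links a draft variable to its offloaded value."""

    __tablename__ = "workflow_draft_variable_files_sketch"

    id: Mapped[str] = mapped_column(String(36), primary_key=True)
    # Which WorkflowDraftVariable this offload record belongs to.
    variable_id: Mapped[str] = mapped_column(String(36), index=True)
    # Where the full (truncated-away) value is stored, e.g. an UploadFile id.
    upload_file_id: Mapped[str] = mapped_column(String(36))
    # Size of the offloaded payload in bytes.
    size: Mapped[int] = mapped_column()
```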
QuantumGhost e9c6038192 docs(api): update docs for UploadFile 2025-08-31 13:44:38 +08:00
-LAN- a8fe4ea802
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-30 16:36:10 +08:00
-LAN- 82193580de
chore: improve typing
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-30 16:35:57 +08:00
-LAN- 1fd27cf3ad
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-30 00:13:45 +08:00
twwu 1e9bfd8872 feat(chunk-card-list): implement ChunkCard and QAItem components, refactor ChunkCardList to utilize new structure and types 2025-08-29 23:28:43 +08:00
-LAN- 11d32ca87d
test: fix web test
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-29 23:20:28 +08:00
-LAN- 5415d0c6d1
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-29 23:17:30 +08:00
-LAN- d8af8ae4e6
fix: update workflow service tests for new graph engine
- Update method calls from _handle_node_run_result to _handle_single_step_result
- Add required fields (id, node_id, node_type, start_at) to graph events
- Use proper NodeType enum values instead of strings
- Fix imports to use correct modules (Node instead of BaseNode)
- Ensure event generators return proper generator objects

These tests were failing because the internal implementation changed
with the new graph engine architecture.
2025-08-29 23:04:33 +08:00
-LAN- 04e5d4692f
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-29 22:34:47 +08:00
-LAN- 3aa48efd0a
test(test_workflow_service): Use new engine's method.
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-29 22:06:10 +08:00
jyong c2afb84884 fix chunk format 2025-08-29 17:10:18 +08:00
-LAN- 8eb78c04b2
chore(token_buffer_memory): code format
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-29 17:02:51 +08:00
-LAN- 22ee318cf8
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-29 17:01:42 +08:00
jyong 3c0adfb48a fix chunk format 2025-08-29 16:27:22 +08:00
twwu b3dbf9fe94 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-29 16:20:54 +08:00
twwu a597ae1a8a feat(block-selector): add RAG tool suggestions component and integrate with existing tools 2025-08-29 16:20:43 +08:00
-LAN- f2bc4f5d87
fix: resolve type error in node_factory by using type guard for node_type_str 2025-08-29 16:16:58 +08:00
-LAN- d7d456349d
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-29 16:14:04 +08:00
jyong 65215135e5 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-29 14:49:05 +08:00
jyong 5cf795f2b8 fix preview run 2025-08-29 14:48:51 +08:00
twwu 1c8190f142 feat(search-box): enhance marketplace search box with new triggers and conditional rendering 2025-08-29 14:45:04 +08:00
-LAN- dce4d0ff80
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-29 13:22:13 +08:00
-LAN- 3dee8064ba
feat: enhance typing 2025-08-29 13:17:02 +08:00
jyong d0dd728e6c Merge branch 'main' into feat/rag-2
# Conflicts:
#	api/core/app/entities/queue_entities.py
#	api/core/workflow/graph_engine/entities/event.py
2025-08-29 11:29:51 +08:00
jyong 0bb3407385 fix preview run 2025-08-29 11:25:08 +08:00
kurokobo 05c21883f9
fix(pipeline): make kb node deletable (#24731) 2025-08-29 09:19:09 +08:00
jyong 5b4335c4b5 fix preview run 2025-08-28 22:02:22 +08:00
jyong 39080eed10 fix preview run 2025-08-28 22:01:49 +08:00
jyong 1db04aa729 Merge branch 'feat/queue-based-graph-engine' into feat/rag-2 2025-08-28 18:12:49 +08:00
jyong 64772fb413 fix preview run 2025-08-28 18:12:27 +08:00
twwu 1022e67fb6 fix: adjust padding in search box component for improved layout 2025-08-28 18:02:19 +08:00
twwu a67589d5db feat: add title property to DataSourceDefaultValue type in block-selector 2025-08-28 17:25:26 +08:00
-LAN- bfbb36756a
feat(graph_engine): Add NodeExecutionType.ROOT and auto mark skipped in Graph.init
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 16:41:51 +08:00
twwu 9448d49305 feat: add 'rag' tag to plugin constants and translations in multiple languages 2025-08-28 16:21:53 +08:00
twwu 169a8edc28 refactor: update import statement style in IfElseNode component 2025-08-28 15:47:39 +08:00
-LAN- d7e0c5f759
chore: use 'XXX | None' instead of Optional[XXX] in graph.py 2025-08-28 15:45:22 +08:00
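The `Optional[XXX]` to `XXX | None` migration mentioned in several of these commits is just PEP 604 union syntax; for example:

```python
from typing import Optional


def old_style(node_id: Optional[str]) -> Optional[int]:
    ...


def new_style(node_id: str | None) -> int | None:
    ...
```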
twwu 9c93a49605 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-28 15:40:11 +08:00
twwu f6ed1c5643 fix: update chunk structure tip messages in English and Chinese translations for clarity 2025-08-28 15:40:02 +08:00
jyong 334bc8f1a2 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-28 15:39:36 +08:00
jyong a9f7ee029e fix preview run 2025-08-28 15:39:22 +08:00
-LAN- c396788128
chore(graph_engine): add final mark to classes
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 15:38:35 +08:00
twwu edd41a30eb feat: add AddChunks icon and related components for knowledge base chunk structure 2025-08-28 15:31:30 +08:00
Joel 580201ed2c merge main 2025-08-28 14:57:30 +08:00
Joel e955a603c6 fix: can choose file in add in variable aggregator 2025-08-28 14:19:07 +08:00
Joel 740f1c5f2c fix: tools value not update caused data outdated 2025-08-28 14:06:28 +08:00
twwu 9a13cb5bdf refactor: remove unused state management in usePipelineRun hook 2025-08-28 13:58:03 +08:00
twwu 048feb4165 refactor: update local file and online drive state management in create-from-pipeline components 2025-08-28 13:47:20 +08:00
twwu 843b14ccc6 refactor: Refactor online drive breadcrumbs navigation 2025-08-28 10:56:51 +08:00
-LAN- e3a7b1f691
fix: type hints
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 05:24:18 +08:00
-LAN- 8aab7f49c3
chore(graph_engine): Use `XXX | None` instead of `Optional[XXX]` 2025-08-28 05:09:33 +08:00
autofix-ci[bot] 1e12c1cbf2
[autofix.ci] apply automated fixes 2025-08-27 21:00:36 +00:00
-LAN- affedd6ce4
chore(graph_engine): Use `XXX | None` instead of `Optional[XXX]` 2025-08-28 04:59:49 +08:00
-LAN- ef21097774
refactor(graph_engine): Remove unnecessary check from SkipPropagator
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:45:26 +08:00
-LAN- 1d377fe994
refactor(graph_engine): Use _ to mark unused variable in BranchHandler
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:44:45 +08:00
-LAN- c82697f267
refactor(graph_engine): Remove `node_id` from SkipPropagator.skip_branch_paths
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:43:56 +08:00
-LAN- 98b25c0bbc
refactor(graph_engine): Convert attrs to private in error_handler
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:42:37 +08:00
-LAN- 1cd0792606
chore(graph_events): Improve type hints
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:41:48 +08:00
-LAN- 7cbf4093f4
chore(graph_engine): Use `TYPE | None` instead of `Optional`
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:30:50 +08:00
-LAN- 8129ca7c05
chore(graph_engine): Move error_strategy.py to protocols/
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:29:32 +08:00
-LAN- 65617f000d
feat(event_collector): Update to use ReadWriteLock 2025-08-28 03:26:42 +08:00
-LAN- 635eff2e25
test(graph_engine): remove outdated tests
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 02:53:19 +08:00
-LAN- 55085a9ca2
chore(graph_engine): add type hint for event_queue
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 02:38:56 +08:00
-LAN- 9dc1e9724e
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-28 02:26:40 +08:00
jyong 4403a26f37 merge new graph engine 2025-08-27 20:44:14 +08:00
jyong 88abaa840c merge new graph engine 2025-08-27 18:33:38 +08:00
Joel 4e2c3bfb05 fix: not obj type struct schema render error 2025-08-27 18:32:04 +08:00
Joel a644153e9f feat: just use chunk type in knowledge base 2025-08-27 18:16:46 +08:00
twwu 0316eb6064 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-27 18:09:55 +08:00
twwu 6a56649be2 feat(web): add support for Tool block type in BlockIcon component 2025-08-27 18:09:48 +08:00
Joel 8a585607c1 fix: tool schema not right 2025-08-27 18:07:33 +08:00
-LAN- c3f66e2901
Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-27 18:05:35 +08:00
jyong 90d72f5ddf merge new graph engine 2025-08-27 17:46:46 +08:00
jyong 6c8212d509 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-27 17:46:05 +08:00
Joel 19d3ba42e5 chore: schema type change 2025-08-27 17:40:05 +08:00
twwu ee144452e2 Merge branch 'main' into feat/rag-2 2025-08-27 17:28:21 +08:00
twwu 367b2d0320 refactor(web): streamline data source before run form and enhance run handling logic 2025-08-27 16:56:33 +08:00
Joel bd294ffe0d feat: file schema file replace 2025-08-27 16:07:31 +08:00
jyong 392514fa13 Merge branch 'feat/queue-based-graph-engine' into feat/rag-2
# Conflicts:
#	api/commands.py
#	api/core/app/apps/common/workflow_response_converter.py
#	api/core/llm_generator/llm_generator.py
#	api/core/plugin/entities/plugin.py
#	api/core/plugin/impl/tool.py
#	api/core/rag/index_processor/index_processor_base.py
#	api/core/workflow/entities/workflow_execution.py
#	api/core/workflow/entities/workflow_node_execution.py
#	api/core/workflow/enums.py
#	api/core/workflow/graph_engine/entities/graph.py
#	api/core/workflow/graph_engine/graph_engine.py
#	api/core/workflow/nodes/enums.py
#	api/services/dataset_service.py
2025-08-27 16:05:59 +08:00
autofix-ci[bot] 86e7cb713c
[autofix.ci] apply automated fixes 2025-08-27 07:38:26 +00:00
-LAN- 0f29244459
fix: test
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-27 15:37:37 +08:00
autofix-ci[bot] 48cbf4c78f
[autofix.ci] apply automated fixes 2025-08-27 15:33:30 +08:00
-LAN- 8c35663220
feat: queue-based graph engine
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-27 15:33:28 +08:00
Joel 4cb286c765 feat: handle file schema show 2025-08-27 14:46:44 +08:00
zxhlyh ff33d42c55 rag pipeline initial template 2025-08-27 14:39:39 +08:00
twwu e1a2755e3b fix(web): add spinning animation to loader icon in variable inspect trigger for better UX 2025-08-27 13:55:43 +08:00
twwu b1f348fb31 refactor(web): reorganize imports in document index component for improved clarity 2025-08-27 13:48:04 +08:00
twwu 5f0bae0ae5 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-27 13:46:10 +08:00
twwu 57c242fff3 Update dataset pipeline header text and translations for improved clarity 2025-08-27 13:46:03 +08:00
kurokobo 1814b99c79
fix(web): correct title color in option card in dark mode (#24579) 2025-08-27 11:27:25 +08:00
Joel f24f573731 feat: show struct schema in output 2025-08-27 11:18:22 +08:00
twwu da48e54778 Merge branch 'main' into feat/rag-2 2025-08-27 11:16:27 +08:00
Joel 2db699522c feat: compare schema type 2025-08-27 10:48:38 +08:00
jyong 5c15fe7b01 fix pipeline export 2025-08-26 18:52:24 +08:00
jyong da9b38f642 pipeline name 2025-08-26 18:26:25 +08:00
jyong 918958743f Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-26 17:18:49 +08:00
jyong 51c3f1b2e2 add pipeline async run 2025-08-26 17:18:43 +08:00
twwu bdcd9ad9cb refactor(workflow): update output schema handling in tool and data source nodes for improved integration with plugin info 2025-08-26 16:48:18 +08:00
Harry 1f5fd13359 refactor(plugin_migration): improve code readability by formatting the install function signature 2025-08-26 16:03:59 +08:00
Harry 61f2f8fd31 refactor(schemas): update titles in JSON schemas for consistency and clarity 2025-08-26 16:03:59 +08:00
jyong 60fb242f27 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-26 15:52:01 +08:00
jyong 85f0d31fab add pipeline async run 2025-08-26 15:51:49 +08:00
Harry a282b6cea4 refactor(tests): remove unused TestPerformance class from schema resolver tests 2025-08-26 15:47:34 +08:00
Harry 46019ea927 refactor(resolver): enhance schema reference resolution with improved error handling and caching 2025-08-26 15:47:34 +08:00
Harry 7e20273bce refactor(resolver): implement BFS approach for resolving references in Dify schemas 2025-08-26 15:47:34 +08:00
jyong 1d2d0ff49f add pipeline async run 2025-08-26 15:32:39 +08:00
jyong c77bdd1fb3 add pipeline async run 2025-08-26 15:20:40 +08:00
Harry 0f3ca1d8f4 fix: ruff 2025-08-26 12:51:40 +08:00
Harry d1be9544fb fix: restful to restx 2025-08-26 12:51:40 +08:00
jyong 1692a7bd1b Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-26 12:06:43 +08:00
jyong c3c4ef3a8e add pipeline async run 2025-08-26 12:06:34 +08:00
Harry c911ac8c01 refactor(console): add spec controller import and enhance tool output schema resolution 2025-08-26 11:07:12 +08:00
twwu 1bc77204b1 fix(use-initial): handle potential undefined nodeData by providing a fallback object 2025-08-26 10:23:13 +08:00
jyong 1ad7b0e852 add pipeline async run 2025-08-25 18:26:05 +08:00
twwu e0e3224413 refactor(option-card): enhance styling and accessibility by updating class names and adding title attribute 2025-08-25 17:46:27 +08:00
jyong f418164648 add pipeline async run 2025-08-25 17:33:49 +08:00
jyong 4fc498bd48 fix 2025-08-25 16:55:08 +08:00
twwu 14d8788dac refactor: remove unused DataSourceType import and simplify iconType handling in DatasetDetailLayout 2025-08-25 16:43:52 +08:00
jyong 101d6504fb fix 2025-08-25 16:14:22 +08:00
twwu d440577913 refactor(operations): remove document download functionality and associated UI elements 2025-08-25 15:32:48 +08:00
twwu 85fd97e090 Merge branch 'main' into feat/rag-2 2025-08-25 15:30:18 +08:00
twwu 412e4b04f3 fix(use-settings-display): correct translation key for keyword search in settings display 2025-08-25 15:20:59 +08:00
jyong cc1f0d4d8d fix 2025-08-25 13:55:42 +08:00
Joel 1b9c817dba feat: add process data truncate 2025-08-25 11:26:57 +08:00
zxhlyh b7ed2cade1 initial template 2025-08-22 18:12:24 +08:00
twwu 8c44151e6f Merge branch 'main' into feat/rag-2 2025-08-22 17:40:34 +08:00
twwu 570b644a7e refactor(header, option-card): improve layout and responsiveness by adjusting flex properties and adding title attributes 2025-08-22 16:49:28 +08:00
twwu 5824a2d71c refactor(use-tool-icon): make data parameter optional and update usage in variable inspect components 2025-08-22 16:02:14 +08:00
twwu 83cc3b4710 refactor(credential-selector): enhance layout with overflow handling for better UI responsiveness 2025-08-22 14:49:24 +08:00
Joel 0ca7c29c47 feat: preview shows the large data 2025-08-22 11:35:54 +08:00
jyong 2f6c51927e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-22 11:34:13 +08:00
jyong 6b7ea9885c support name generate 2025-08-22 11:34:01 +08:00
Joel 68ba41e57e chore: fix truncate change problem 2025-08-22 11:08:54 +08:00
twwu 8ff4f34773 refactor(create-pipeline): remove unused CreateForm and CreateModal components, streamline dataset creation process 2025-08-22 10:49:23 +08:00
jyong 4545f509a5 support name generate 2025-08-21 19:15:16 +08:00
twwu af74019d6e refactor(knowledge-base): update title from 'Question-Answer' to 'Q&A' for improved clarity 2025-08-21 18:13:44 +08:00
twwu d3b6631df8 refactor(retrieval-method): replace all instances of invertedIndex with keyword_search in components and translations 2025-08-21 17:31:25 +08:00
twwu 5699adf69c refactor(result-preview): update chunk handling to use GeneralChunkPreview type and extract content 2025-08-21 17:07:20 +08:00
twwu 42935997aa refactor(retrieval-method): update retrieval method from invertedIndex to keywordSearch across components 2025-08-21 16:29:23 +08:00
Joel f82f06bdd0 feat: support filter file vars 2025-08-21 16:11:32 +08:00
zxhlyh c5ee6e09d4 fix: output schema file type 2025-08-21 15:12:53 +08:00
twwu 60fafc524c feat(data-source): add LOCAL_FILE_OUTPUT to constants and integrate into panel for local file handling 2025-08-21 14:17:25 +08:00
twwu cecef01bf7 feat(rag-pipeline): enhance result preview with chunk formatting and add configuration for preview chunk limit 2025-08-21 11:04:33 +08:00
twwu dfd33b3d84 Merge branch 'main' into feat/rag-2 2025-08-21 09:43:51 +08:00
twwu 3e27e97364 feat(publish): improve success and error notifications for knowledge pipeline publishing with localized messages and enhanced user guidance 2025-08-20 18:41:09 +08:00
twwu 9d3198f808 refactor(data-source, before-run-form): enhance data handling and user interface for data source selection, integrating new components and improving state management 2025-08-20 18:21:59 +08:00
twwu 449755ada4 refactor(test-run, preparation): restructure test run components, enhance data handling, and improve user experience with new loading states and error handling 2025-08-20 16:40:56 +08:00
twwu 8ab3f1212b refactor(billing, chunk-preview): update link target and improve file property handling in document preview 2025-08-20 10:23:33 +08:00
twwu be045a68ee refactor(credential-icon, create-from-pipeline, test-run): improve component structure and enhance data handling for online drive files 2025-08-19 15:24:14 +08:00
Joel d4370a8ca5 chore: alert ui 2025-08-19 15:07:53 +08:00
twwu 95f60d89ab refactor(datasets): reorganize document components and enhance operations with download functionality 2025-08-19 15:07:52 +08:00
twwu a1666fe058 Merge branch 'main' into feat/rag-2 2025-08-19 14:59:06 +08:00
twwu ed4bd56146 refactor(create-card, template-card): add TODO comments for direct pipeline dataset creation and improve code organization 2025-08-19 14:41:32 +08:00
Joel 64897bc6fe chore: hide mock data 2025-08-19 14:21:53 +08:00
Joel 559d014b29 chore: use api return truncate 2025-08-19 14:18:41 +08:00
twwu ad523ef4ad feat(publish): enhance success notification for pipeline template publishing with additional information and links 2025-08-19 14:15:49 +08:00
Joel 3c4b374038 feat: run result data too long export 2025-08-19 14:04:00 +08:00
twwu d9cdce3f7a fix(i18n): refine Simplified Chinese translations for dataset and pipeline terminology 2025-08-19 13:10:51 +08:00
twwu 865e3ee85b fix(billing): adjust padding and margin for pricing plan item components 2025-08-19 10:43:49 +08:00
twwu 07887fb24f fix(i18n): update Simplified Chinese translations for "知识管道" to "知识流水线" (two renderings of "knowledge pipeline") 2025-08-19 10:24:52 +08:00
Harry fcdbe3b84a fix: plugin service import 2025-08-19 10:06:27 +08:00
twwu 38c51b1011 fix(i18n): highlight "プレミアム" ("Premium") in Japanese billing translations 2025-08-18 21:04:57 +08:00
twwu 469ed5a311 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-18 21:00:13 +08:00
twwu 56273a2dcf fix(i18n): update Japanese and Simplified Chinese translations for billing terms 2025-08-18 20:59:49 +08:00
Harry 6ce013ac52 fix: OAuth client parameter retrieval by verifying plugin status 2025-08-18 20:41:22 +08:00
twwu fb6fe4a32b fix(billing): adjust z-index for noise effects in pricing plans and self-hosted plan items 2025-08-18 20:40:45 +08:00
yessenia ae183b348c feat: variable preview 2025-08-18 18:16:48 +08:00
Joel b4e76af4a7 feat: string type too long hide 2025-08-18 18:15:10 +08:00
twwu 4a5c883988 feat(pipeline): implement footer component for dataset creation and enhance UI with new styles 2025-08-18 16:58:43 +08:00
Joel 2391e582f2 feat: debug show big data 2025-08-18 16:57:41 +08:00
twwu cd760633cb feat(billing): add noise effects to pricing plans and update rendering logic 2025-08-18 14:31:09 +08:00
twwu ece1330567 feat(billing): add Enterprise plan component and update plan rendering logic 2025-08-18 11:25:48 +08:00
twwu 386614951f Merge branch 'main' into feat/rag-2 2025-08-18 11:16:18 +08:00
twwu a2892773f2 fix: dark mode color 2025-08-17 15:25:10 +08:00
twwu e80645bfa1 feat: enhance billing plan components with new SVG assets and update styles for premium and enterprise plans 2025-08-15 22:17:32 +08:00
twwu c7d5ec1520 feat: add new pricing assets and update billing plan components to utilize them 2025-08-15 18:39:59 +08:00
twwu 8cf98ba0ce feat: add knowledge pipeline publishing feature and update billing context; refactor popup component for conditional rendering 2025-08-15 18:06:15 +08:00
twwu fb10706c20 feat: refactor billing plans components to improve structure and add self-hosted plan item button; update pricing layout and translations 2025-08-15 16:29:45 +08:00
jyong fb7dc4e0e1 fix file 2025-08-15 14:55:37 +08:00
twwu d558f98aa6 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-15 14:17:46 +08:00
twwu 89c7f71199 feat: add Footer component and integrate it into Pricing layout; refactor Header styles and update Plans component structure 2025-08-15 14:16:35 +08:00
autofix-ci[bot] ff76adc88a
[autofix.ci] apply automated fixes 2025-08-15 04:17:51 +00:00
Yunlu Wen cfc555d05d feat: add knowledge pipeline creation feature 2025-08-15 12:15:37 +08:00
twwu 153d5e8f03 refactor: Change plan-item directory 2025-08-15 11:14:38 +08:00
twwu ac456c1a95 feat: refactor billing plans components and add new PlanItem structure with tooltip support 2025-08-15 11:08:36 +08:00
jyong 8e88765261 fix file 2025-08-14 18:17:44 +08:00
jyong 5a2618d002 fix file 2025-08-14 18:07:52 +08:00
jyong 69a821db02 fix file 2025-08-14 18:01:29 +08:00
twwu 46224724a2 feat: add Cloud and SelfHosted components with updated PlanSwitcher integration 2025-08-14 15:54:07 +08:00
twwu 8a5bcd11f2 fix: add missing newline at end of JSON files for icon components 2025-08-14 15:21:53 +08:00
jyong a8fbf123e4 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-14 15:12:29 +08:00
jyong 8b8d257f78 fix file 2025-08-14 15:12:09 +08:00
twwu 02720c9b95 Merge branch 'main' into feat/rag-2 2025-08-14 15:03:48 +08:00
twwu d8a9645e83 feat: Implement billing plan selection UI with plan switcher and range options 2025-08-14 15:01:38 +08:00
jyong 72ea3b4d01 fix variable_pool 2025-08-13 17:38:14 +08:00
jyong 3797416fe0 fix online drive 2025-08-13 15:45:33 +08:00
jyong f74706a4a5 fix online drive 2025-08-13 15:28:18 +08:00
twwu 463ca7043d fix: sync doc styling change 2025-08-13 15:13:03 +08:00
twwu 5a6818c817 Merge branch 'main' into feat/rag-2 2025-08-13 15:05:57 +08:00
twwu acde411629 fix: Update breadcrumb styles in Online Drive component for improved visual consistency 2025-08-13 14:28:55 +08:00
twwu 4d34891ac0 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-13 11:34:27 +08:00
twwu cf46fba753 feat: Enhance Online Drive component with bucket handling and breadcrumb navigation 2025-08-13 11:34:22 +08:00
jyong fcdbfbda4f add credential id 2025-08-12 17:56:28 +08:00
twwu 71d8a0c0b6 refactor: Refactor FileIcon component to use useMemo for file type determination and rename loading state variable for clarity 2025-08-12 16:55:00 +08:00
jyong ae3addb922 add credential id 2025-08-12 15:43:11 +08:00
jyong bd1d7f8652 add credential id 2025-08-12 15:38:26 +08:00
jyong a0006ce968 add credential id 2025-08-12 14:45:45 +08:00
jyong 2b7243dbc7 add credential id 2025-08-12 11:13:47 +08:00
jyong 22b3933cc3 Merge branch 'main' into feat/rag-2
# Conflicts:
#	api/core/workflow/entities/variable_pool.py
2025-08-12 11:13:04 +08:00
jyong 1bc506603a add credential id 2025-08-12 11:10:21 +08:00
zxhlyh 54b935f609 fix 2025-08-12 10:49:49 +08:00
Harry cf4a526e7f refactor: replace db.session with session in DatasourceProviderService for consistency 2025-08-11 20:35:46 +08:00
Harry 543f80ad5d refactor: replace get_real_credential_by_id with get_datasource_credentials in multiple services for consistency 2025-08-11 20:04:04 +08:00
twwu 7f328328fb Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-11 18:37:22 +08:00
twwu 9893b8110d refactor: rename selectedFileKeys to selectedFileIds and update related logic in online drive components 2025-08-11 18:37:15 +08:00
jyong 087a4fbd14 add credential id 2025-08-11 18:26:41 +08:00
Harry ada0875ac4 fix: update default value for expires_at in DatasourceProvider model 2025-08-11 11:40:51 +08:00
Harry 6b07e0e8d6 feat: add expiration for OAuth credentials in datasource provider 2025-08-11 11:25:50 +08:00
twwu fc779d00df Merge branch 'main' into feat/rag-2 2025-08-11 11:15:58 +08:00
zxhlyh 58aca75ee0 fix: json schema 2025-08-08 17:38:01 +08:00
zxhlyh 8464ec46e6 fix: json schema 2025-08-08 17:38:01 +08:00
twwu ac7953a32c feat: add credential_id handling in CreateFormPipeline and OnlineDrive components 2025-08-08 14:48:58 +08:00
twwu b21d991fdb feat(rag): pass credentialId to online document preview and wire to data source store 2025-08-08 14:26:38 +08:00
jyong df5a4e5c08 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-08 14:18:45 +08:00
jyong d07ce809be add credential id 2025-08-08 14:18:30 +08:00
twwu ec3cb126a0 feat: add clear data functions for online documents, website crawl, and online drive; integrate credential change handling 2025-08-08 13:49:08 +08:00
twwu 184c3c88b7 refactor: Refactor online document and online drive components to handle credential changes 2025-08-08 13:43:01 +08:00
zxhlyh 097a6fc1e0 fix: variable 2025-08-07 17:22:29 +08:00
zxhlyh d5f82d0d5f fix: json schema 2025-08-07 16:36:59 +08:00
twwu 1b3860d012 Merge branch 'main' into feat/rag-2 2025-08-07 16:27:20 +08:00
twwu 5729d38776 feat: add CredentialIcon component and integrate it into credential selector for improved avatar display 2025-08-07 15:34:15 +08:00
zxhlyh 9e882122ca fix: json schema 2025-08-07 15:28:42 +08:00
jyong e6f1bc165c add tool file preview 2025-08-07 14:35:04 +08:00
jyong 842ced218e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-07 11:13:10 +08:00
jyong ca8f80ee33 notion fix 2025-08-07 11:13:02 +08:00
twwu 646b798e9c Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-07 10:10:32 +08:00
twwu e0753359f8 fix: adjust Toast component positioning to top-right corner 2025-08-07 10:10:27 +08:00
Harry 1d79c21ae3 feat: implement structured output wrapping for pipeline items 2025-08-06 17:41:07 +08:00
twwu 5b433aa2d1 feat: add useFloatingRight hook and integrate it into InputFieldEditorPanel and PreviewPanel for dynamic positioning 2025-08-06 16:59:22 +08:00
jyong 218e778099 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-06 15:39:27 +08:00
jyong 1567b615dc notion fix 2025-08-06 15:39:22 +08:00
Harry 29da2e5c19 feat: enhance output schema descriptions and remove unused constants 2025-08-06 15:12:57 +08:00
Harry facbe02cf7 feat: datasource output schema 2025-08-06 15:12:57 +08:00
Harry 2a1e1a8042 feat: datasource output schema 2025-08-06 15:12:57 +08:00
jyong 13f38045d4 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-06 15:11:45 +08:00
jyong 94a0fb6dc1 notion fix 2025-08-06 15:11:37 +08:00
twwu c5a3bf9b9e refactor: simplify Markdown rendering in ChunkContent component 2025-08-06 14:32:28 +08:00
twwu 95982d37a6 refactor: remove unused DataSourceOauthBinding import from dataset_service and document_indexing_sync_task 2025-08-06 14:25:39 +08:00
twwu 40f3524cfe Merge branch 'main' into feat/rag-2 2025-08-06 14:23:51 +08:00
twwu da68cf48c4 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-06 11:40:35 +08:00
twwu 8717a789b3 feat: dynamically import panel components to improve performance 2025-08-06 11:40:30 +08:00
jyong 05e96e56e5 notion fix 2025-08-06 11:13:20 +08:00
twwu 6954926df9 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-06 10:33:50 +08:00
twwu ec6fabb222 feat: introduce useInputFieldPanel hook to manage input field panel state and refactor related components 2025-08-06 10:32:57 +08:00
zxhlyh 6cae1a2872 fix: rag variable 2025-08-05 18:26:07 +08:00
jyong 2f163bad8f transform document 2025-08-05 18:16:24 +08:00
zxhlyh 6faa4b107b fix: rag variable 2025-08-05 16:31:14 +08:00
twwu 8ab5c47737 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-05 14:20:41 +08:00
twwu 0a397ac477 feat: Refactor document context and update chunking mode handling across components 2025-08-05 14:19:28 +08:00
zxhlyh 09b5cacbad fix: rag variable 2025-08-05 13:02:51 +08:00
zxhlyh 522210bad6 fix: rag variable 2025-08-05 11:39:17 +08:00
zxhlyh 0975f5bdc2 merge main 2025-08-05 10:31:12 +08:00
zxhlyh 201e4cd64d merge main 2025-08-05 10:30:53 +08:00
twwu 1e5317d3f0 feat: Enhance InputFieldPanel to manage preview and edit states more effectively 2025-08-01 16:35:27 +08:00
twwu 44d569a7c1 feat: Implement input field management panel 2025-08-01 16:27:53 +08:00
twwu ec501cf664 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-01 14:43:00 +08:00
twwu cd9bfe0df3 feat: Update website crawl provider to use jinaReader and synchronize selection changes 2025-08-01 14:42:54 +08:00
jyong d36501203f Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-01 14:11:18 +08:00
jyong f3f3a99e5b transform document 2025-08-01 14:11:11 +08:00
twwu 19a93c6554 feat: Enhance Notion integration by adding credential_id to NotionInfo and updating related functions 2025-08-01 14:04:01 +08:00
jyong 383ee368e6 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-01 13:59:19 +08:00
jyong 79f30daf03 transform document 2025-08-01 13:59:11 +08:00
twwu e7e531dc06 feat: Refactor WebsiteCrawl component to improve header configuration and remove unused memoization 2025-08-01 11:50:50 +08:00
zxhlyh d8ac78056e fix: open input field modal from var picker 2025-08-01 11:28:30 +08:00
twwu f75a3ef212 feat: Enhance InputFieldDialog and PreviewPanel with improved styling for better layout 2025-07-31 20:24:18 +08:00
twwu c9ab0fb8f6 feat: Update InputFieldDialog styling to allow for flexible growth 2025-07-31 20:18:20 +08:00
twwu 2dc71f059c feat: Update FooterTip component and enhance InputFieldDialog layout 2025-07-31 18:38:04 +08:00
twwu 18af3dfe5d Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-31 16:29:00 +08:00
twwu 0b871abe59 feat: Add credential handling to Notion page selector and related components 2025-07-31 16:28:53 +08:00
jyong 82819af55c transform document 2025-07-31 15:59:30 +08:00
jyong 9915364740 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-31 15:21:18 +08:00
jyong 97136ca8f0 transform document 2025-07-31 15:21:06 +08:00
twwu 195bf6621a Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-31 15:19:11 +08:00
twwu 8711a57d92 feat: Enhance NotionPageSelector and NotionPageSelectorModal with loading states and credential handling 2025-07-31 15:19:03 +08:00
jyong 8e96b9ed77 transform document 2025-07-31 11:51:40 +08:00
Joel 5a21da00c5 chore: knowledge base single run 2025-07-31 11:20:46 +08:00
jyong a7a4c8228e Merge branch 'main' into feat/rag-2
# Conflicts:
#	web/app/components/workflow/hooks/use-workflow.ts
2025-07-31 10:30:28 +08:00
twwu 1b6a925b34 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-30 17:42:10 +08:00
twwu 41363459b5 feat: Update Header component button styling and remove redundant onClick from icon 2025-07-30 17:41:55 +08:00
Harry deceaa38f0 feat: datasource oauth default credentials 2025-07-30 16:36:27 +08:00
Harry f7ec255b3e feat: oauth 2025-07-30 15:55:16 +08:00
Harry 4dab128900 feat: oauth 2025-07-30 15:52:59 +08:00
zxhlyh 76b4288b34 datasource change authed page 2025-07-30 15:23:04 +08:00
twwu a1c38a2740 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-30 15:19:16 +08:00
twwu f8d7d07c13 feat: Introduce CredentialSelector component and remove WorkspaceSelector 2025-07-30 15:19:10 +08:00
Joel 69738794bc feat: support custom before run form 2025-07-30 14:43:29 +08:00
jyong f37109ef39 transform document 2025-07-30 14:34:38 +08:00
Harry 875aea1c22 feat: datasource reauthentication 2025-07-30 13:39:04 +08:00
Joel c70a7e832e chore: data source add single run button 2025-07-29 18:31:35 +08:00
jyong ecba9e44ff transform document 2025-07-29 18:17:56 +08:00
jyong a7d4675831 transform document 2025-07-29 18:12:35 +08:00
jyong 21df72a57a transform document 2025-07-29 17:56:28 +08:00
Joel 240f6890f1 fix: some lint 2025-07-29 16:29:59 +08:00
Joel 27f65150d7 fix: run tool cause page crash because of feature context 2025-07-29 16:19:14 +08:00
jyong e2df3f182d transform document 2025-07-29 16:01:06 +08:00
zxhlyh a996c1d90c merge main 2025-07-29 15:45:01 +08:00
zxhlyh e19a07c2e6 merge main 2025-07-29 15:44:23 +08:00
Joel 2c6f88ef82 chore: reload vars 2025-07-29 15:24:26 +08:00
jyong 786d121fdf Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-29 15:23:22 +08:00
jyong 9cfb531e3b transform document 2025-07-29 15:23:11 +08:00
twwu 1c813239c9 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-29 14:52:25 +08:00
twwu fbf3abddf2 feat: Add tooltip for configuration button in header and update translations 2025-07-29 14:52:17 +08:00
jyong e89398f415 add old auth transform 2025-07-29 14:13:50 +08:00
twwu cc911f46f2 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-29 13:46:23 +08:00
twwu c2cbdcd3bf feat: Enhance dataset info and card components with memoization for improved performance 2025-07-29 13:46:17 +08:00
jyong 46db1acf98 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-29 11:18:14 +08:00
jyong 6d00ffa509 add old auth transform 2025-07-29 11:18:06 +08:00
zxhlyh 446301a443 fix: search method 2025-07-29 11:10:16 +08:00
jyong 657e813c7f add old auth transform 2025-07-28 19:29:36 +08:00
jyong 829e6f0d1a add old auth transform 2025-07-28 19:29:07 +08:00
twwu b0cd4daf54 feat: Add credential selector for online documents and online drive 2025-07-28 16:55:40 +08:00
zxhlyh fc3250678c fix: module not found 2025-07-28 16:36:40 +08:00
zxhlyh a95cf6f8b0 merge main 2025-07-28 16:00:38 +08:00
zxhlyh acae51c309 initial nodes 2025-07-28 15:38:48 +08:00
zxhlyh 347cd6befc publish toast 2025-07-28 14:29:05 +08:00
jyong 50fed69c0c r2 transform 2025-07-28 14:00:18 +08:00
jyong 9dbc887da5 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-25 17:43:23 +08:00
jyong 38e6391be4 r2 transform 2025-07-25 17:43:15 +08:00
twwu e1861f5f9c fix: add dataset reset functionality and improve warning message consistency 2025-07-25 17:30:19 +08:00
jyong f887bbedab r2 transform 2025-07-25 17:06:29 +08:00
jyong f4dd22b9cb r2 transform 2025-07-25 15:17:03 +08:00
jyong 7f6759e0ac r2 transform 2025-07-25 14:41:39 +08:00
jyong b01c66acbc Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-25 14:10:20 +08:00
jyong ed1bec9344 r2 transform 2025-07-25 14:10:12 +08:00
zxhlyh 4fdcd74f52 fix: publish 2025-07-25 12:00:00 +08:00
jyong bb609ee3ca Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-24 17:43:38 +08:00
jyong 1938991162 r2 transform 2025-07-24 17:43:26 +08:00
twwu bae2af0c85 Merge branch main into feat/rag-2 2025-07-24 17:40:04 +08:00
jyong 3b0be18d47 r2 transform 2025-07-24 17:08:39 +08:00
zxhlyh 0417e2f4d9 fix: auth provider 2025-07-24 16:58:25 +08:00
zxhlyh 16603952a0 datasource template 2025-07-23 18:20:32 +08:00
jyong 5401299e6e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-23 17:02:12 +08:00
jyong 98ef4ffb4d r2 transform 2025-07-23 17:02:01 +08:00
zxhlyh 761d717656 fix 2025-07-23 16:59:47 +08:00
zxhlyh 44a8b4120b pipeline template 2025-07-23 15:34:32 +08:00
zxhlyh 53cdee1e2f pipeline template 2025-07-23 10:46:45 +08:00
zxhlyh d76e37b018 add datasource empty node 2025-07-22 16:48:24 +08:00
zxhlyh 3b8d96f45c merge main 2025-07-22 13:52:24 +08:00
zxhlyh 10657b6bd3 datasource auth 2025-07-22 10:39:19 +08:00
Harry e5c7fd5b14 feat: enforce maximum length for authorization name in datasource authentication 2025-07-21 19:31:58 +08:00
Harry 12661ce0ca feat: improve authorization name validation and enhance credential encryption handling 2025-07-21 19:28:00 +08:00
Harry 34072371a1 feat: refactor OAuth client retrieval in datasource authentication 2025-07-21 18:55:21 +08:00
Harry 666868fa35 feat: remove unused import for CredentialsValidateFailedError in datasource provider service 2025-07-21 18:53:36 +08:00
Harry ba7f0b3004 feat: enhance datasource authentication by improving credential handling and updating API parameters 2025-07-21 18:51:55 +08:00
zxhlyh 386d320650 rename auth name 2025-07-21 17:59:13 +08:00
zxhlyh 4d36e784b7 merge main 2025-07-21 17:45:26 +08:00
zxhlyh caa2de3344 datasource oauth 2025-07-21 17:41:19 +08:00
Harry 039a053027 feat: standardize credential type string for API key in datasource provider service 2025-07-21 17:40:50 +08:00
Harry 7141181732 feat: remove unnecessary blank line in datasource authentication setup 2025-07-21 17:00:10 +08:00
Harry 17da96bdd8 feat: refactor datasource authentication APIs for improved credential management 2025-07-21 16:43:50 +08:00
Harry 57b48f51b5 feat: convert credential form schemas to lists for consistency 2025-07-21 15:51:24 +08:00
Harry af94602d37 feat: add APIs for setting default datasource provider and updating provider name 2025-07-21 15:49:39 +08:00
jyong 9c96f1db6c r2 transform 2025-07-21 14:51:40 +08:00
Harry 51d7a9b6be feat: mask hidden values in tenant OAuth client retrieval 2025-07-21 14:35:46 +08:00
Harry 529eca70bc feat: enhance datasource credential and OAuth schema serialization 2025-07-21 14:31:26 +08:00
Harry ef8d941633 feat: simplify OAuth encrypter retrieval and remove unnecessary validation 2025-07-21 13:48:05 +08:00
Harry e97f03c130 feat: add custom OAuth client setup and enhance datasource provider model with avatar_url 2025-07-21 12:36:02 +08:00
Harry 7364d051d2 feat: refactor provider name generation to use incremental naming & enforce unique constraints 2025-07-18 21:34:59 +08:00
Harry 23a5ff410e feat: add avatar_url to datasource providers and update OAuth handling 2025-07-18 19:47:59 +08:00
jyong 34a6ed74b6 r2 transform 2025-07-18 19:22:31 +08:00
jyong dc359c6442 r2 transform 2025-07-18 19:04:46 +08:00
Harry dd59cea085 migrations 2025-07-18 14:58:10 +08:00
Harry ab775bce26 feat: remove BuiltinDatasourceProvider class and related credential handling 2025-07-18 14:47:08 +08:00
Harry 82b531e949 feat: remove tenant_plugin_auto_upgrade_strategies table and adjust datasource_oauth_params 2025-07-18 14:44:01 +08:00
Harry f325662141 feat: refactor DatasourceNode and KnowledgeIndexNode to use _node_data attribute 2025-07-18 14:25:11 +08:00
twwu 32fe8313b4 feat: import and extend dayjs relativeTime plugin in multiple components 2025-07-18 14:15:07 +08:00
Harry 6ca5bc1063 feat: add datasource OAuth client setup command and refactor related models 2025-07-18 14:11:15 +08:00
twwu f153319a77 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-18 14:05:55 +08:00
twwu 0e428afe43 feat: convert VersionHistoryPanel to dynamic import for improved performance 2025-07-18 14:05:34 +08:00
Joel f336245a45 fix: tool node text overflow 2025-07-18 14:05:08 +08:00
twwu 5b2c99e183 Merge branch 'main' into feat/rag-2 2025-07-18 14:03:48 +08:00
twwu 399866e0ac fix: add 'no-spinner' class to InputNumber component for better styling 2025-07-18 13:42:05 +08:00
Harry 633bfc25e0 feat: update provider parameter naming and refactor related logic in datasource_auth.py 2025-07-18 13:13:20 +08:00
Harry 0ac5c0bf3e feat: refactor OAuth provider handling and improve provider name generation 2025-07-18 12:47:32 +08:00
twwu 9f2a9ad271 fix: update keyboard shortcut and clean up component structure in various files 2025-07-17 18:22:03 +08:00
twwu 59f68cd63b fix: ensure default values are handled correctly in InputNumber and related components 2025-07-17 18:15:52 +08:00
Harry 3388e83920 Merge remote-tracking branch 'origin/main' into feat/rag-2
# Conflicts:
#	.github/workflows/build-push.yml
#	web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx
2025-07-17 17:56:56 +08:00
zxhlyh e0e0a7661d merge main 2025-07-17 16:49:59 +08:00
zxhlyh de47b56ca4 merge main 2025-07-17 16:49:22 +08:00
zxhlyh 01566035e3 merge main 2025-07-17 16:48:43 +08:00
jyong cc96b7f507 r2 transform 2025-07-17 16:45:30 +08:00
jyong ad7650e724 r2 transform 2025-07-17 16:36:40 +08:00
zxhlyh f79a90fb21 fix agent default 2025-07-17 16:07:58 +08:00
jyong d0c78d079b r2 transform 2025-07-17 15:32:58 +08:00
jyong cc06ce60fd Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-17 15:22:51 +08:00
jyong 761ea0eedb r2 transform 2025-07-17 15:22:39 +08:00
zxhlyh c706793847 fix: file upload config 2025-07-17 13:48:23 +08:00
zxhlyh 2c52561060 datasource oauth 2025-07-17 11:18:08 +08:00
jyong a39d7e1f85 r2 transform 2025-07-16 19:26:33 +08:00
jyong aaa5b0e295 r2 transform 2025-07-16 18:05:40 +08:00
jyong 3bdb40f37b Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-16 14:25:30 +08:00
jyong c660c98b04 r2 transform 2025-07-16 14:25:16 +08:00
twwu e7f31a66be fix: correct notification type for successful dataset conversion 2025-07-16 14:12:33 +08:00
jyong 675ff9bd7d r2 transform 2025-07-16 12:00:26 +08:00
jyong 6363ecef97 r2 transform 2025-07-16 11:49:59 +08:00
twwu 5c0d19e36d fix: improve handleVariableNameBlur logic to prevent setting label when it already exists 2025-07-16 10:33:59 +08:00
twwu e0753ebfd1 fix: update dataset conversion endpoint path for correct API integration 2025-07-16 10:08:46 +08:00
twwu b8e9b97f07 feat: implement dataset conversion to pipeline with success and error notifications 2025-07-16 09:53:11 +08:00
jyong 384073f025 r2 transform 2025-07-16 02:02:08 +08:00
jyong 2012ea3213 r2 transform 2025-07-16 01:50:37 +08:00
jyong 1ad73ccdc8 r2 2025-07-15 17:54:53 +08:00
jyong 96484731a2 r2 2025-07-15 16:13:45 +08:00
jyong 537e535d9a r2 2025-07-15 15:33:40 +08:00
jyong 3a3b60bab5 r2 2025-07-15 15:00:38 +08:00
jyong 63111e8050 r2 2025-07-14 18:17:34 +08:00
twwu 405139c377 fix: add isRunning prop to ProcessDocuments and related components for better processing state management 2025-07-14 17:45:19 +08:00
jyong a919e3e135 r2 2025-07-14 17:33:08 +08:00
twwu 3e5772c50c fix: enhance layout and tooltip handling in Actions component 2025-07-14 16:36:51 +08:00
twwu cb8fab7364 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-14 16:26:03 +08:00
twwu 60f3832c65 fix: refactor OptionCard and StepTwo components for improved structure and readability 2025-07-14 16:25:56 +08:00
Joel cf89d06023 fix: single run url error in pipeline 2025-07-14 16:15:52 +08:00
jyong 4b9a5a66c1 r2 2025-07-14 16:14:27 +08:00
Joel e095de05c5 feat: pipeline run 2025-07-14 16:04:43 +08:00
Joel 82f4b35d52 chore: use flow type instead of whole url 2025-07-14 15:30:04 +08:00
Joel 7a9faf909e feat: workflow use common last run 2025-07-14 15:10:35 +08:00
jyong 928751a856 r2 2025-07-14 14:11:58 +08:00
twwu d77d86f53b fix: remove unused showWorkflowEmpty prop from Tools component for cleaner code 2025-07-14 14:11:05 +08:00
twwu 2a5bab10b8 fix: pass dataSources prop to PortalToFollowElem for improved functionality 2025-07-14 13:48:45 +08:00
twwu 6313f819cf fix: add block enumeration and tool icon handling for enhanced workflow functionality 2025-07-14 11:44:13 +08:00
twwu 682b65034c Merge branch 'main' into feat/rag-2 2025-07-14 11:17:42 +08:00
twwu adbad0ad33 fix: enhance bucket list initiation check for improved accuracy 2025-07-14 10:06:55 +08:00
jyong ed77877db1 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-11 18:46:29 +08:00
jyong 9c4356e9a1 r2 2025-07-11 18:46:22 +08:00
twwu 0fdb1fedb0 feat: add PluginDependency component to RagPipelineChildren and WorkflowChildren for enhanced functionality 2025-07-11 18:38:18 +08:00
twwu 23a6fe3259 fix: adjust default selection of crawl results based on pipeline status 2025-07-11 18:02:23 +08:00
twwu 90d0f12ee9 refactor: update file extension handling in ChunkPreview component to use dynamic extension retrieval 2025-07-11 16:53:03 +08:00
twwu 50e16f8362 refactor: optimize state selection in data source components using useShallow for improved performance 2025-07-11 16:41:01 +08:00
twwu 9dbb06fccc fix: Fix node panel positioning issue when chat log modal is open 2025-07-11 15:58:26 +08:00
jyong 3b70f8ac08 r2 2025-07-11 15:25:58 +08:00
twwu 58a3471a5f refactor: update variable utility functions to include isRagVariableVar for enhanced variable validation 2025-07-11 13:51:23 +08:00
twwu 3e187ba6aa refactor: update BlockIcon component to handle Tool and DataSource types for conditional rendering 2025-07-10 16:56:59 +08:00
twwu f677f2f71b refactor: update useEffect dependency to include currentPage.page_id for proper content fetching 2025-07-10 16:27:56 +08:00
twwu de6867f875 refactor: update CrawledResult and WebsiteCrawl components to handle showPreview prop and adjust previewIndex logic 2025-07-10 15:43:36 +08:00
twwu c39746181d refactor: update data source store usage in LocalFile and WebsiteCrawl components 2025-07-10 15:24:58 +08:00
twwu 15cd9e0b12 refactor: rename selectedFileList to selectedFileKeys across components and update related logic 2025-07-10 15:14:23 +08:00
twwu e66c2badda refactor: update error handling to use DataSourceNodeErrorResponse in OnlineDocuments and WebsiteCrawl components 2025-07-10 13:55:24 +08:00
twwu 6030ae9d0f refactor: integrate currentNodeIdRef into data source store and update related components 2025-07-10 12:02:54 +08:00
twwu 42fd40500a refactor: remove isTruncated state and update related logic to use mutable refs 2025-07-10 10:39:01 +08:00
twwu 611bc728d0 fix: update hover background color for disabled and active NavLink states 2025-07-10 10:21:45 +08:00
twwu e2a141b3bb Merge branch 'main' into feat/rag-2 2025-07-10 10:14:12 +08:00
jyong 8b97551f1a r2 2025-07-09 18:50:13 +08:00
twwu 966e6e03fc style: Update component attributes to use single quotes and adjust z-index in ContentDialog 2025-07-09 18:34:21 +08:00
twwu 775983b04b Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-09 18:21:01 +08:00
twwu a2e0bc26c6 feat: Add conversion functionality to Knowledge Pipeline with UI and translations 2025-07-09 18:20:52 +08:00
jyong bd33b9ffec r2 2025-07-09 17:34:42 +08:00
jyong b538eee5dd r2 2025-07-09 17:28:52 +08:00
twwu 7c6bdb9ec9 feat: Enhance operations with pause and resume functionality 2025-07-09 16:05:42 +08:00
twwu 258c965bd0 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-09 15:55:31 +08:00
twwu 9e44f2b805 Merge branch 'main' into feat/rag-2 2025-07-09 15:54:57 +08:00
jyong 9dcba51225 r2 2025-07-09 15:48:08 +08:00
twwu e7d394f160 feat: Add DatasetSidebarDropdown component and integrate ExtraInfo for dataset details 2025-07-09 15:13:02 +08:00
twwu dfe3c2caa1 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-09 14:45:03 +08:00
twwu 4ea4d227c6 refactor: Remove unused file support types logic from CreateFormPipeline component 2025-07-09 14:44:56 +08:00
jyong b5e4ce6c68 r2 2025-07-09 14:27:49 +08:00
twwu 4a8061d14c fix: Integrate dataset list reset functionality in dropdown and step two components 2025-07-09 13:54:49 +08:00
twwu 59c3305dcc feat: Enhance dataset dropdown functionality with export and delete options 2025-07-09 13:42:24 +08:00
twwu 8fc15c83d0 feat: Refactor dataset info components and add export pipeline functionality 2025-07-09 10:45:50 +08:00
twwu a0942399cd Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-08 20:04:26 +08:00
twwu 1c85dada53 feat: Implement sidebar toggle functionality with keyboard shortcuts and improve translations 2025-07-08 20:04:15 +08:00
jyong bc1a517a97 r2 2025-07-08 17:16:10 +08:00
twwu b3431ab0c4 feat: Refactor online drive components to improve file retrieval and selection logic 2025-07-08 16:46:59 +08:00
twwu 073a0974a4 fix: Update breadcrumb click handling to close dropdown and adjust prefix slicing logic 2025-07-08 15:40:16 +08:00
twwu e911a4e719 fix: Update button styles and improve file size validation in breadcrumb and item components 2025-07-08 15:28:22 +08:00
twwu 5e2b60664f fix: Improve item selection logic and reset selected file list on folder open 2025-07-08 14:26:55 +08:00
twwu b36f36d242 feat: Enhance CreateFormPipeline with file selection and validation for online documents and drives 2025-07-08 14:14:50 +08:00
jyong 6332fe795e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-08 14:05:45 +08:00
jyong db886ae3fb r2 2025-07-08 14:04:44 +08:00
twwu 467cd2c4c1 fix: Update onlineDrive file check to use selectedFileList for better validation 2025-07-07 16:34:43 +08:00
twwu d3ca50626d feat: Integrate useOnlineDrive hook and enhance datasource handling in CreateFormPipeline 2025-07-07 16:30:15 +08:00
twwu 13f168ed1c refactor: Refactor Online Drive components to improve state management and add truncation support 2025-07-07 15:51:59 +08:00
twwu 83c8219942 feat: Enhance file item component with support for disabled state and file type validation 2025-07-07 14:36:09 +08:00
twwu a30b92d6b1 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-04 19:15:52 +08:00
twwu 1bd664e655 feat: Implement Dropdown and Menu components for breadcrumb navigation in Online Drive 2025-07-04 19:15:37 +08:00
jyong 1fb59adba9 r2 2025-07-04 19:09:40 +08:00
jyong 1b3888a13e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-04 18:49:50 +08:00
jyong 9c6eb95700 r2 2025-07-04 18:49:37 +08:00
twwu 1ff608dfa9 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-04 18:24:59 +08:00
twwu 933ad0649c feat: Add subItems mapping to Panel component for enhanced item details 2025-07-04 18:24:52 +08:00
jyong a8b676ade0 r2 2025-07-04 18:05:58 +08:00
twwu 0d9991ec88 feat: Add ONLINE_DRIVE_OUTPUT and integrate into DataSource components for online drive support 2025-07-04 18:04:47 +08:00
twwu e67a19b26b refactor: Enhance Bucket and Breadcrumbs components; improve event handling and add button for bucket name 2025-07-04 17:55:30 +08:00
twwu d44af3ec46 refactor: Restructure breadcrumbs component; introduce Bucket and BreadcrumbItem components for improved navigation 2025-07-04 16:44:21 +08:00
twwu 9ce0c69687 refactor: Update event handling in Checkbox and Radio components; optimize Online Drive file filtering 2025-07-04 15:30:08 +08:00
twwu 2ecbcd6a7f refactor: Add loading state and bucket handling to Online Drive components 2025-07-04 15:14:19 +08:00
twwu d3b17ea567 fix: Update key property for onlineDrive datasource handling in TestRunPanel 2025-07-04 14:11:23 +08:00
twwu a4f7d373b5 refactor: Replace useDataSourceStore with useDataSourceStoreWithSelector for improved state selection across components 2025-07-04 14:03:04 +08:00
twwu 334f0c905a feat: Enhance Online Drive file handling with selection and folder opening functionality 2025-07-04 13:42:36 +08:00
jyong 44c2efcfe4 r2 2025-07-03 18:56:42 +08:00
twwu f2960989c1 refactor: Refactor data source components 2025-07-03 18:34:54 +08:00
jyong 816b49483a Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-03 15:15:14 +08:00
jyong 798d0880d6 r2 2025-07-03 15:15:01 +08:00
twwu cf4f652105 fix: Enhance data source handling by adding error response type and updating local file and online document slices 2025-07-03 14:58:58 +08:00
jyong 76c418c0b7 r2 2025-07-03 14:03:06 +08:00
jyong 7c5893db91 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	web/app/components/workflow-app/components/workflow-main.tsx
#	web/app/components/workflow/constants.ts
#	web/app/components/workflow/header/run-and-history.tsx
#	web/app/components/workflow/hooks-store/store.ts
#	web/app/components/workflow/hooks/use-nodes-interactions.ts
#	web/app/components/workflow/hooks/use-workflow-interactions.ts
#	web/app/components/workflow/hooks/use-workflow.ts
#	web/app/components/workflow/nodes/_base/components/panel-operator/panel-operator-popup.tsx
#	web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx
#	web/app/components/workflow/nodes/code/use-config.ts
#	web/app/components/workflow/nodes/llm/default.ts
#	web/app/components/workflow/panel/index.tsx
#	web/app/components/workflow/panel/version-history-panel/index.tsx
#	web/app/components/workflow/store/workflow/index.ts
#	web/app/components/workflow/types.ts
#	web/config/index.ts
#	web/types/workflow.ts
2025-07-03 11:40:54 +08:00
twwu 2dd1f41ad3 feat: Implement data source store with slices for local files, online documents, website crawls, and online drives 2025-07-03 10:31:29 +08:00
twwu ddde576b4a refactor: Update notification messages in CreateFromDSLModal and CreateFromScratchModal for dataset creation 2025-07-03 10:17:55 +08:00
jyong 38d895ab5f r2 2025-07-02 18:46:36 +08:00
jyong a6ff9b224b r2 2025-07-02 18:20:41 +08:00
jyong 832bef053f Merge branch 'main' into feat/r2
# Conflicts:
#	docker/docker-compose.middleware.yaml
#	web/app/components/workflow-app/components/workflow-main.tsx
#	web/app/components/workflow-app/hooks/index.ts
#	web/app/components/workflow/hooks-store/store.ts
#	web/app/components/workflow/hooks/index.ts
#	web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx
2025-07-02 18:20:05 +08:00
jyong 81b07dc3be r2 2025-07-02 18:15:23 +08:00
zxhlyh a52bf6211a merge main 2025-07-02 18:07:09 +08:00
twwu d7b0ccd6f7 feat: Add name field to data source credentials update function in usePipeline service 2025-07-02 15:08:20 +08:00
twwu 68d59ee8b3 refactor: Refactor useOnlineDocument hook 2025-07-02 14:56:29 +08:00
jyong e23d7e39ec Merge branch 'feat/datasource' into feat/r2
# Conflicts:
#	api/services/rag_pipeline/rag_pipeline.py
#	web/app/components/workflow/constants.ts
#	web/app/components/workflow/header/run-and-history.tsx
#	web/app/components/workflow/hooks/use-nodes-interactions.ts
#	web/app/components/workflow/hooks/use-workflow-interactions.ts
#	web/app/components/workflow/hooks/use-workflow.ts
#	web/app/components/workflow/index.tsx
#	web/app/components/workflow/nodes/_base/components/panel-operator/panel-operator-popup.tsx
#	web/app/components/workflow/nodes/_base/panel.tsx
#	web/app/components/workflow/nodes/code/use-config.ts
#	web/app/components/workflow/nodes/llm/default.ts
#	web/app/components/workflow/panel/index.tsx
#	web/app/components/workflow/panel/version-history-panel/index.tsx
#	web/app/components/workflow/store/workflow/index.ts
#	web/app/components/workflow/types.ts
#	web/config/index.ts
#	web/types/workflow.ts
2025-07-02 14:01:59 +08:00
twwu 0284e7556e refactor: Refactor useDatasourceIcon hook and enhance dataset node rendering with AppIcon component 2025-07-02 13:48:11 +08:00
jyong 9f14b5db9a r2 2025-07-02 11:55:21 +08:00
jyong 39d3f58082 r2 2025-07-02 11:33:00 +08:00
twwu 5d7a533ada fix: Improve layout by adding overflow handling in CreateFromPipeline and List components 2025-07-01 17:46:41 +08:00
twwu 0db7967e5f refactor: Add useOnlineDrive hook and integrate it into CreateFormPipeline and TestRunPanel components 2025-07-01 16:54:44 +08:00
twwu a81dc49ad2 feat: Refactor OnlineDocuments and PageSelector components to enhance state management and integrate new Actions component 2025-07-01 16:32:21 +08:00
jyong f33b6c0c73 add online drive 2025-07-01 16:08:54 +08:00
jyong a4eddd7dc2 r2 2025-07-01 15:16:33 +08:00
jyong c993a05da7 Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-07-01 14:23:58 +08:00
jyong f44f0fa34c r2 2025-07-01 14:23:46 +08:00
twwu 2d0d448667 feat: Update selection handling to support multiple choice in OnlineDocuments and PageSelector components 2025-07-01 14:14:28 +08:00
Dongyu Li bfcf09b684 feat(datasource): fix datasource icon 2025-07-01 14:04:09 +08:00
Dongyu Li cdbba1400c feat(datasource): update fetch_datasource_provider 2025-07-01 11:57:06 +08:00
twwu 55d7d7ef76 fix: Update default value handling for number input in useInitialData hook 2025-07-01 11:24:30 +08:00
twwu 7b473bb5c9 feat: Integrate OnlineDrive component into CreateFormPipeline and update related components 2025-06-30 18:31:52 +08:00
twwu ff511c6f31 refactor: Remove unused variables and simplify next button logic in TestRunPanel 2025-06-30 17:54:33 +08:00
twwu 310102bebd feat: Add SearchMenu icon and integrate it into the file list component with empty state handling 2025-06-30 17:31:27 +08:00
zxhlyh 1f5c32525f datasource page 2025-06-30 16:16:54 +08:00
jyong 618ad4c291 r2 2025-06-30 15:36:20 +08:00
twwu ada632f9f5 feat: Enhance input field handling by adding allVariableNames prop and localizing error messages 2025-06-30 15:28:55 +08:00
twwu 4c82c9d029 feat: Add Online Drive file management components and enhance file icon handling 2025-06-30 14:19:14 +08:00
zxhlyh 42655a3b1f fix: checklist 2025-06-30 14:11:26 +08:00
Harry 1449ed86c4 feat: rename online driver to online drive and update related classes and methods :) 2025-06-27 20:11:28 +08:00
zxhlyh 94674e99ab datasource page add marketplace 2025-06-27 16:44:23 +08:00
Harry eee72101f4 feat(online_driver): add online driver plugin, support browsing and downloading 2025-06-27 16:41:39 +08:00
twwu 93eabef58a refactor: Refactor OnlineDocuments component and remove OnlineDocumentSelector 2025-06-27 16:29:30 +08:00
twwu 5248fcca56 feat: implement support for single and multiple choice in crawled result items 2025-06-27 15:56:38 +08:00
twwu 264b95e572 feat: separate input fields into datasource and global categories in RAG pipeline 2025-06-27 15:23:04 +08:00
zxhlyh 8f2ad89027 datasource page 2025-06-27 15:01:33 +08:00
twwu 18b1a9cb2e Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-26 15:28:19 +08:00
zxhlyh 25fef5d757 merge main 2025-06-26 15:21:24 +08:00
twwu 2a25ca2b2c feat: enhance online drive connection UI and add localization for connection status in dataset pipeline 2025-06-26 14:24:50 +08:00
twwu 3a9c79b09a feat: refactor data source handling and integrate OnlineDrive component in TestRunPanel 2025-06-26 13:46:12 +08:00
twwu 025b55ef3b feat: update tooltip text for test run mode in English and Chinese translations for clarity 2025-06-26 10:17:48 +08:00
twwu cf7574bd10 feat: add FooterTips component and integrate it into TestRunPanel; extend DatasourceType enum with onlineDrive 2025-06-26 10:16:37 +08:00
jyong efccbe4039 r2 2025-06-25 17:32:26 +08:00
twwu c7cec120a6 feat: update variable validation regex for consistency in ExternalDataToolModal and schema 2025-06-25 17:07:31 +08:00
twwu 7d7fd18e65 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-25 16:16:41 +08:00
twwu c6ae9628af feat: refactor input variable handling and configurations in pipeline processing components 2025-06-25 16:15:59 +08:00
Joel 4631575c12 feat: can support choose current node var 2025-06-25 16:06:08 +08:00
zxhlyh a4f4fea0a5 fix: note node delete 2025-06-25 16:01:45 +08:00
jyong 540096a8d8 Merge branch 'main' into feat/r2
# Conflicts:
#	api/core/plugin/impl/oauth.py
#	api/core/workflow/entities/variable_pool.py
#	api/models/workflow.py
#	api/services/dataset_service.py
2025-06-25 14:35:23 +08:00
jyong 7b7cdad1d8 r2 2025-06-25 13:28:08 +08:00
twwu 261b7cabc8 feat: enhance OnlineDocumentPreview with datasourceNodeId and implement preview functionality 2025-06-25 11:36:56 +08:00
twwu ccd346d1da feat: add handling for RAG pipeline variables in node interactions 2025-06-25 10:40:48 +08:00
twwu a866cbc6d7 feat: implement usePipeline hook for managing pipeline variables and refactor input field handling 2025-06-25 10:11:26 +08:00
Dongyu Li 6aba39a2dd feat(datasource): add datasource content preview api 2025-06-24 17:43:25 +08:00
zxhlyh 8f4a0d4a22 variable picker 2025-06-24 17:27:06 +08:00
Dongyu Li 49bb15fae1 feat(datasource): add datasource content preview api 2025-06-24 17:14:31 +08:00
Dongyu Li e165f4a102 feat(datasource): add datasource content preview api 2025-06-24 17:14:16 +08:00
twwu 1c51bef3cb fix: standardize capitalization in translation keys and remove unused group property in FieldListContainer 2025-06-24 14:25:58 +08:00
zxhlyh c31754e6cd fix: create pipeline from customized 2025-06-24 11:12:39 +08:00
jyong 83cc484c24 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-23 17:12:26 +08:00
jyong 1ff9c07a92 fix notion dataset rule not found 2025-06-23 17:12:08 +08:00
twwu b25b284d7f Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 16:41:13 +08:00
twwu 2414dbb5f8 feat: clear selected IDs on document deletion action in DocumentList component 2025-06-23 16:38:19 +08:00
twwu 916a8c76e7 fix: rename currentDocuments to currentDocument for consistency in online documents handling 2025-06-23 16:31:09 +08:00
Dongyu Li 9783832223 Merge branch 'feat/datasource' into deploy/rag-dev 2025-06-23 16:12:03 +08:00
Dongyu Li b77081a19e feat(datasource): update datasource icon 2025-06-23 15:57:37 +08:00
twwu 896906ae77 feat: refactor layout structure in PipelineSettings component for improved responsiveness 2025-06-23 15:57:02 +08:00
twwu 2365a3a5fc Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 15:39:00 +08:00
twwu dd792210f6 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-23 15:38:34 +08:00
twwu 6ba4a4c165 feat: enhance website crawl functionality with state management and result handling 2025-06-23 15:38:24 +08:00
Dongyu Li 0a6dbf6ee2 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-23 15:21:15 +08:00
Dongyu Li ca0979dd43 feat(datasource): update fetch_datasource_provider 2025-06-23 15:18:15 +08:00
zxhlyh 0762e5ae50 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 15:03:32 +08:00
zxhlyh 48f53f3b9b workflow dependency 2025-06-23 15:02:57 +08:00
twwu af64f29e87 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 13:59:39 +08:00
twwu b9f59e3a75 Merge branch 'main' into feat/rag-pipeline 2025-06-23 13:59:05 +08:00
Dongyu Li b12a8eeb90 feat(datasource): change datasource result type to event-stream 2025-06-20 10:09:47 +08:00
twwu e551cf65c9 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-19 15:58:51 +08:00
twwu 3899211c41 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-19 15:58:01 +08:00
twwu 335e1e3602 feat: enhance pipeline settings with execution log and processing capabilities 2025-06-19 15:57:49 +08:00
jyong 725fc72c6f Merge branch 'feat/r2' into deploy/rag-dev 2025-06-19 15:31:03 +08:00
jyong b618f3bd9e r2 2025-06-19 15:30:46 +08:00
zxhlyh 95ba55af4d fix: import dsl sync rag variables 2025-06-19 15:04:26 +08:00
jyong f4e1ea9011 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-19 14:30:05 +08:00
jyong 3d0e288e85 r2 2025-06-19 14:29:39 +08:00
jyong 9620d6bcd8 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	web/i18n/zh-Hans/app.ts
2025-06-19 13:32:49 +08:00
jyong f7fbded8b9 Merge branch 'main' into feat/r2 2025-06-19 13:32:07 +08:00
Dongyu Li 0c5706b3f6 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-19 11:10:43 +08:00
Dongyu Li 82d0a70cb4 feat(datasource): change datasource result type to event-stream 2025-06-19 11:10:24 +08:00
twwu 55516c4e57 fix: add type checks for workspace roles in DatasetsLayout component 2025-06-19 10:56:03 +08:00
jyong cc2cd85ff5 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-19 10:46:15 +08:00
jyong 6ec742539a r2 2025-06-19 10:45:59 +08:00
jyong 09e0a54070 r2 2025-06-19 10:38:10 +08:00
twwu 5d25199f42 refactor: update layout for creation title and content in StepThree component 2025-06-19 09:36:04 +08:00
twwu 387826674c Merge branch 'main' into feat/rag-pipeline 2025-06-19 09:34:09 +08:00
twwu 02ae479636 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-18 18:04:23 +08:00
twwu a103324f25 refactor: enhance UI components with new icons and improved styling in billing and dataset processes 2025-06-18 18:03:43 +08:00
jyong 643efc5d85 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-18 17:11:52 +08:00
Dongyu Li 43e5798e13 feat(datasource): change datasource result type to event-stream 2025-06-18 16:27:10 +08:00
Dongyu Li 8aca70cd50 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-18 16:05:08 +08:00
Dongyu Li 2cf980026e feat(datasource): change datasource result type to event-stream 2025-06-18 16:04:47 +08:00
Dongyu Li 224111081b feat(datasource): change datasource result type to event-stream 2025-06-18 16:04:40 +08:00
twwu 4dc6cad588 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-18 15:08:05 +08:00
twwu f85e6a0dea feat: implement SSE for data source node processing and completion events, replacing previous run methods 2025-06-18 15:06:50 +08:00
twwu 4b3a54633f refactor: streamline dataset detail fetching and improve dataset list handling across components 2025-06-18 15:05:21 +08:00
jyong 6f67a34349 r2 qa index 2025-06-18 14:37:18 +08:00
zxhlyh e51d308312 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-18 13:47:36 +08:00
zxhlyh 379c92bd82 Merge branch 'main' into feat/rag-pipeline 2025-06-18 13:47:06 +08:00
jyong fa9f0ebfb1 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-18 11:06:37 +08:00
jyong ac917bb56d r2 2025-06-18 11:05:52 +08:00
jyong f7a4e5d1a6 Merge branch 'main' into feat/r2 2025-06-18 10:57:44 +08:00
jyong 515d34bbfb Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 19:07:38 +08:00
jyong 66de2e1f0a Merge remote-tracking branch 'origin/feat/r2' into feat/r2
# Conflicts:
#	api/core/workflow/graph_engine/entities/event.py
#	api/services/rag_pipeline/rag_pipeline.py
2025-06-17 19:07:15 +08:00
jyong 7f7ea92a45 r2 2025-06-17 19:06:17 +08:00
Dongyu Li a014345688 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 18:29:24 +08:00
Dongyu Li cf66d111ba feat(datasource): change datasource result type to event-stream 2025-06-17 18:29:02 +08:00
Dongyu Li 2d01b1a808 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 18:24:52 +08:00
Dongyu Li 739ebf2117 feat(datasource): change datasource result type to event-stream 2025-06-17 18:24:09 +08:00
twwu 176b844cd5 refactor: consolidate DialogWrapper component usage and improve prop handling across input fields 2025-06-17 18:20:30 +08:00
jyong 8fc6684ab1 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 18:12:36 +08:00
jyong 7c41f71248 r2 2025-06-17 18:11:38 +08:00
zxhlyh 2c2bfb4f54 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 17:45:50 +08:00
zxhlyh 3164f90327 merge main 2025-06-17 17:44:08 +08:00
twwu 90ac52482c test: add unit tests for ActionButton component with various states and sizes 2025-06-17 16:48:52 +08:00
twwu 879ac940dd Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 16:30:37 +08:00
twwu 796797d12b feat: centralize variable type mapping by introducing VAR_TYPE_MAP in pipeline model 2025-06-17 16:28:50 +08:00
twwu 7ac0f0c08c feat: enhance processing components by adding runDisabled state and fetching indicators 2025-06-17 16:13:49 +08:00
twwu 5cc6a2bf33 refactor: update toast notification handling and improve context usage in DocumentDetail 2025-06-17 14:41:06 +08:00
jyong 2db0b19044 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 14:05:15 +08:00
jyong 1d2ee9020c r2 2025-06-17 14:04:55 +08:00
Dongyu Li f2538bf381 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 13:56:00 +08:00
Dongyu Li f37e28a368 feat(datasource): Comment out the datasource_file_manager. 2025-06-17 13:54:25 +08:00
Dongyu Li c5976f5a09 feat(datasource): change datasource result type to event-stream 2025-06-17 13:51:41 +08:00
twwu 64a9181ee4 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 11:30:38 +08:00
twwu 33cd32382f feat: add indexing status batch and process rule hooks; refactor Notion page preview types 2025-06-17 11:29:56 +08:00
twwu 9456c59290 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 10:14:20 +08:00
twwu ce0bd421ae Merge branch 'main' into feat/rag-pipeline 2025-06-17 10:13:31 +08:00
twwu f9d04c6975 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-16 18:26:01 +08:00
twwu ecb07a5d0d feat: enhance field list functionality by adding chosen and selected properties to SortableItem 2025-06-16 18:25:30 +08:00
zxhlyh a165ba2059 merge main 2025-06-16 15:43:57 +08:00
zxhlyh 12fd2903d8 fix 2025-06-16 15:41:27 +08:00
twwu 0a2c569b3b fix: replace useGetDocLanguage with useDocLink for consistent documentation linking 2025-06-16 14:58:52 +08:00
twwu 9ab0d5fe60 Merge branch 'main' into feat/rag-pipeline 2025-06-16 14:25:58 +08:00
jyong 1d71fd5b56 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-16 14:09:04 +08:00
jyong b277acc298 Merge branch 'main' into feat/r2 2025-06-16 14:08:02 +08:00
jyong 8d47d8ce4f Merge remote-tracking branch 'origin/feat/r2' into feat/r2
# Conflicts:
#	api/core/datasource/website_crawl/website_crawl_plugin.py
#	api/services/rag_pipeline/rag_pipeline.py
2025-06-16 13:50:33 +08:00
jyong 41fef8a21f r2 2025-06-16 13:48:43 +08:00
zxhlyh b853a42e37 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-16 11:45:24 +08:00
zxhlyh 1633626d23 Merge branch 'main' into feat/rag-pipeline 2025-06-16 11:44:42 +08:00
zxhlyh 6c7a40c571 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-16 10:16:00 +08:00
zxhlyh abb2ed66e7 merge main 2025-06-16 10:15:24 +08:00
zxhlyh 5ae78f79b0 datasource dark theme 2025-06-16 10:00:14 +08:00
twwu e3b3a6d040 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 18:32:56 +08:00
twwu 6622ce6ad8 fix: update formData construction in convertToInputFieldFormData for improved handling of optional fields
fix: adjust z-index value in DialogWrapper for proper stacking context
2025-06-13 18:32:36 +08:00
Harry 5ccb8d9736 feat: online document 2025-06-13 18:22:15 +08:00
twwu 55906c8375 fix: remove unused billing plan logic from CreateFromDSLModal component 2025-06-13 18:01:01 +08:00
Harry 0908f310fc feat: webcrawl 2025-06-13 17:47:51 +08:00
twwu 58842898e1 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-13 16:42:27 +08:00
zxhlyh 1c17c8fa36 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 16:39:40 +08:00
zxhlyh 26aff400e4 node default configs 2025-06-13 16:38:54 +08:00
twwu 4b11d29ede fix: update VAR_TYPE_MAP and initialData handling in useConfigurations for improved variable processing 2025-06-13 15:57:16 +08:00
jyong b2b95412b9 r2 2025-06-13 15:04:22 +08:00
twwu 5c228bca4f feat: replace TypeIcon with AppIcon in SelectDataSet component for improved icon display 2025-06-13 15:02:31 +08:00
Jyong 7bd2509ad5
Update deploy-dev.yml 2025-06-13 14:50:38 +08:00
twwu 2a5d70d9e1 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 14:44:53 +08:00
twwu b0107f4128 fix: update z-index values for DialogWrapper components to ensure proper stacking context 2025-06-13 14:44:32 +08:00
Joel dc3c5362e4 Merge branch 'deploy/rag-dev' of https://github.com/langgenius/dify into deploy/rag-dev 2025-06-13 14:43:58 +08:00
Joel 1d106c3660 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 14:43:46 +08:00
zxhlyh fcb2fa04e7 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 14:43:38 +08:00
zxhlyh 55bff10f0d fix 2025-06-13 14:43:02 +08:00
Joel 45c9b77e82 fix: match var reg 2025-06-13 14:42:35 +08:00
twwu 767860e76b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 11:44:22 +08:00
twwu 80f656f79a fix: adjust layout and visibility conditions in CreateFormPipeline and ChunkPreview components 2025-06-13 11:38:26 +08:00
Joel c891eb28fc fix: in prompt editor show vars error 2025-06-13 11:12:11 +08:00
twwu b9fa3f54e9 refactor: refactor component imports and enhance layout for better responsiveness in dataset previews 2025-06-13 10:54:31 +08:00
twwu 4d2f904d72 feat: enhance WorkflowPreview and TemplateCard components with additional styling and className prop 2025-06-13 10:14:45 +08:00
twwu 26b7911177 Merge branch 'main' into feat/rag-pipeline 2025-06-13 09:49:03 +08:00
twwu dd91edf70b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 18:11:19 +08:00
twwu d994e6b6c7 feat: replace data source icons with AppIcon component in Item and DatasetItem 2025-06-12 18:11:00 +08:00
twwu aba48bde0b feat: update SettingsModal to integrate keyword number handling and refactor index method logic 2025-06-12 18:06:00 +08:00
twwu 3e5d9884cb test: add unit tests for AppIcon component with various rendering scenarios 2025-06-12 17:33:28 +08:00
twwu faadad62ff Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 17:01:41 +08:00
twwu 406d70e4a3 feat: integrate resetDatasetList hook into CreateOptions and TemplateCard components 2025-06-12 16:59:33 +08:00
zxhlyh f17f256b2b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 16:25:04 +08:00
zxhlyh b367f48de6 add datasource category 2025-06-12 16:24:21 +08:00
Jyong dee7b6eb22
Update deploy-dev.yml 2025-06-12 16:19:12 +08:00
twwu 6f17200dec refactor: update dataset handling to use runtime_mode instead of pipeline_id 2025-06-12 15:57:07 +08:00
twwu d3dbfbe8b3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 15:18:49 +08:00
twwu b1f250862f Merge branch 'main' into feat/rag-pipeline 2025-06-12 15:18:19 +08:00
twwu 141d6b1abf feat: implement document settings and pipeline settings components with localization support 2025-06-12 15:13:15 +08:00
zxhlyh a7eb534761 add datasource category 2025-06-12 15:05:36 +08:00
twwu 808f792f55 fix: update isPending condition and add indexing technique checks for segment detail and new segment modal 2025-06-12 11:11:03 +08:00
twwu 346d066128 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 18:12:17 +08:00
twwu 5c41922b8a fix: update file extension for downloaded DSL files and refine mutation keys for template operations 2025-06-11 18:11:38 +08:00
jyong 9c3e3b00d0 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 18:07:20 +08:00
jyong da3a3ce165 r2 2025-06-11 18:07:06 +08:00
jyong b525bc2b81 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 18:03:53 +08:00
jyong 14dc3e8642 r2 2025-06-11 18:03:21 +08:00
twwu e52c905aa5 refactor: improve layout-main component structure and readability 2025-06-11 17:46:50 +08:00
twwu 7b9a3c1084 fix: update translation keys for document availability messages in English and Chinese 2025-06-11 17:42:45 +08:00
jyong ce8ddae11e Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-11 17:30:45 +08:00
jyong 4e8184bc56 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	api/models/dataset.py
2025-06-11 17:30:30 +08:00
jyong 9eb8597957 r2 2025-06-11 17:29:14 +08:00
twwu cde584046d Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 17:19:08 +08:00
twwu b7f9d7e94a Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-11 17:17:15 +08:00
zxhlyh 88817bf974 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 17:17:10 +08:00
twwu 92e6c52c0e refactor: update handleUseTemplate to use callback for dataset creation and improve error handling; change HTTP method for dependency check 2025-06-11 17:17:09 +08:00
jyong 309dfe8829 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 17:16:52 +08:00
jyong 1d8b390584 Merge branch 'main' into feat/r2
# Conflicts:
#	docker/docker-compose.middleware.yaml
2025-06-11 17:16:27 +08:00
zxhlyh 7dea7f77ac Merge branch 'main' into feat/rag-pipeline 2025-06-11 17:12:38 +08:00
zxhlyh 4d9b15e519 fix 2025-06-11 17:11:57 +08:00
jyong 45a708f17e Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 17:10:42 +08:00
jyong 5f08a9314c r2 2025-06-11 17:10:20 +08:00
twwu 5802b2b437 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 16:39:18 +08:00
twwu f995436eec feat: implement chunk structure card and related hooks for dataset creation; update translations and refactor pipeline template fetching 2025-06-11 16:38:42 +08:00
jyong 25f0c61e65 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 16:36:56 +08:00
jyong 66fa68fa18 r2 2025-06-11 16:36:36 +08:00
jyong 3e5781c6f1 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 14:03:50 +08:00
jyong a6f7560d2f r2 2025-06-11 14:03:32 +08:00
twwu 45c76c1d68 refactor: rename icon property to icon_info in UpdateTemplateInfoRequest and related components 2025-06-11 13:39:07 +08:00
jyong 14d5af468c Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 13:12:41 +08:00
jyong 874e1bc41d r2 2025-06-11 13:12:18 +08:00
zxhlyh d2ae695b3b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 11:55:46 +08:00
zxhlyh 6ecdac6344 pipeline preview 2025-06-11 11:51:19 +08:00
twwu 3c2ce07f38 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 11:30:14 +08:00
twwu 5c58b11b22 refactor: standardize pipeline template properties and improve related components 2025-06-11 11:25:08 +08:00
jyong be92122f17 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 11:21:37 +08:00
jyong 2972a06f16 r2 2025-06-11 11:21:17 +08:00
twwu caa275fdbd refactor: remove unused websiteCrawlJobId state and related props from useWebsiteCrawl and CreateFormPipeline components; update loading and file preview components for consistent width 2025-06-11 10:50:03 +08:00
zxhlyh 5dbda7f4c5 merge main 2025-06-11 10:41:50 +08:00
zxhlyh 0564651f6f publish as customized pipeline 2025-06-11 10:40:49 +08:00
twwu eff8108f1c refactor: update dataset model and improve batch action component 2025-06-11 10:24:07 +08:00
jyong 127a77d807 r2 2025-06-10 19:22:08 +08:00
jyong 265842223c Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 18:20:44 +08:00
jyong 95a24156de r2 2025-06-10 18:20:32 +08:00
jyong 80ca5b3356 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 18:16:45 +08:00
jyong e934503fa0 r2 2025-06-10 18:16:30 +08:00
jyong 442bcd18c0 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-10 17:59:27 +08:00
jyong aeb1d1946c r2 2025-06-10 17:59:14 +08:00
zxhlyh 12f2913e08 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 17:40:08 +08:00
zxhlyh 0aeeee49f7 fix: draft sync 2025-06-10 17:39:20 +08:00
jyong eb7479b1ea Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 17:12:12 +08:00
jyong 80b219707e r2 2025-06-10 17:11:49 +08:00
twwu 65ac022245 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-10 16:06:58 +08:00
twwu 6e6090d5a9 test(SegmentedControl): add test cases 2025-06-10 16:06:42 +08:00
jyong 58b5daeef3 r2 2025-06-10 15:56:28 +08:00
zxhlyh 33fd1fa79d Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 15:40:52 +08:00
zxhlyh 978118f770 fix: datasource 2025-06-10 15:40:15 +08:00
zxhlyh a2610b22cc Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 15:20:19 +08:00
zxhlyh f4789d750d publish as pipeline 2025-06-10 15:19:47 +08:00
zxhlyh 176f9ea2f4 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 15:10:52 +08:00
zxhlyh 5e71f7c825 publish as pipeline 2025-06-10 15:10:13 +08:00
jyong 7624edd32d r2 2025-06-10 14:56:18 +08:00
jyong 7b79354849 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 14:54:27 +08:00
jyong a7ff2ab470 r2 2025-06-10 14:53:07 +08:00
twwu d3eedaf0ec feat(i18n): add new translation entries for local file, website crawl, and online document 2025-06-10 14:22:09 +08:00
jyong bcb0496bf4 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 14:13:32 +08:00
jyong 4d967544f3 r2 2025-06-10 14:13:10 +08:00
jyong c18ee4be50 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 11:45:19 +08:00
jyong 65873aa411 r2 2025-06-10 11:44:52 +08:00
jyong b95256d624 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 11:00:11 +08:00
jyong c0d3452494 r2 2025-06-10 10:59:44 +08:00
twwu c91456de1b fix(ChunkStructure): add disabled prop to OptionCard component 2025-06-10 10:43:40 +08:00
zxhlyh e1ce156433 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 10:16:49 +08:00
zxhlyh 9e19ed4e67 knowledge base node checklist 2025-06-10 10:16:13 +08:00
jyong ba383b1b0d Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 10:00:48 +08:00
jyong ad3d9cf782 r2 2025-06-10 10:00:20 +08:00
twwu 69053332e4 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 09:47:36 +08:00
zxhlyh 5b4d04b348 Merge branch 'main' into feat/rag-pipeline 2025-06-10 09:38:06 +08:00
jyong 47664f8fd3 r2 2025-06-09 14:00:34 +08:00
twwu 8d8f21addd Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 18:52:57 +08:00
twwu 9b9640b3db refactor: remove job ID handling from website crawl components and update related hooks 2025-06-06 18:52:32 +08:00
jyong 83ba61203b Merge branch 'feat/r2' into deploy/rag-dev 2025-06-06 17:47:47 +08:00
jyong fcbd5febeb r2 2025-06-06 17:47:06 +08:00
twwu b8813e199f Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 17:27:14 +08:00
jyong 2322496552 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	docker/docker-compose.middleware.yaml
2025-06-06 17:15:24 +08:00
jyong 21a3509bef r2 2025-06-06 17:14:43 +08:00
twwu 3e2f12b065 refactor: update website crawl handling and improve parameter naming in pipeline processing 2025-06-06 17:00:34 +08:00
Jyong 55e20d189a
Update deploy-dev.yml 2025-06-06 16:16:44 +08:00
jyong 1aa13bd20d r2 2025-06-06 16:05:49 +08:00
jyong cc2dd052df Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-06 16:03:46 +08:00
jyong 4ffdf68a20 r2 2025-06-06 16:03:35 +08:00
twwu 547bd3cc1b refactor: rename cancel editor handler and improve variable name validation in field list 2025-06-06 15:54:55 +08:00
zxhlyh f3e9761c75 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 15:35:53 +08:00
twwu 83ca59e0f1 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-06 15:35:50 +08:00
twwu d725aa8791 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-06 15:35:31 +08:00
zxhlyh cc8ee0ff69 dsl 2025-06-06 15:35:19 +08:00
twwu 4a249c40b1 feat: enhance input field configurations with blur listeners and update translations for display name 2025-06-06 15:35:19 +08:00
jyong 04e4a1e3aa Merge branch 'feat/r2' into deploy/rag-dev 2025-06-06 15:20:06 +08:00
jyong d2d5fc62ae r2 2025-06-06 15:19:53 +08:00
jyong 52460f6929 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-06 15:06:59 +08:00
jyong 06dfc32e0f Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	docker/docker-compose.middleware.yaml
2025-06-06 15:06:47 +08:00
jyong 0ca38d8215 r2 2025-06-06 15:06:26 +08:00
zxhlyh 3da6becad3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 15:02:00 +08:00
zxhlyh f9d0a7bdc8 Merge branch 'main' into feat/rag-pipeline 2025-06-06 15:01:27 +08:00
zxhlyh e961722597 dsl 2025-06-06 15:00:37 +08:00
zxhlyh 2ddd2616ec confirm publish 2025-06-06 14:24:02 +08:00
twwu a82a9fb9d4 fix: update condition for handling datasource selection in DataSourceOptions 2025-06-06 14:24:02 +08:00
jyong 3fce6f2581 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	api/services/rag_pipeline/rag_pipeline.py
2025-06-06 14:23:05 +08:00
zxhlyh 3db864561e confirm publish 2025-06-06 14:22:15 +08:00
jyong d2750f1a02 r2 2025-06-06 14:22:00 +08:00
jyong 30a50c5cc8 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-06 12:08:20 +08:00
jyong 0ff746ebf6 r2 2025-06-06 12:08:09 +08:00
twwu 5193fa2118 fix: update condition for handling datasource selection in DataSourceOptions 2025-06-06 11:43:00 +08:00
zxhlyh 9a0dc82e6a Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 10:55:43 +08:00
zxhlyh 8e4165defe datasource 2025-06-06 10:55:13 +08:00
twwu d917bc8ed0 Merge branch 'deploy/rag-dev' of https://github.com/langgenius/dify into deploy/rag-dev 2025-06-06 10:53:23 +08:00
twwu ef7bd262c5 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 10:52:53 +08:00
jyong d3e29ffa74 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-06 10:40:57 +08:00
jyong 70432952fd r2 2025-06-06 10:40:06 +08:00
twwu cf2ef93ad5 Merge branch 'main' into feat/rag-pipeline 2025-06-06 10:10:53 +08:00
twwu cbf0864edc refactor: refactor online documents handling and update related components 2025-06-06 10:08:19 +08:00
twwu bce2bdd0de Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-05 18:28:58 +08:00
twwu 82e7c8a2f9 refactor: update datasource handling and improve documentation properties in pipeline components 2025-06-05 18:28:48 +08:00
Joel 2acdb0a4ea fix: var type error in calculating var type in data source type 2025-06-05 17:40:49 +08:00
twwu 350ea6be6e fix: correct spacing and formatting in variable utility functions 2025-06-05 17:32:03 +08:00
zxhlyh 4664174ef3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-05 16:51:55 +08:00
zxhlyh f0413f359a datasource 2025-06-05 16:51:19 +08:00
jyong 53b32c8b22 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 16:44:02 +08:00
jyong b8ef1d9585 r2 2025-06-05 16:43:47 +08:00
Joel 90ca98ff3a fix: in node show rag var 2025-06-05 16:41:04 +08:00
Joel d4a1d045f8 fix: to new var format 2025-06-05 16:36:41 +08:00
twwu 91fefa0e37 refactor: improve layout and event handling in Header and FieldItem components 2025-06-05 15:44:18 +08:00
jyong 067ec17539 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 15:29:37 +08:00
jyong c084b57933 r2 2025-06-05 15:28:44 +08:00
zxhlyh 876be7e6e9 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-05 15:07:59 +08:00
zxhlyh 468bfdfed9 datasource 2025-06-05 15:07:29 +08:00
jyong 82d817f612 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 14:56:19 +08:00
jyong 9e84a5321d r2 2025-06-05 14:55:09 +08:00
jyong d77e27ac05 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 14:10:07 +08:00
jyong 8a86a2c817 r2 2025-06-05 14:09:50 +08:00
twwu fdc4c36b77 refactor: replace useStore with useDatasetDetailContextWithSelector for pipeline ID retrieval 2025-06-05 14:05:27 +08:00
jyong 52c118f5b8 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-05 11:46:19 +08:00
jyong 5d7c7023c3 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 11:46:07 +08:00
jyong 3e0a10b7ed r2 2025-06-05 11:45:53 +08:00
twwu 84f5272f72 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-05 11:40:47 +08:00
twwu 6286f368f1 refactor: replace ImagePlus icon with RiImageCircleAiLine and improve tab button styling 2025-06-05 11:21:17 +08:00
jyong cb2ca0b533 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 11:12:32 +08:00
jyong 5fe5da7c1d r2 2025-06-05 11:12:06 +08:00
twwu c83370f701 refactor: simplify workflow draft synchronization in InputFieldDialog 2025-06-05 11:07:28 +08:00
twwu 7506867fb9 Merge branch 'main' into feat/rag-pipeline 2025-06-05 10:29:18 +08:00
twwu 842136959b feat: update data source handling and improve processing parameters integration 2025-06-05 10:24:25 +08:00
twwu 4c2cc98ebc Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-04 18:37:25 +08:00
twwu 44b9f49ab1 feat: enhance field item interaction and add preprocessing parameters hooks 2025-06-04 18:37:19 +08:00
zxhlyh f7f7952951 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-04 18:10:03 +08:00
zxhlyh a7fa5044e3 datasource 2025-06-04 18:09:31 +08:00
jyong eb84134706 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 17:39:46 +08:00
jyong fbca9010f3 r2 2025-06-04 17:39:31 +08:00
Joel 0bf0c7dbe8 feat: var type to inner 2025-06-04 17:34:22 +08:00
jyong e071bd63e6 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 17:29:55 +08:00
jyong 8a147a00e8 r2 2025-06-04 17:29:39 +08:00
twwu c9a4c66b07 fix: update input type from 'number-input' to 'number' for consistency 2025-06-04 16:58:08 +08:00
jyong edec654b68 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 16:51:52 +08:00
jyong a82ab1d152 r2 2025-06-04 16:51:23 +08:00
twwu 9934eac15c refactor: refactor data source handling and add form for document processing 2025-06-04 16:50:27 +08:00
Joel c155afac29 chore: rename rag var spell error 2025-06-04 16:43:24 +08:00
Joel 7080c9f279 fix: show rag vars names 2025-06-04 16:29:36 +08:00
jyong e41699cbc8 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 16:23:34 +08:00
jyong 133193e7d0 r2 2025-06-04 16:23:12 +08:00
zxhlyh 9d6371e0a3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-04 15:48:58 +08:00
zxhlyh dfe091789c datasource 2025-06-04 15:48:29 +08:00
zxhlyh 4c9bf78363 knowledge base node checklist 2025-06-04 15:18:03 +08:00
Jyong b95ecaf8a3
Update build-push.yml 2025-06-04 15:17:39 +08:00
jyong 7a0e8108ae Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 15:16:47 +08:00
twwu 3afd5e73c9 feat: enhance input field dialog with preview functionality and global inputs 2025-06-04 15:16:02 +08:00
jyong c09c8c6e5b r2 2025-06-04 15:12:05 +08:00
Joel cab491795a chore: some special to some fns 2025-06-04 14:54:11 +08:00
twwu e290ddc3e5 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-04 14:50:16 +08:00
twwu db154e33b7 Merge branch 'main' into feat/rag-pipeline 2025-06-04 14:48:10 +08:00
zxhlyh 32f9004b5f merge feat/rag-pipeline 2025-06-04 11:43:38 +08:00
zxhlyh 225402280e datasource auth 2025-06-04 11:39:31 +08:00
jyong abcca11479 r2 2025-06-03 19:10:40 +08:00
jyong 9cdd2cbb27 r2 2025-06-03 19:02:57 +08:00
jyong 309fffd1e4 Merge branch 'main' into feat/r2
# Conflicts:
#	api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
#	api/core/workflow/entities/node_entities.py
#	api/core/workflow/enums.py
2025-06-03 18:56:49 +08:00
twwu 0a9f50e85f Merge branch 'main' into feat/rag-pipeline 2025-06-03 18:44:53 +08:00
twwu ed1d71f4d0 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-03 18:33:07 +08:00
twwu 7039ec33b9 refactor: update retrieval search method from invertedIndex to keywordSearch 2025-06-03 18:33:01 +08:00
Joel 025dc7c781 feat: can show rag vars 2025-06-03 18:32:52 +08:00
jyong 4130c50643 r2 2025-06-03 18:32:39 +08:00
jyong 7b7f8ef51d r2 2025-06-03 18:12:24 +08:00
twwu bad451d5ec Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-03 17:42:53 +08:00
twwu 87c15062e6 feat: enhance document processing with embedding and rule detail components 2025-06-03 17:42:40 +08:00
jyong 573cd15e77 r2 2025-06-03 16:52:21 +08:00
jyong ab1730bbaa r2 2025-06-03 16:51:21 +08:00
zxhlyh 163bae3aaf input rag variable 2025-06-03 16:07:58 +08:00
jyong 270edd43ab r2 2025-06-03 15:53:17 +08:00
jyong b8f3b23b1a r2 2025-06-03 15:51:31 +08:00
zxhlyh b9c6496fea datasource default value & publish sync draft 2025-06-03 14:42:17 +08:00
jyong 0486aa3445 r2 2025-06-03 13:30:51 +08:00
twwu 5fb771218c fix: update types and improve data handling in pipeline components 2025-06-03 10:14:48 +08:00
jyong 3fb02a7933 r2 2025-05-30 17:28:09 +08:00
twwu 898495b5c4 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-30 15:54:46 +08:00
twwu 08624878cf fix: update ChunkStructureEnum values for consistency with model naming 2025-05-30 15:53:32 +08:00
zxhlyh 6fe473f0fa knowledge base node 2025-05-30 15:45:58 +08:00
jyong 11cf23e5fc Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-05-30 15:42:56 +08:00
jyong 631768ea1d r2 2025-05-30 15:42:36 +08:00
Jyong e1d658b482
Update build-push.yml 2025-05-30 15:26:51 +08:00
twwu 1274aaed5d fix: add dataset mutation context to Popup component 2025-05-30 15:17:19 +08:00
twwu 9be036e0ca Merge branch 'main' into feat/rag-pipeline 2025-05-30 15:10:16 +08:00
Jyong 7284569c5f
Update build-push.yml 2025-05-30 01:02:33 +08:00
jyong 976b465e76 r2 2025-05-30 00:55:06 +08:00
jyong 804e55824d r2 2025-05-30 00:37:36 +08:00
jyong 69529fb16d r2 2025-05-30 00:37:27 +08:00
jyong cb5cfb2dae r2 2025-05-30 00:03:43 +08:00
jyong a826879cf7 Merge branch 'main' into feat/r2 2025-05-29 23:04:38 +08:00
jyong e7c48c0b69 r2 2025-05-29 23:04:04 +08:00
zxhlyh 558a280fc8 datasource type 2025-05-29 18:21:29 +08:00
twwu 2158c03231 fix: update button disabled state to reflect publishedAt status in Popup component 2025-05-29 17:53:08 +08:00
twwu a61f1f8eb0 refactor: replace anchor tag with Link component for navigation in Actions 2025-05-29 17:42:00 +08:00
twwu 9f724c19db refactor: refactor navigation components to use Link for improved routing 2025-05-29 17:33:04 +08:00
twwu 4ae936b263 refactor: refactor navigation handling in dataset components to use button elements 2025-05-29 15:48:54 +08:00
twwu 80875a109a feat: add logic to handle navigation based on pipeline status in DatasetCard 2025-05-29 15:22:29 +08:00
zxhlyh 121e54f3e3 plugins page 2025-05-29 15:18:27 +08:00
zxhlyh 1c2c4b62f8 run & tracing 2025-05-29 14:31:35 +08:00
twwu 9176790adf feat: enhance dataset detail layout with button disable logic based on pipeline status 2025-05-29 14:06:12 +08:00
zxhlyh 6ff6525d1d test run 2025-05-29 11:30:42 +08:00
zxhlyh 71ce505631 data source panel 2025-05-29 11:03:22 +08:00
twwu 11dfe3713f refactor: enhance document upload flow with step indicators and file preview handling 2025-05-29 10:18:11 +08:00
jyong a025db137d Merge branch 'main' into feat/r2 2025-05-29 09:54:28 +08:00
jyong 797d044714 r2 2025-05-29 09:53:42 +08:00
zxhlyh c4169f8aa0 Merge branch 'main' into feat/rag-pipeline 2025-05-29 09:39:36 +08:00
twwu 3005419573 feat: implement document upload steps and enhance test run panel with new hooks and components 2025-05-28 18:34:26 +08:00
jyong 7f59ffe7af r2 2025-05-28 17:56:04 +08:00
twwu cc7ad5ac97 feat: add input field variables change sync 2025-05-28 16:38:49 +08:00
zxhlyh 769b5e185a workflow init config staletime 2025-05-28 15:36:10 +08:00
twwu 9e763c9e87 feat: enhance file uploader and test run panel with batch upload limits and tooltips 2025-05-28 14:51:10 +08:00
zxhlyh b9214ca76b knowledge base default data 2025-05-28 13:57:24 +08:00
twwu 29d2f2339b refactor: enhance document preview functionality and refactor form handling 2025-05-28 13:44:37 +08:00
zxhlyh 5ac1e3584d Merge branch 'main' into feat/rag-pipeline 2025-05-28 11:01:56 +08:00
twwu dd0cf6fadc refactor: streamline data source type handling and improve FieldList props 2025-05-28 10:07:12 +08:00
zxhlyh b320ebe2ba datasource variables 2025-05-27 18:44:29 +08:00
twwu 377093b776 fix: conditionally render FieldListContainer based on inputFields length 2025-05-27 18:19:10 +08:00
twwu 70119a054a fix: add is_preview flag to datasource submission and improve dataset card rendering logic 2025-05-27 17:54:39 +08:00
zxhlyh 69d1e3ec7d input field in datasource 2025-05-27 17:42:30 +08:00
twwu 365157c37d refactor: enhance action button logic to include workflow running state 2025-05-27 15:43:10 +08:00
zxhlyh 4bc0a1bd37 knowledge base node init 2025-05-27 15:28:35 +08:00
twwu d6640f2adf refactor: streamline input field data conversion and enhance datasource component 2025-05-27 15:25:35 +08:00
twwu 987f845e79 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-27 14:42:12 +08:00
zxhlyh 84daf49047 node meta data 2025-05-27 14:40:56 +08:00
twwu 31e183ef0d refactor: enhance datasource handling by adding fileExtensions support 2025-05-27 14:39:52 +08:00
twwu 754a1d1197 refactor: add DatasourceIcon component and update related hooks and options 2025-05-27 14:17:55 +08:00
twwu 049a6de4b3 refactor: update data source handling and replace icon implementation 2025-05-27 13:52:43 +08:00
zxhlyh 6bd28cadc4 datasource icon 2025-05-27 13:42:13 +08:00
zxhlyh 3b9a0b1d25 datasource icon 2025-05-27 11:27:25 +08:00
twwu db963a638c Merge branch 'main' into feat/rag-pipeline 2025-05-27 11:03:49 +08:00
twwu dcb4c9e84a refactor: refactor datasource type handling 2025-05-27 11:01:38 +08:00
jyong 5fc2bc58a9 r2 2025-05-27 00:01:23 +08:00
twwu d333645e09 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-26 17:48:59 +08:00
twwu 2812c774c6 fix(i18n): Update economy index method description to include keyword count 2025-05-26 17:48:54 +08:00
zxhlyh e2f3f0ae4c datasource 2025-05-26 17:47:03 +08:00
Yeuoly 83ca7f8deb feat: add datasource support to PluginDeclaration and PluginCategory 2025-05-26 17:32:25 +08:00
zxhlyh e6c6fa8ed8 tool icon 2025-05-26 17:28:16 +08:00
jyong 678d6ffe2b r2 2025-05-26 17:00:16 +08:00
zxhlyh cef77a3717 datasource icon 2025-05-26 16:41:50 +08:00
zxhlyh 28726b6cf3 block selector 2025-05-26 16:33:08 +08:00
jyong ef0e41de07 r2 2025-05-26 16:02:11 +08:00
zxhlyh dc2b63b832 Merge branch 'main' into feat/rag-pipeline 2025-05-26 15:58:15 +08:00
zxhlyh 0478fc9649 datasource node variable 2025-05-26 15:57:34 +08:00
jyong 1b07e612d2 r2 2025-05-26 15:49:37 +08:00
jyong 38cce3f62a r2 2025-05-26 14:52:09 +08:00
jyong 35be8721b9 Merge branch 'main' into feat/r2 2025-05-26 14:50:33 +08:00
jyong 665ffbdc10 r2 2025-05-26 14:49:59 +08:00
zxhlyh b5f88c77a3 datasource list 2025-05-26 14:13:59 +08:00
twwu 324c0d7b4c refactor: improve layout and styling in TestRunPanel and FilePreview components 2025-05-26 14:06:04 +08:00
twwu 13e3f17493 refactor: standardize terminology by renaming 'data-source' to 'datasource' across components and translations 2025-05-26 10:50:39 +08:00
twwu 841bd35ebb Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-26 09:49:15 +08:00
twwu ccefd41606 refactor: rename InputType to InputTypeEnum and update related usages for consistency 2025-05-26 09:48:17 +08:00
jyong ec1c4efca9 r2 2025-05-25 23:09:01 +08:00
jyong 0f10852b6b Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-05-23 19:30:59 +08:00
jyong 6d547447d3 r2 2025-05-23 19:30:48 +08:00
Yeuoly 6123f1ab21 refactor: reorganize imports and fix datasource endpoint URL 2025-05-23 19:22:50 +08:00
zxhlyh e7370766bd datasource 2025-05-23 18:19:28 +08:00
twwu db4958be05 fix: fix modal handling in InputFieldEditor 2025-05-23 17:52:00 +08:00
Yeuoly a15bf8e8fe remove output schema 2025-05-23 17:35:26 +08:00
jyong 70d2c78176 r2 2025-05-23 17:13:09 +08:00
jyong 42fcda3dc8 r2 2025-05-23 17:11:56 +08:00
twwu ac049d938e Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-23 16:57:46 +08:00
twwu 3af61f4b5d refactor: update input type mappings and enums for consistency across components 2025-05-23 16:57:27 +08:00
zxhlyh e19adbbbc5 datasource 2025-05-23 16:27:49 +08:00
jyong 64d997fdb0 r2 2025-05-23 15:55:41 +08:00
Yeuoly a49942b949 fix: rename first_step_parameters 2025-05-23 15:12:31 +08:00
Yeuoly 4460d96e58 feat: add oauth schema 2025-05-23 15:11:40 +08:00
Yeuoly a7d5f2f53b apply ruff 2025-05-23 15:10:56 +08:00
twwu c9bf99a1e2 refactor: update input variable types and initial data handling in pipeline components 2025-05-23 15:10:20 +08:00
Yeuoly 4300ebc8aa fix: remove provide type 2025-05-23 15:10:16 +08:00
zxhlyh 720ce79901 checklist & datasource icon 2025-05-23 14:26:06 +08:00
twwu 693107a6c8 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-23 13:54:59 +08:00
twwu 583db24ee7 refactor: update CustomActions type to use structured props across form components 2025-05-23 13:54:49 +08:00
zxhlyh 7d92574e02 datasource panel 2025-05-23 11:51:17 +08:00
twwu 5aaa06c8b0 refactor: integrate routing for document creation in Popup component 2025-05-23 11:19:57 +08:00
twwu 52b773770b refactor: update datasource handling in InputFieldDialog and Datasource components 2025-05-23 11:07:48 +08:00
zxhlyh 23adc7d8a8 datasource 2025-05-23 10:47:31 +08:00
twwu e3708bfa85 refactor: enhance ChunkPreview with form handling and preview functionality 2025-05-23 10:29:59 +08:00
twwu 7d65e9980c Merge branch 'main' into feat/rag-pipeline 2025-05-23 09:35:08 +08:00
jyong b93d26ee9f Merge remote-tracking branch 'origin/feat/r2' into feat/r2
# Conflicts:
#	api/core/datasource/entities/datasource_entities.py
2025-05-23 00:06:51 +08:00
jyong b82b26bba5 r2 2025-05-23 00:05:57 +08:00
twwu 21c24977d8 refactor: enhance document processing UI and functionality with new components and translations 2025-05-22 23:05:58 +08:00
zxhlyh fe435c23c3 i18n 2025-05-22 17:44:07 +08:00
twwu ead1209f98 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-22 17:41:18 +08:00
twwu 3994bb1771 refactor: refactor document processing components and update translations 2025-05-22 17:39:39 +08:00
zxhlyh 327690e4a7 merge main 2025-05-22 16:45:13 +08:00
zxhlyh c2a7e0e986 version panel 2025-05-22 16:43:30 +08:00
twwu faf6b9ea03 refactor: refactor preview components 2025-05-22 14:49:40 +08:00
Yeuoly 3bfc602561 refactor: update datasource entity structure and parameter handling
- Renamed and split parameters in DatasourceEntity into first_step_parameters and second_step_parameters.
- Updated validation methods for new parameter structure.
- Adjusted datasource_node to reference first_step_parameters.
- Cleaned up unused imports and improved type hints in workflow.py.
2025-05-21 20:36:26 +08:00
Yeuoly 5fa2aca2c8 feat: add oauth schema to datasource 2025-05-21 20:29:59 +08:00
twwu 69a60101fe Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-21 16:37:08 +08:00
twwu b18519b824 refactor: add create-from-pipeline page and associated components for document processing 2025-05-21 16:37:02 +08:00
zxhlyh 0d01025254 parallel check 2025-05-21 16:34:41 +08:00
zxhlyh eef1542cb3 use available nodes 2025-05-21 15:51:05 +08:00
twwu 9aef4b6d6b refactor: Notion component and add NotionPageSelector for improved page selection 2025-05-21 14:02:37 +08:00
zxhlyh 7dba83754f Merge branch 'main' into feat/rag-pipeline 2025-05-21 13:42:28 +08:00
zxhlyh e2585bc778 Merge branch 'main' into feat/rag-pipeline 2025-05-21 11:27:50 +08:00
twwu cc6e2558ef Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-21 10:53:29 +08:00
twwu 20343facad refactor: website data source components and hooks 2025-05-21 10:53:18 +08:00
zxhlyh eff123a11c checklist 2025-05-20 16:52:45 +08:00
jyong 9bafd3a226 r2 2025-05-20 15:41:10 +08:00
jyong 82be119fec Merge branch 'main' into feat/r2 2025-05-20 15:18:52 +08:00
jyong a64df507f6 r2 2025-05-20 15:18:33 +08:00
twwu cf73faf174 feat: add FileUploaderField and TextAreaField components; enhance BaseField to support file inputs 2025-05-20 15:09:30 +08:00
jyong ba52bf27c1 r2 2025-05-20 14:57:26 +08:00
zxhlyh 55f4177b01 merge main 2025-05-20 14:03:54 +08:00
twwu 14a9052d60 refactor: update variable naming for consistency and improve data source handling in pipeline components 2025-05-20 11:42:22 +08:00
twwu 314a2f9be8 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-19 18:21:29 +08:00
twwu 8eee344fbb fix: correct hover state logic and refactor environment variable handling in FieldItem and usePipelineInit 2025-05-19 18:21:17 +08:00
zxhlyh 0e0a266142 merge main 2025-05-19 18:11:45 +08:00
zxhlyh 7bce35913d i18n 2025-05-19 18:09:12 +08:00
twwu 7898dbd5bf Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-19 16:26:24 +08:00
twwu bd1073ff1a refactor: update input variable types to use PipelineInputVarType and simplify form data handling 2025-05-19 16:26:13 +08:00
zxhlyh 1bbd572593 option card 2025-05-19 15:59:04 +08:00
zxhlyh 5199297f61 run and history 2025-05-19 14:23:40 +08:00
Yeuoly c5a2f43ceb refactor: replace BuiltinToolManageService with RagPipelineManageService for datasource management and remove unused datasource engine and related code 2025-05-16 18:42:07 +08:00
twwu 8d4ced227e fix: update click handler logic in OptionCard 2025-05-16 17:54:41 +08:00
zxhlyh f481075f8f pipeline sync draft 2025-05-16 17:48:01 +08:00
zxhlyh 836cf6453e pipeline sync draft 2025-05-16 17:48:01 +08:00
jyong 8bea88c8cc r2 2025-05-16 17:22:17 +08:00
twwu 4b7274f9a5 fix: update link in Form component and correct endpoint for related apps query 2025-05-16 16:49:43 +08:00
twwu 7de5585da6 refactor: replace SWR with custom hooks for dataset detail and related apps; update context usage in components 2025-05-16 16:32:25 +08:00
twwu 87dc80f6fa fix: add cursor pointer and hover effect to MemberItem; adjust padding in PermissionItem 2025-05-16 15:52:28 +08:00
twwu a008c04331 refactor: standardize naming for load more handlers and navigation items across components 2025-05-16 15:43:28 +08:00
twwu 56b66b8a57 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-16 15:15:00 +08:00
twwu 35a7add4e9 refactor: refactor pipeline-related components and services to use template terminology 2025-05-16 15:14:50 +08:00
zxhlyh f1fe143962 add i18n 2025-05-16 14:53:39 +08:00
jyong 9e72afee3c r2 2025-05-16 14:00:35 +08:00
jyong 613b94a6e6 r2 2025-05-16 13:45:47 +08:00
jyong 7b0d38f7d3 r2 2025-05-16 12:02:35 +08:00
jyong 4ff971c8a3 r2 2025-05-16 11:26:56 +08:00
twwu 019ef74bf2 refactor: replace Container with List, update DatasetCard z-index, and implement useDatasetList for data fetching 2025-05-16 10:50:31 +08:00
zxhlyh 2670557258 merge main 2025-05-16 10:09:24 +08:00
jyong 93ac6d37e9 r2 2025-05-15 16:44:55 +08:00
jyong e710a8402c r2 2025-05-15 16:07:17 +08:00
jyong 360f8a3375 Merge branch 'main' into feat/r2 2025-05-15 15:15:23 +08:00
jyong 818eb46a8b r2 2025-05-15 15:14:52 +08:00
zxhlyh f5c297708b Merge branch 'main' into feat/rag-pipeline 2025-05-15 14:52:54 +08:00
zxhlyh bf8324f7f7 tag filter 2025-05-15 14:52:00 +08:00
zxhlyh b730d153ea Merge branch 'main' into feat/rag-pipeline 2025-05-15 10:27:47 +08:00
zxhlyh 11977596c9 merge main 2025-05-15 10:14:40 +08:00
twwu 612dca8b7d feat: add WorkflowPreview component to Details and define graph structure in PipelineTemplateByIdResponse 2025-05-14 18:20:29 +08:00
zxhlyh 53018289d4 workflow preview 2025-05-14 18:02:58 +08:00
twwu 958ff44707 refactor: simplify import DSL confirmation request structure 2025-05-14 16:27:59 +08:00
twwu d910770b3c feat: add dataset_id to DSL import responses and update routing logic in CreateFromDSLModal 2025-05-14 16:00:17 +08:00
twwu 5a8f10520f feat: refactor template card actions and details to standardize prop names; add create modal for dataset creation 2025-05-14 15:53:17 +08:00
twwu df928772c0 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-14 14:49:10 +08:00
twwu b713218cab feat: add DSL modal header and tab components; enhance pipeline import functionality 2025-05-14 14:49:01 +08:00
zxhlyh 9ea2123e7f component add readonly 2025-05-14 11:14:49 +08:00
twwu de0cb06f8c feat: implement create dataset pipeline forms and modals 2025-05-14 10:48:54 +08:00
twwu cfb6d59513 Merge branch 'main' into feat/rag-pipeline 2025-05-13 18:38:26 +08:00
twwu 4c30d1c1eb feat: Enhance InputFieldDialog and workflow hooks to handle ragPipelineVariables 2025-05-13 16:33:38 +08:00
twwu 5bb02c79cc Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-13 16:18:07 +08:00
twwu 0a891e5392 feat: Update retrieval method configuration to use new OptionCard component and improve layout 2025-05-13 16:17:59 +08:00
zxhlyh f6978ce6b1 fix: pipeline init 2025-05-13 16:02:36 +08:00
twwu 4d68aadc1c Refactor: Replace IndexMethodRadio with IndexMethod component, add keyword number functionality, and update related translations 2025-05-13 15:35:21 +08:00
twwu cef6463847 feat: Enhance dataset settings with chunk structure and icon selection 2025-05-13 11:07:31 +08:00
zxhlyh 39b8331f81 merge main 2025-05-09 18:20:56 +08:00
twwu 212d4c5899 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-09 16:35:14 +08:00
twwu 97ec855df4 feat: enhance input field management with internationalization support and improved state handling 2025-05-09 16:35:09 +08:00
zxhlyh d83b9b70e3 fix: import 2025-05-09 16:25:34 +08:00
zxhlyh b51c18c2cf pipeline init 2025-05-09 15:53:31 +08:00
twwu 7e31da7882 refactor: update data source handling and improve internationalization support in test run panel 2025-05-09 12:56:57 +08:00
twwu d9ed61287d Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-08 18:29:58 +08:00
twwu 6024dbe98d refactor: simplify type definitions in form components and update related configurations 2025-05-08 18:29:49 +08:00
zxhlyh 13ce6317f1 pipeline header 2025-05-08 18:27:44 +08:00
twwu 0099f2296d Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-08 16:26:37 +08:00
twwu 2d93bc6725 refactor: update DatasetInfo component layout and styling for better responsiveness 2025-05-08 16:26:30 +08:00
zxhlyh cb52f9ecc5 pipeline header 2025-05-08 15:32:19 +08:00
twwu 1fbeb3a21a refactor: enhance dataset components with new icons and improve layout structure 2025-05-08 13:48:14 +08:00
twwu 38f1a42ce8 refactor: remove unused icon components and update imports in dataset components 2025-05-08 11:15:27 +08:00
twwu 3d11af2dd6 refactor: update imports for knowledge and pipeline components 2025-05-08 10:44:26 +08:00
twwu d1fd5db7f8 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-08 09:52:53 +08:00
twwu c240cf3bb1 refactor: dataset creation to support pipeline datasets, update related types and hooks 2025-05-08 09:42:02 +08:00
zxhlyh bbbcd68258 portal element 2025-05-07 18:14:26 +08:00
twwu 7ce9710229 feat: add pipeline template details and import functionality, enhance dataset pipeline management 2025-05-07 18:09:38 +08:00
zxhlyh 3f7f21ce70 show test run panel 2025-05-07 17:31:06 +08:00
zxhlyh fa8ab4ea04 rag pipeline 2025-05-07 16:30:24 +08:00
jyong 3f1363503b r2 2025-05-07 16:19:09 +08:00
zxhlyh 3f52f491d7 feat: knowledge base node 2025-05-07 15:08:37 +08:00
twwu e86a3fc672 feat: Enhance dataset pipeline creation and management with new export and delete functionalities, improved internationalization, and refactor for better clarity 2025-05-07 14:29:01 +08:00
twwu 6f77f67427 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-07 11:30:20 +08:00
twwu 4025cd0b46 feat: Refactor dataset pipeline creation components and add internationalization support 2025-05-07 11:30:13 +08:00
zxhlyh 3bbb22750c merge main 2025-05-06 18:28:44 +08:00
zxhlyh d196872059 merge main 2025-05-06 17:31:48 +08:00
zxhlyh a478d95950 feat: knowledge base node 2025-05-06 17:25:18 +08:00
twwu 12c060b795 feat: enhance dataset icon handling by making icon background and URL optional 2025-05-06 16:58:37 +08:00
twwu c480c3d881 feat: enhance dataset detail layout with new icon structure and additional document count display 2025-05-06 16:37:21 +08:00
jyong a998022c12 r2 2025-05-06 16:18:34 +08:00
jyong a25cc4e8af r2 2025-05-06 13:56:13 +08:00
twwu b4bccf5fef feat: Add External Knowledge Base and Pipeline icons, update DatasetCard component 2025-05-06 11:58:53 +08:00
twwu 14ad34af71 feat: enhance dataset creation UI with new pipeline list and edit functionality 2025-05-03 17:16:00 +08:00
twwu 7ed398267f Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-03 13:43:43 +08:00
twwu fc9556e057 feat: add dataset creation components and functionality 2025-05-03 13:43:37 +08:00
zxhlyh acf6872a50 fix: variable selector 2025-04-30 16:55:00 +08:00
twwu e689f21a60 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-30 14:16:25 +08:00
twwu a7f9259e27 feat: new Dataset list 2025-04-30 14:16:13 +08:00
zxhlyh a46b4e3616 Merge branch 'main' into feat/rag-pipeline 2025-04-29 16:27:49 +08:00
zxhlyh e7e12c1d2e fix: node selector 2025-04-29 16:26:53 +08:00
zxhlyh 66176c4d71 fix: node default 2025-04-29 16:14:20 +08:00
twwu 2613a380b6 fix: Correct link path for creating datasets and optimize Link component with memoization 2025-04-29 15:47:29 +08:00
twwu 9392ce259f feat: Refactor dataset components and update translations for new dataset creation options 2025-04-29 15:44:32 +08:00
twwu d1287f08b4 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-29 10:44:37 +08:00
twwu 7ee8472a5f feat: Add SegmentedControl component with styling and option handling 2025-04-29 10:44:03 +08:00
zxhlyh cdb615deeb knowledge base node 2025-04-28 18:37:18 +08:00
zxhlyh abbba1d004 knowledge base node 2025-04-28 18:37:17 +08:00
jyong 3c386c63a6 Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-04-28 16:19:29 +08:00
jyong 49d1846e63 r2 2025-04-28 16:19:12 +08:00
twwu 53f2882077 feat: Implement document processing component with configuration and action handling 2025-04-28 15:55:24 +08:00
twwu 8f07e088f5 feat: Add JinaReader and WaterCrawl components with configurations and schema handling 2025-04-28 14:33:01 +08:00
twwu f71b0eccb2 Refactor: dataset creation components and implement Firecrawl functionality 2025-04-28 13:33:16 +08:00
twwu 5b89d36ea1 feat: Update Zod schema generation for file types and upload methods to use new constants 2025-04-27 20:44:05 +08:00
twwu 7c3af74b0d feat: Update useConfigurations and useHiddenConfigurations to use InputVarType constants for type values 2025-04-27 20:34:25 +08:00
twwu d1d83f8a2a feat: Enhance form components with hidden fields and popup properties for improved configuration 2025-04-27 20:17:50 +08:00
twwu 839fe12087 feat: Update OptionsField to use correct Options type and enhance Zod schema generation for options and select input types 2025-04-27 18:45:22 +08:00
twwu fd8ee9f53e Refactor input field form components and schema 2025-04-27 15:29:11 +08:00
Yeuoly c2d02f8f4d Merge branch 'main' into feat/r2 2025-04-27 14:31:19 +08:00
twwu 8367ae85de feat: Replace BaseVarType with BaseFieldType for consistent field type usage across components 2025-04-27 10:16:16 +08:00
twwu d1f0e6e5c2 feat: Implement Zod schema generation for form validation and update form component usage 2025-04-27 09:56:48 +08:00
twwu 7deb44f864 feat: Add additional field components to form hook for enhanced functionality 2025-04-26 22:43:51 +08:00
twwu d12e9b81e3 feat: Introduce new form field components and enhance existing ones with label options 2025-04-26 21:50:21 +08:00
twwu b1fbaaed95 refactor: Simplify type checks for form field rendering and correct comment grammar 2025-04-25 18:39:05 +08:00
twwu 3f8b0b937c Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-25 18:13:57 +08:00
twwu 734c62998f feat: Implement dynamic form field rendering and replace SubmitButton with Actions component 2025-04-25 18:13:52 +08:00
zxhlyh 4792ca1813 knowledge base node 2025-04-25 17:24:47 +08:00
jyong d4007ae073 r2 2025-04-25 15:49:36 +08:00
jyong 389f15f8e3 r2 2025-04-25 14:56:22 +08:00
jyong 9437145218 r2 2025-04-25 13:42:57 +08:00
zxhlyh 076924bbd6 rag pipeline main 2025-04-25 11:32:17 +08:00
zxhlyh 97cf6b2d65 refactor workflow 2025-04-25 11:04:14 +08:00
twwu f317ef2fe2 feat: Refactor NotionConnector integration and add Header component for improved UI in NotionPageSelector 2025-04-24 21:26:54 +08:00
zxhlyh f7de55364f chore: refactor workflow 2025-04-24 16:29:58 +08:00
twwu de30e9278c feat: Refactor Notion and LocalFile components to remove unused VectorSpaceFull prop and improve step indicator logic 2025-04-24 15:47:22 +08:00
jyong b9ab1555fb r2 2025-04-24 15:42:30 +08:00
twwu 44b9ce0951 feat: Implement Notion connector and related components for data source selection in the RAG pipeline 2025-04-24 15:32:04 +08:00
twwu d768094376 feat: Refactor file upload configuration and validation logic 2025-04-24 13:46:50 +08:00
twwu 93f83086c1 feat: add CustomSelectField component and integrate with input field form 2025-04-23 22:16:19 +08:00
zxhlyh 8d9c252811 block selector & data source node 2025-04-22 16:46:33 +08:00
jyong c7f4b41920 r2 2025-04-22 16:08:58 +08:00
twwu efb27eb443 feat: enhance FieldList component with sorting and dynamic input field management 2025-04-22 12:56:22 +08:00
twwu 5b8c43052e feat: implement input field dialog and related components for rag pipeline 2025-04-22 11:29:03 +08:00
twwu e04ae927b6 Merge branch 'main' into feat/rag-pipeline 2025-04-22 10:14:28 +08:00
twwu ac68d62d1c Merge branch 'main' into feat/rag-pipeline 2025-04-21 18:07:15 +08:00
zxhlyh caa17b8fe9 rag pipeline store 2025-04-21 17:52:34 +08:00
twwu cd1562ee24 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-21 16:58:28 +08:00
twwu 47af1a9c42 feat: add InputField component and integrate into RagPipeline panel 2025-04-21 16:58:22 +08:00
zxhlyh 0cd6a427af add publisher 2025-04-21 14:41:41 +08:00
twwu 51165408ed feat: implement input field form with file upload settings and validation 2025-04-21 09:53:35 +08:00
zxhlyh a2dc38f90a feat: add rag pipeline store slice 2025-04-18 15:51:40 +08:00
zxhlyh a36436b585 feat: add rag pipeline store slice 2025-04-18 15:46:54 +08:00
zxhlyh 2d87823fc6 init rag pipeline 2025-04-18 14:56:34 +08:00
zxhlyh d238da9826 Merge branch 'main' into feat/rag-pipeline 2025-04-18 14:00:58 +08:00
twwu 6eef5990c9 feat: enhance form components with additional props for validation and tooltips; add OptionsField component 2025-04-18 11:32:23 +08:00
jyong 5c4bf2a9e4 r2 2025-04-17 15:07:23 +08:00
twwu 0345eb4659 feat: add new form components including CheckboxField, NumberInputField, SelectField, TextField, and SubmitButton with updated input sizes 2025-04-17 13:33:33 +08:00
twwu 71f78e0d33 feat: replace existing page content with DemoForm component for improved layout 2025-04-16 14:16:56 +08:00
twwu 942648e9e9 feat: implement form components including CheckboxField, SelectField, TextField, and SubmitButton with validation 2025-04-16 14:16:32 +08:00
twwu d841581679 feat: add IndeterminateIcon component and update Checkbox to support indeterminate state
refactor: remove mixed state handling and update related styles
fix: update useCallback dependencies for better performance
2025-04-15 17:30:18 +08:00
jyong 9f8e05d9f0 r2 2025-04-14 18:17:17 +08:00
jyong 3340775052 r2 2025-04-14 11:10:44 +08:00
jyong 9987774471 r2 2025-04-10 18:00:22 +08:00
1509 changed files with 92060 additions and 18312 deletions

View File

@ -8,6 +8,8 @@ on:
- "deploy/enterprise"
- "build/**"
- "release/e-*"
- "deploy/rag-dev"
- "feat/rag-2"
tags:
- "*"

View File

@ -4,7 +4,7 @@ on:
workflow_run:
workflows: ["Build and Push API & Web"]
branches:
- "deploy/dev"
- "deploy/rag-dev"
types:
- completed
@ -12,12 +12,13 @@ jobs:
deploy:
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success'
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_branch == 'deploy/rag-dev'
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v0.1.8
with:
host: ${{ secrets.SSH_HOST }}
host: ${{ secrets.RAG_SSH_HOST }}
username: ${{ secrets.SSH_USER }}
key: ${{ secrets.SSH_PRIVATE_KEY }}
script: |

View File

@ -12,7 +12,6 @@ permissions:
statuses: write
contents: read
jobs:
python-style:
name: Python Style

View File

@ -461,6 +461,16 @@ WORKFLOW_CALL_MAX_DEPTH=5
WORKFLOW_PARALLEL_DEPTH_LIMIT=3
MAX_VARIABLE_SIZE=204800
# GraphEngine Worker Pool Configuration
# Minimum number of workers per GraphEngine instance (default: 1)
GRAPH_ENGINE_MIN_WORKERS=1
# Maximum number of workers per GraphEngine instance (default: 10)
GRAPH_ENGINE_MAX_WORKERS=10
# Queue depth threshold that triggers worker scale up (default: 3)
GRAPH_ENGINE_SCALE_UP_THRESHOLD=3
# Seconds of idle time before scaling down workers (default: 5.0)
GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME=5.0
# Workflow storage configuration
# Options: rdbms, hybrid
# rdbms: Use only the relational database (default)
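Taken together, the four GRAPH_ENGINE_* settings describe a bounded worker pool: it grows while the ready queue is deeper than the scale-up threshold and shrinks after workers have been idle longer than the scale-down idle time, always staying within the min/max bounds. A minimal sketch of that decision rule, using hypothetical inputs (queue depth, idle seconds, current worker count) that are assumptions rather than the actual GraphEngine implementation:

# Illustrative sketch only; names and structure are assumptions, not Dify's code.
from dataclasses import dataclass

@dataclass
class WorkerPoolSettings:
    min_workers: int = 1                  # GRAPH_ENGINE_MIN_WORKERS
    max_workers: int = 10                 # GRAPH_ENGINE_MAX_WORKERS
    scale_up_threshold: int = 3           # GRAPH_ENGINE_SCALE_UP_THRESHOLD
    scale_down_idle_time: float = 5.0     # GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME

def plan_worker_count(s: WorkerPoolSettings, workers: int, queue_depth: int, idle_seconds: float) -> int:
    """Return the worker count to use for the next sampling interval."""
    if queue_depth >= s.scale_up_threshold and workers < s.max_workers:
        return workers + 1
    if idle_seconds >= s.scale_down_idle_time and workers > s.min_workers:
        return workers - 1
    return workers

print(plan_worker_count(WorkerPoolSettings(), workers=2, queue_depth=5, idle_seconds=0.0))  # -> 3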

112
api/.importlinter Normal file
View File

@ -0,0 +1,112 @@
[importlinter]
root_packages =
core
configs
controllers
models
tasks
services
[importlinter:contract:workflow]
name = Workflow
type=layers
layers =
graph_engine
graph_events
graph
nodes
node_events
entities
containers =
core.workflow
ignore_imports =
core.workflow.nodes.base.node -> core.workflow.graph_events
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_events
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine.command_channels
core.workflow.nodes.loop.loop_node -> core.workflow.graph_events
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine
core.workflow.nodes.loop.loop_node -> core.workflow.graph
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine.command_channels
core.workflow.nodes.node_factory -> core.workflow.graph
[importlinter:contract:rsc]
name = RSC
type = layers
layers =
graph_engine
response_coordinator
containers =
core.workflow.graph_engine
[importlinter:contract:worker]
name = Worker
type = layers
layers =
graph_engine
worker
containers =
core.workflow.graph_engine
[importlinter:contract:graph-engine-architecture]
name = Graph Engine Architecture
type = layers
layers =
graph_engine
orchestration
command_processing
event_management
error_handling
graph_traversal
state_management
worker_management
domain
containers =
core.workflow.graph_engine
[importlinter:contract:domain-isolation]
name = Domain Model Isolation
type = forbidden
source_modules =
core.workflow.graph_engine.domain
forbidden_modules =
core.workflow.graph_engine.worker_management
core.workflow.graph_engine.command_channels
core.workflow.graph_engine.layers
core.workflow.graph_engine.protocols
[importlinter:contract:worker-management]
name = Worker Management
type = forbidden
source_modules =
core.workflow.graph_engine.worker_management
forbidden_modules =
core.workflow.graph_engine.orchestration
core.workflow.graph_engine.command_processing
core.workflow.graph_engine.event_management
[importlinter:contract:error-handling-strategies]
name = Error Handling Strategies
type = independence
modules =
core.workflow.graph_engine.error_handling.abort_strategy
core.workflow.graph_engine.error_handling.retry_strategy
core.workflow.graph_engine.error_handling.fail_branch_strategy
core.workflow.graph_engine.error_handling.default_value_strategy
[importlinter:contract:graph-traversal-components]
name = Graph Traversal Components
type = layers
layers =
edge_processor
skip_propagator
containers =
core.workflow.graph_engine.graph_traversal
[importlinter:contract:command-channels]
name = Command Channels Independence
type = independence
modules =
core.workflow.graph_engine.command_channels.in_memory_channel
core.workflow.graph_engine.command_channels.redis_channel
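The contracts above use import-linter's configuration format: the layers contracts forbid lower layers (worker, domain, state_management, and so on) from importing higher layers such as graph_engine, the forbidden contracts isolate the domain model and worker management, and the independence contracts keep the error-handling strategies and command channels decoupled from one another. Assuming the standard import-linter tooling, they can be checked from the api directory with:

lint-imports

which discovers this .importlinter file automatically and fails when a contract is violated.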

View File

@ -1,4 +1,3 @@
import os
import sys
@ -17,20 +16,20 @@ else:
# It seems that JetBrains Python debugger does not work well with gevent,
# so we need to disable gevent in debug mode.
# If you are using debugpy and set GEVENT_SUPPORT=True, you can debug with gevent.
if (flask_debug := os.environ.get("FLASK_DEBUG", "0")) and flask_debug.lower() in {"false", "0", "no"}:
from gevent import monkey
# if (flask_debug := os.environ.get("FLASK_DEBUG", "0")) and flask_debug.lower() in {"false", "0", "no"}:
# from gevent import monkey
#
# # gevent
# monkey.patch_all()
#
# from grpc.experimental import gevent as grpc_gevent # type: ignore
#
# # grpc gevent
# grpc_gevent.init_gevent()
# gevent
monkey.patch_all()
from grpc.experimental import gevent as grpc_gevent # type: ignore
# grpc gevent
grpc_gevent.init_gevent()
import psycogreen.gevent # type: ignore
psycogreen.gevent.patch_psycopg()
# import psycogreen.gevent # type: ignore
#
# psycogreen.gevent.patch_psycopg()
from app_factory import create_app

22
api/celery_entrypoint.py Normal file
View File

@ -0,0 +1,22 @@
import logging
import psycogreen.gevent as psycogreen_gevent # type: ignore

from grpc.experimental import gevent as grpc_gevent # type: ignore
_logger = logging.getLogger(__name__)
def _log(message: str):
print(message, flush=True)
# grpc gevent
grpc_gevent.init_gevent()
_log("gRPC patched with gevent.")
psycogreen_gevent.patch_psycopg()
_log("psycopg2 patched with gevent.")
from app import app, celery
__all__ = ["app", "celery"]
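This new module is presumably meant to be the Celery worker entrypoint, so that the gevent patches for gRPC and psycopg2 run before app and celery are imported; under that assumption the worker would be started with something like celery -A celery_entrypoint.celery worker rather than importing app directly (the exact invocation lives in the deployment scripts and is not shown in this diff).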

View File

@ -13,11 +13,14 @@ from sqlalchemy.exc import SQLAlchemyError
from configs import dify_config
from constants.languages import languages
from core.plugin.entities.plugin import ToolProviderID
from core.helper import encrypter
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.index_processor.constant.built_in_field import BuiltInField
from core.rag.models.document import Document
from core.tools.entities.tool_entities import CredentialType
from core.tools.utils.system_oauth_encryption import encrypt_system_oauth_params
from events.app_event import app_was_created
from extensions.ext_database import db
@ -30,12 +33,16 @@ from models import Tenant
from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, DatasetMetadataBinding, DocumentSegment
from models.dataset import Document as DatasetDocument
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
from models.oauth import DatasourceOauthParamConfig, DatasourceProvider
from models.provider import Provider, ProviderModel
from models.provider_ids import DatasourceProviderID, ToolProviderID
from models.source import DataSourceApiKeyAuthBinding, DataSourceOauthBinding
from models.tools import ToolOAuthSystemClient
from services.account_service import AccountService, RegisterService, TenantService
from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs
from services.plugin.data_migration import PluginDataMigration
from services.plugin.plugin_migration import PluginMigration
from services.plugin.plugin_service import PluginService
from tasks.remove_app_and_related_data_task import delete_draft_variables_batch
logger = logging.getLogger(__name__)
@ -1233,15 +1240,17 @@ def _find_orphaned_draft_variables(batch_size: int = 1000) -> list[str]:
def _count_orphaned_draft_variables() -> dict[str, Any]:
"""
Count orphaned draft variables by app.
Count orphaned draft variables by app, including associated file counts.
Returns:
Dictionary with statistics about orphaned variables
Dictionary with statistics about orphaned variables and files
"""
query = """
# Count orphaned variables by app
variables_query = """
SELECT
wdv.app_id,
COUNT(*) as variable_count
COUNT(*) as variable_count,
COUNT(wdv.file_id) as file_count
FROM workflow_draft_variables AS wdv
WHERE NOT EXISTS(
SELECT 1 FROM apps WHERE apps.id = wdv.app_id
@ -1251,14 +1260,21 @@ def _count_orphaned_draft_variables() -> dict[str, Any]:
"""
with db.engine.connect() as conn:
result = conn.execute(sa.text(query))
orphaned_by_app = {row[0]: row[1] for row in result}
result = conn.execute(sa.text(variables_query))
orphaned_by_app = {}
total_files = 0
total_orphaned = sum(orphaned_by_app.values())
for row in result:
app_id, variable_count, file_count = row
orphaned_by_app[app_id] = {"variables": variable_count, "files": file_count}
total_files += file_count
total_orphaned = sum(app_data["variables"] for app_data in orphaned_by_app.values())
app_count = len(orphaned_by_app)
return {
"total_orphaned_variables": total_orphaned,
"total_orphaned_files": total_files,
"orphaned_app_count": app_count,
"orphaned_by_app": orphaned_by_app,
}
@ -1287,6 +1303,7 @@ def cleanup_orphaned_draft_variables(
stats = _count_orphaned_draft_variables()
logger.info("Found %s orphaned draft variables", stats["total_orphaned_variables"])
logger.info("Found %s associated offload files", stats["total_orphaned_files"])
logger.info("Across %s non-existent apps", stats["orphaned_app_count"])
if stats["total_orphaned_variables"] == 0:
@ -1295,10 +1312,10 @@ def cleanup_orphaned_draft_variables(
if dry_run:
logger.info("DRY RUN: Would delete the following:")
for app_id, count in sorted(stats["orphaned_by_app"].items(), key=lambda x: x[1], reverse=True)[
for app_id, data in sorted(stats["orphaned_by_app"].items(), key=lambda x: x[1]["variables"], reverse=True)[
:10
]: # Show top 10
logger.info(" App %s: %s variables", app_id, count)
logger.info(" App %s: %s variables, %s files", app_id, data["variables"], data["files"])
if len(stats["orphaned_by_app"]) > 10:
logger.info(" ... and %s more apps", len(stats["orphaned_by_app"]) - 10)
return
@ -1307,7 +1324,8 @@ def cleanup_orphaned_draft_variables(
if not force:
click.confirm(
f"Are you sure you want to delete {stats['total_orphaned_variables']} "
f"orphaned draft variables from {stats['orphaned_app_count']} apps?",
f"orphaned draft variables and {stats['total_orphaned_files']} associated files "
f"from {stats['orphaned_app_count']} apps?",
abort=True,
)
@ -1340,3 +1358,231 @@ def cleanup_orphaned_draft_variables(
continue
logger.info("Cleanup completed. Total deleted: %s variables across %s apps", total_deleted, processed_apps)
@click.command("setup-datasource-oauth-client", help="Setup datasource oauth client.")
@click.option("--provider", prompt=True, help="Provider name")
@click.option("--client-params", prompt=True, help="Client Params")
def setup_datasource_oauth_client(provider, client_params):
"""
Setup datasource oauth client
"""
provider_id = DatasourceProviderID(provider)
provider_name = provider_id.provider_name
plugin_id = provider_id.plugin_id
try:
# json validate
click.echo(click.style(f"Validating client params: {client_params}", fg="yellow"))
client_params_dict = TypeAdapter(dict[str, Any]).validate_json(client_params)
click.echo(click.style("Client params validated successfully.", fg="green"))
except Exception as e:
click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
return
click.echo(click.style(f"Ready to delete existing oauth client params: {provider_name}", fg="yellow"))
deleted_count = (
db.session.query(DatasourceOauthParamConfig)
.filter_by(
provider=provider_name,
plugin_id=plugin_id,
)
.delete()
)
if deleted_count > 0:
click.echo(click.style(f"Deleted {deleted_count} existing oauth client params.", fg="yellow"))
click.echo(click.style(f"Ready to setup datasource oauth client: {provider_name}", fg="yellow"))
oauth_client = DatasourceOauthParamConfig(
provider=provider_name,
plugin_id=plugin_id,
system_credentials=client_params_dict,
)
db.session.add(oauth_client)
db.session.commit()
click.echo(click.style(f"provider: {provider_name}", fg="green"))
click.echo(click.style(f"plugin_id: {plugin_id}", fg="green"))
click.echo(click.style(f"params: {json.dumps(client_params_dict, indent=2, ensure_ascii=False)}", fg="green"))
click.echo(click.style(f"Datasource oauth client setup successfully. id: {oauth_client.id}", fg="green"))
@click.command("transform-datasource-credentials", help="Transform datasource credentials.")
def transform_datasource_credentials():
"""
Transform datasource credentials
"""
try:
installer_manager = PluginInstaller()
plugin_migration = PluginMigration()
notion_plugin_id = "langgenius/notion_datasource"
firecrawl_plugin_id = "langgenius/firecrawl_datasource"
jina_plugin_id = "langgenius/jina_datasource"
notion_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(notion_plugin_id)
firecrawl_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(firecrawl_plugin_id)
jina_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(jina_plugin_id)
oauth_credential_type = CredentialType.OAUTH2
api_key_credential_type = CredentialType.API_KEY
# deal notion credentials
deal_notion_count = 0
notion_credentials = db.session.query(DataSourceOauthBinding).filter_by(provider="notion").all()
if notion_credentials:
notion_credentials_tenant_mapping: dict[str, list[DataSourceOauthBinding]] = {}
for credential in notion_credentials:
tenant_id = credential.tenant_id
if tenant_id not in notion_credentials_tenant_mapping:
notion_credentials_tenant_mapping[tenant_id] = []
notion_credentials_tenant_mapping[tenant_id].append(credential)
for tenant_id, credentials in notion_credentials_tenant_mapping.items():
# check notion plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if notion_plugin_id not in installed_plugins_ids:
if notion_plugin_unique_identifier:
# install notion plugin
PluginService.install_from_marketplace_pkg(tenant_id, [notion_plugin_unique_identifier])
auth_count = 0
for credential in credentials:
auth_count += 1
# get credential oauth params
access_token = credential.access_token
# notion info
notion_info = credential.source_info
workspace_id = notion_info.get("workspace_id")
workspace_name = notion_info.get("workspace_name")
workspace_icon = notion_info.get("workspace_icon")
new_credentials = {
"integration_secret": encrypter.encrypt_token(tenant_id, access_token),
"workspace_id": workspace_id,
"workspace_name": workspace_name,
"workspace_icon": workspace_icon,
}
datasource_provider = DatasourceProvider(
provider="notion_datasource",
tenant_id=tenant_id,
plugin_id=notion_plugin_id,
auth_type=oauth_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url=workspace_icon or "default",
is_default=False,
)
db.session.add(datasource_provider)
deal_notion_count += 1
db.session.commit()
# deal firecrawl credentials
deal_firecrawl_count = 0
firecrawl_credentials = db.session.query(DataSourceApiKeyAuthBinding).filter_by(provider="firecrawl").all()
if firecrawl_credentials:
firecrawl_credentials_tenant_mapping: dict[str, list[DataSourceApiKeyAuthBinding]] = {}
for credential in firecrawl_credentials:
tenant_id = credential.tenant_id
if tenant_id not in firecrawl_credentials_tenant_mapping:
firecrawl_credentials_tenant_mapping[tenant_id] = []
firecrawl_credentials_tenant_mapping[tenant_id].append(credential)
for tenant_id, credentials in firecrawl_credentials_tenant_mapping.items():
# check firecrawl plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if firecrawl_plugin_id not in installed_plugins_ids:
if firecrawl_plugin_unique_identifier:
# install firecrawl plugin
PluginService.install_from_marketplace_pkg(tenant_id, [firecrawl_plugin_unique_identifier])
auth_count = 0
for credential in credentials:
auth_count += 1
# get credential api key
credentials_json = json.loads(credential.credentials)
api_key = credentials_json.get("config", {}).get("api_key")
base_url = credentials_json.get("config", {}).get("base_url")
new_credentials = {
"firecrawl_api_key": api_key,
"base_url": base_url,
}
datasource_provider = DatasourceProvider(
provider="firecrawl",
tenant_id=tenant_id,
plugin_id=firecrawl_plugin_id,
auth_type=api_key_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url="default",
is_default=False,
)
db.session.add(datasource_provider)
deal_firecrawl_count += 1
db.session.commit()
# deal jina credentials
deal_jina_count = 0
jina_credentials = db.session.query(DataSourceApiKeyAuthBinding).filter_by(provider="jinareader").all()
if jina_credentials:
jina_credentials_tenant_mapping: dict[str, list[DataSourceApiKeyAuthBinding]] = {}
for credential in jina_credentials:
tenant_id = credential.tenant_id
if tenant_id not in jina_credentials_tenant_mapping:
jina_credentials_tenant_mapping[tenant_id] = []
jina_credentials_tenant_mapping[tenant_id].append(credential)
for tenant_id, credentials in jina_credentials_tenant_mapping.items():
# check jina plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if jina_plugin_id not in installed_plugins_ids:
if jina_plugin_unique_identifier:
# install jina plugin
print(jina_plugin_unique_identifier)
PluginService.install_from_marketplace_pkg(tenant_id, [jina_plugin_unique_identifier])
auth_count = 0
for credential in credentials:
auth_count += 1
# get credential api key
credentials_json = json.loads(credential.credentials)
api_key = credentials_json.get("config", {}).get("api_key")
new_credentials = {
"integration_secret": api_key,
}
datasource_provider = DatasourceProvider(
provider="jina",
tenant_id=tenant_id,
plugin_id=jina_plugin_id,
auth_type=api_key_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url="default",
is_default=False,
)
db.session.add(datasource_provider)
deal_jina_count += 1
db.session.commit()
except Exception as e:
click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
return
click.echo(click.style(f"Transforming notion successfully. deal_notion_count: {deal_notion_count}", fg="green"))
click.echo(
click.style(f"Transforming firecrawl successfully. deal_firecrawl_count: {deal_firecrawl_count}", fg="green")
)
click.echo(click.style(f"Transforming jina successfully. deal_jina_count: {deal_jina_count}", fg="green"))
@click.command("install-rag-pipeline-plugins", help="Install rag pipeline plugins.")
@click.option(
"--input_file", prompt=True, help="The file to store the extracted unique identifiers.", default="plugins.jsonl"
)
@click.option(
"--output_file", prompt=True, help="The file to store the installed plugins.", default="installed_plugins.jsonl"
)
@click.option("--workers", prompt=True, help="The number of workers to install plugins.", default=100)
def install_rag_pipeline_plugins(input_file, output_file, workers):
"""
Install rag pipeline plugins
"""
click.echo(click.style("Installing rag pipeline plugins", fg="yellow"))
plugin_migration = PluginMigration()
plugin_migration.install_rag_pipeline_plugins(
input_file,
output_file,
workers,
)
click.echo(click.style("Installing rag pipeline plugins successfully", fg="green"))

View File

@ -499,6 +499,22 @@ class UpdateConfig(BaseSettings):
)
class WorkflowVariableTruncationConfig(BaseSettings):
WORKFLOW_VARIABLE_TRUNCATION_MAX_SIZE: PositiveInt = Field(
# 100KB
1024_000,
description="Maximum size for variable to trigger final truncation.",
)
WORKFLOW_VARIABLE_TRUNCATION_STRING_LENGTH: PositiveInt = Field(
50000,
description="maximum length for string to trigger tuncation, measure in number of characters",
)
WORKFLOW_VARIABLE_TRUNCATION_ARRAY_LENGTH: PositiveInt = Field(
100,
description="maximum length for array to trigger truncation.",
)
class WorkflowConfig(BaseSettings):
"""
Configuration for workflow execution
@ -529,6 +545,28 @@ class WorkflowConfig(BaseSettings):
default=200 * 1024,
)
# GraphEngine Worker Pool Configuration
GRAPH_ENGINE_MIN_WORKERS: PositiveInt = Field(
description="Minimum number of workers per GraphEngine instance",
default=1,
)
GRAPH_ENGINE_MAX_WORKERS: PositiveInt = Field(
description="Maximum number of workers per GraphEngine instance",
default=10,
)
GRAPH_ENGINE_SCALE_UP_THRESHOLD: PositiveInt = Field(
description="Queue depth threshold that triggers worker scale up",
default=3,
)
GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME: float = Field(
description="Seconds of idle time before scaling down workers",
default=5.0,
ge=0.1,
)
class WorkflowNodeExecutionConfig(BaseSettings):
"""
@ -1025,5 +1063,6 @@ class FeatureConfig(
CeleryBeatConfig,
CeleryScheduleTasksConfig,
WorkflowLogConfig,
WorkflowVariableTruncationConfig,
):
pass
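Both the new WorkflowVariableTruncationConfig fields and the GRAPH_ENGINE_* fields are ordinary pydantic-settings fields, so the values in the .env block shown earlier override these defaults at startup. A minimal sketch of that override behaviour, assuming pydantic v2 with pydantic-settings (the class name here is illustrative, not Dify's):

import os
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings

class GraphEngineWorkerSettings(BaseSettings):
    GRAPH_ENGINE_MIN_WORKERS: PositiveInt = Field(default=1)
    GRAPH_ENGINE_MAX_WORKERS: PositiveInt = Field(default=10)
    GRAPH_ENGINE_SCALE_UP_THRESHOLD: PositiveInt = Field(default=3)
    GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME: float = Field(default=5.0, ge=0.1)

os.environ["GRAPH_ENGINE_MAX_WORKERS"] = "20"   # e.g. set via docker/.env
print(GraphEngineWorkerSettings().GRAPH_ENGINE_MAX_WORKERS)  # -> 20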

View File

@ -222,11 +222,28 @@ class HostedFetchAppTemplateConfig(BaseSettings):
)
class HostedFetchPipelineTemplateConfig(BaseSettings):
"""
Configuration for fetching pipeline templates
"""
HOSTED_FETCH_PIPELINE_TEMPLATES_MODE: str = Field(
description="Mode for fetching pipeline templates: remote, db, or builtin default to remote,",
default="remote",
)
HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN: str = Field(
description="Domain for fetching remote pipeline templates",
default="https://tmpl.dify.ai",
)
class HostedServiceConfig(
# place the configs in alphabet order
HostedAnthropicConfig,
HostedAzureOpenAiConfig,
HostedFetchAppTemplateConfig,
HostedFetchPipelineTemplateConfig,
HostedMinmaxConfig,
HostedOpenAiConfig,
HostedSparkConfig,

View File

@ -3,6 +3,7 @@ from threading import Lock
from typing import TYPE_CHECKING
from contexts.wrapper import RecyclableContextVar
from core.datasource.__base.datasource_provider import DatasourcePluginProviderController
if TYPE_CHECKING:
from core.model_runtime.entities.model_entities import AIModelEntity
@ -33,3 +34,11 @@ plugin_model_schema_lock: RecyclableContextVar[Lock] = RecyclableContextVar(Cont
plugin_model_schemas: RecyclableContextVar[dict[str, "AIModelEntity"]] = RecyclableContextVar(
ContextVar("plugin_model_schemas")
)
datasource_plugin_providers: RecyclableContextVar[dict[str, "DatasourcePluginProviderController"]] = (
RecyclableContextVar(ContextVar("datasource_plugin_providers"))
)
datasource_plugin_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar(
ContextVar("datasource_plugin_providers_lock")
)

View File

@ -43,7 +43,7 @@ api.add_resource(AppImportConfirmApi, "/apps/imports/<string:import_id>/confirm"
api.add_resource(AppImportCheckDependenciesApi, "/apps/imports/<string:app_id>/check-dependencies")
# Import other controllers
from . import admin, apikey, extension, feature, ping, setup, version
from . import admin, apikey, extension, feature, ping, setup, spec, version
# Import app controllers
from .app import (
@ -86,6 +86,15 @@ from .datasets import (
metadata,
website,
)
from .datasets.rag_pipeline import (
datasource_auth,
datasource_content_preview,
rag_pipeline,
rag_pipeline_datasets,
rag_pipeline_draft_variable,
rag_pipeline_import,
rag_pipeline_workflow,
)
# Import explore controllers
from .explore import (

View File

@ -16,7 +16,10 @@ from core.helper.code_executor.javascript.javascript_code_provider import Javasc
from core.helper.code_executor.python3.python3_code_provider import Python3CodeProvider
from core.llm_generator.llm_generator import LLMGenerator
from core.model_runtime.errors.invoke import InvokeError
from extensions.ext_database import db
from libs.login import login_required
from models import App
from services.workflow_service import WorkflowService
class RuleGenerateApi(Resource):
@ -135,9 +138,6 @@ class InstructionGenerateApi(Resource):
try:
# Generate from nothing for a workflow node
if (args["current"] == code_template or args["current"] == "") and args["node_id"] != "":
from models import App, db
from services.workflow_service import WorkflowService
app = db.session.query(App).where(App.id == args["flow_id"]).first()
if not app:
return {"error": f"app {args['flow_id']} not found"}, 400

View File

@ -24,6 +24,7 @@ from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file.models import File
from core.helper.trace_id_helper import get_external_trace_id
from core.workflow.graph_engine.manager import GraphEngineManager
from extensions.ext_database import db
from factories import file_factory, variable_factory
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
@ -413,7 +414,12 @@ class WorkflowTaskStopApi(Resource):
if not current_user.is_editor:
raise Forbidden()
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
# Stop using both mechanisms for backward compatibility
# Legacy stop flag mechanism (without user check)
AppQueueManager.set_stop_flag_no_user_check(task_id)
# New graph engine command channel mechanism
GraphEngineManager.send_stop_command(task_id)
return {"result": "success"}

View File

@ -6,7 +6,7 @@ from sqlalchemy.orm import Session
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.workflow.entities.workflow_execution import WorkflowExecutionStatus
from core.workflow.enums import WorkflowExecutionStatus
from extensions.ext_database import db
from fields.workflow_app_log_fields import workflow_app_log_pagination_fields
from libs.login import login_required

View File

@ -13,14 +13,16 @@ from controllers.console.app.error import (
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import InvalidArgumentError, NotFoundError
from core.file import helpers as file_helpers
from core.variables.segment_group import SegmentGroup
from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.variables.types import SegmentType
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from extensions.ext_database import db
from factories.file_factory import build_from_mapping, build_from_mappings
from factories.variable_factory import build_segment_with_type
from libs.login import current_user, login_required
from models import App, AppMode, db
from models import App, AppMode
from models.account import Account
from models.workflow import WorkflowDraftVariable
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
@ -41,6 +43,7 @@ def _convert_values_to_json_serializable_object(value: Segment) -> Any:
def _serialize_var_value(variable: WorkflowDraftVariable) -> Any:
"""Serialize variable value. If variable is truncated, return the truncated value."""
value = variable.get_value()
# create a copy of the value to avoid affecting the model cache.
value = value.model_copy(deep=True)
@ -74,6 +77,22 @@ def _serialize_variable_type(workflow_draft_var: WorkflowDraftVariable) -> str:
return value_type.exposed_type().value
def _serialize_full_content(variable: WorkflowDraftVariable) -> dict | None:
"""Serialize full_content information for large variables."""
if not variable.is_truncated():
return None
variable_file = variable.variable_file
assert variable_file is not None
return {
"size_bytes": variable_file.size,
"value_type": variable_file.value_type.exposed_type().value,
"length": variable_file.length,
"download_url": file_helpers.get_signed_file_url(variable_file.upload_file_id, as_attachment=True),
}
_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS = {
"id": fields.String,
"type": fields.String(attribute=lambda model: model.get_variable_type()),
@ -83,11 +102,13 @@ _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS = {
"value_type": fields.String(attribute=_serialize_variable_type),
"edited": fields.Boolean(attribute=lambda model: model.edited),
"visible": fields.Boolean,
"is_truncated": fields.Boolean(attribute=lambda model: model.file_id is not None),
}
_WORKFLOW_DRAFT_VARIABLE_FIELDS = dict(
_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS,
value=fields.Raw(attribute=_serialize_var_value),
full_content=fields.Raw(attribute=_serialize_full_content),
)
_WORKFLOW_DRAFT_ENV_VARIABLE_FIELDS = {

View File

@ -1,4 +1,6 @@
import json
from collections.abc import Generator
from typing import cast
from flask import request
from flask_login import current_user
@ -9,6 +11,8 @@ from werkzeug.exceptions import NotFound
from controllers.console import api
from controllers.console.wraps import account_initialization_required, setup_required
from core.datasource.entities.datasource_entities import DatasourceProviderType, OnlineDocumentPagesMessage
from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin
from core.indexing_runner import IndexingRunner
from core.rag.extractor.entity.datasource_type import DatasourceType
from core.rag.extractor.entity.extract_setting import ExtractSetting
@ -19,6 +23,7 @@ from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import DataSourceOauthBinding, Document
from services.dataset_service import DatasetService, DocumentService
from services.datasource_provider_service import DatasourceProviderService
from tasks.document_indexing_sync_task import document_indexing_sync_task
@ -113,6 +118,18 @@ class DataSourceNotionListApi(Resource):
@marshal_with(integrate_notion_info_list_fields)
def get(self):
dataset_id = request.args.get("dataset_id", default=None, type=str)
credential_id = request.args.get("credential_id", default=None, type=str)
if not credential_id:
raise ValueError("Credential id is required.")
datasource_provider_service = DatasourceProviderService()
credential = datasource_provider_service.get_datasource_credentials(
tenant_id=current_user.current_tenant_id,
credential_id=credential_id,
provider="notion_datasource",
plugin_id="langgenius/notion_datasource",
)
if not credential:
raise NotFound("Credential not found.")
exist_page_ids = []
with Session(db.engine) as session:
# import notion in the exist dataset
@ -136,31 +153,49 @@ class DataSourceNotionListApi(Resource):
data_source_info = json.loads(document.data_source_info)
exist_page_ids.append(data_source_info["notion_page_id"])
# get all authorized pages
data_source_bindings = session.scalars(
select(DataSourceOauthBinding).filter_by(
tenant_id=current_user.current_tenant_id, provider="notion", disabled=False
from core.datasource.datasource_manager import DatasourceManager
datasource_runtime = DatasourceManager.get_datasource_runtime(
provider_id="langgenius/notion_datasource/notion_datasource",
datasource_name="notion_datasource",
tenant_id=current_user.current_tenant_id,
datasource_type=DatasourceProviderType.ONLINE_DOCUMENT,
)
datasource_provider_service = DatasourceProviderService()
if credential:
datasource_runtime.runtime.credentials = credential
datasource_runtime = cast(OnlineDocumentDatasourcePlugin, datasource_runtime)
online_document_result: Generator[OnlineDocumentPagesMessage, None, None] = (
datasource_runtime.get_online_document_pages(
user_id=current_user.id,
datasource_parameters={},
provider_type=datasource_runtime.datasource_provider_type(),
)
).all()
if not data_source_bindings:
return {"notion_info": []}, 200
pre_import_info_list = []
for data_source_binding in data_source_bindings:
source_info = data_source_binding.source_info
pages = source_info["pages"]
# Filter out already bound pages
for page in pages:
if page["page_id"] in exist_page_ids:
page["is_bound"] = True
else:
page["is_bound"] = False
pre_import_info = {
"workspace_name": source_info["workspace_name"],
"workspace_icon": source_info["workspace_icon"],
"workspace_id": source_info["workspace_id"],
"pages": pages,
}
pre_import_info_list.append(pre_import_info)
return {"notion_info": pre_import_info_list}, 200
)
try:
pages = []
workspace_info = {}
for message in online_document_result:
result = message.result
for info in result:
workspace_info = {
"workspace_id": info.workspace_id,
"workspace_name": info.workspace_name,
"workspace_icon": info.workspace_icon,
}
for page in info.pages:
page_info = {
"page_id": page.page_id,
"page_name": page.page_name,
"type": page.type,
"parent_id": page.parent_id,
"is_bound": page.page_id in exist_page_ids,
"page_icon": page.page_icon,
}
pages.append(page_info)
except Exception as e:
raise e
return {"notion_info": {**workspace_info, "pages": pages}}, 200
class DataSourceNotionApi(Resource):
@ -168,27 +203,25 @@ class DataSourceNotionApi(Resource):
@login_required
@account_initialization_required
def get(self, workspace_id, page_id, page_type):
credential_id = request.args.get("credential_id", default=None, type=str)
if not credential_id:
raise ValueError("Credential id is required.")
datasource_provider_service = DatasourceProviderService()
credential = datasource_provider_service.get_datasource_credentials(
tenant_id=current_user.current_tenant_id,
credential_id=credential_id,
provider="notion_datasource",
plugin_id="langgenius/notion_datasource",
)
workspace_id = str(workspace_id)
page_id = str(page_id)
with Session(db.engine) as session:
data_source_binding = session.execute(
select(DataSourceOauthBinding).where(
db.and_(
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
DataSourceOauthBinding.provider == "notion",
DataSourceOauthBinding.disabled == False,
DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"',
)
)
).scalar_one_or_none()
if not data_source_binding:
raise NotFound("Data source binding not found.")
extractor = NotionExtractor(
notion_workspace_id=workspace_id,
notion_obj_id=page_id,
notion_page_type=page_type,
notion_access_token=data_source_binding.access_token,
notion_access_token=credential.get("integration_secret"),
tenant_id=current_user.current_tenant_id,
)
@ -213,10 +246,12 @@ class DataSourceNotionApi(Resource):
extract_settings = []
for notion_info in notion_info_list:
workspace_id = notion_info["workspace_id"]
credential_id = notion_info.get("credential_id")
for page in notion_info["pages"]:
extract_setting = ExtractSetting(
datasource_type=DatasourceType.NOTION.value,
notion_info={
"credential_id": credential_id,
"notion_workspace_id": workspace_id,
"notion_obj_id": page["page_id"],
"notion_page_type": page["type"],

View File

@ -19,7 +19,6 @@ from controllers.console.wraps import (
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.indexing_runner import IndexingRunner
from core.model_runtime.entities.model_entities import ModelType
from core.plugin.entities.plugin import ModelProviderID
from core.provider_manager import ProviderManager
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.extractor.entity.datasource_type import DatasourceType
@ -32,6 +31,7 @@ from fields.document_fields import document_status_fields
from libs.login import login_required
from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile
from models.dataset import DatasetPermissionEnum
from models.provider_ids import ModelProviderID
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
@ -280,6 +280,15 @@ class DatasetApi(Resource):
location="json",
help="Invalid external knowledge api id.",
)
parser.add_argument(
"icon_info",
type=dict,
required=False,
nullable=True,
location="json",
help="Invalid icon info.",
)
args = parser.parse_args()
data = request.get_json()
@ -432,10 +441,12 @@ class DatasetIndexingEstimateApi(Resource):
notion_info_list = args["info_list"]["notion_info_list"]
for notion_info in notion_info_list:
workspace_id = notion_info["workspace_id"]
credential_id = notion_info.get("credential_id")
for page in notion_info["pages"]:
extract_setting = ExtractSetting(
datasource_type=DatasourceType.NOTION.value,
notion_info={
"credential_id": credential_id,
"notion_workspace_id": workspace_id,
"notion_obj_id": page["page_id"],
"notion_page_type": page["type"],

View File

@ -1,3 +1,4 @@
import json
import logging
from argparse import ArgumentTypeError
from typing import Literal, cast
@ -52,6 +53,7 @@ from fields.document_fields import (
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile
from models.dataset import DocumentPipelineExecutionLog
from services.dataset_service import DatasetService, DocumentService
from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig
@ -494,6 +496,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
extract_setting = ExtractSetting(
datasource_type=DatasourceType.NOTION.value,
notion_info={
"credential_id": data_source_info["credential_id"],
"notion_workspace_id": data_source_info["notion_workspace_id"],
"notion_obj_id": data_source_info["notion_page_id"],
"notion_page_type": data_source_info["type"],
@ -647,7 +650,7 @@ class DocumentApi(DocumentResource):
response = {"id": document.id, "doc_type": document.doc_type, "doc_metadata": document.doc_metadata_details}
elif metadata == "without":
dataset_process_rules = DatasetService.get_process_rules(dataset_id)
document_process_rules = document.dataset_process_rule.to_dict()
document_process_rules = document.dataset_process_rule.to_dict() if document.dataset_process_rule else {}
data_source_info = document.data_source_detail_dict
response = {
"id": document.id,
@ -1010,6 +1013,41 @@ class WebsiteDocumentSyncApi(DocumentResource):
return {"result": "success"}, 200
class DocumentPipelineExecutionLogApi(DocumentResource):
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id, document_id):
dataset_id = str(dataset_id)
document_id = str(document_id)
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
raise NotFound("Dataset not found.")
document = DocumentService.get_document(dataset.id, document_id)
if not document:
raise NotFound("Document not found.")
log = (
db.session.query(DocumentPipelineExecutionLog)
.filter_by(document_id=document_id)
.order_by(DocumentPipelineExecutionLog.created_at.desc())
.first()
)
if not log:
return {
"datasource_info": None,
"datasource_type": None,
"input_data": None,
"datasource_node_id": None,
}, 200
return {
"datasource_info": json.loads(log.datasource_info),
"datasource_type": log.datasource_type,
"input_data": log.input_data,
"datasource_node_id": log.datasource_node_id,
}, 200
api.add_resource(GetProcessRuleApi, "/datasets/process-rule")
api.add_resource(DatasetDocumentListApi, "/datasets/<uuid:dataset_id>/documents")
api.add_resource(DatasetInitApi, "/datasets/init")
@ -1031,3 +1069,6 @@ api.add_resource(DocumentRetryApi, "/datasets/<uuid:dataset_id>/retry")
api.add_resource(DocumentRenameApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/rename")
api.add_resource(WebsiteDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/website-sync")
api.add_resource(
DocumentPipelineExecutionLogApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/pipeline-execution-log"
)
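A minimal client sketch for the pipeline-execution-log route registered above. Only the URL path comes from the registration; the host, auth header, and IDs are placeholders and assumptions, not part of this change.
import requests

BASE = "https://dify.example.com/console/api"  # assumption: console API prefix
DATASET_ID = "<dataset uuid>"
DOCUMENT_ID = "<document uuid>"

resp = requests.get(
    f"{BASE}/datasets/{DATASET_ID}/documents/{DOCUMENT_ID}/pipeline-execution-log",
    headers={"Authorization": "Bearer <console token>"},  # assumption: console auth scheme
    timeout=10,
)
resp.raise_for_status()
log = resp.json()
# All four fields are None when no DocumentPipelineExecutionLog row exists for the document yet.
print(log["datasource_type"], log["datasource_node_id"])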

View File

@ -71,3 +71,9 @@ class ChildChunkDeleteIndexError(BaseHTTPException):
error_code = "child_chunk_delete_index_error"
description = "Delete child chunk index failed: {message}"
code = 500
class PipelineNotFoundError(BaseHTTPException):
error_code = "pipeline_not_found"
description = "Pipeline not found."
code = 404

View File

@ -0,0 +1,362 @@
from fastapi.encoders import jsonable_encoder
from flask import make_response, redirect, request
from flask_login import current_user
from flask_restx import Resource, reqparse
from werkzeug.exceptions import Forbidden, NotFound
from configs import dify_config
from controllers.console import api
from controllers.console.wraps import (
account_initialization_required,
setup_required,
)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.plugin.impl.oauth import OAuthHandler
from libs.helper import StrLen
from libs.login import login_required
from models.provider_ids import DatasourceProviderID
from services.datasource_provider_service import DatasourceProviderService
from services.plugin.oauth_service import OAuthProxyService
class DatasourcePluginOAuthAuthorizationUrl(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, provider_id: str):
user = current_user
tenant_id = user.current_tenant_id
if not current_user.is_editor:
raise Forbidden()
credential_id = request.args.get("credential_id")
datasource_provider_id = DatasourceProviderID(provider_id)
provider_name = datasource_provider_id.provider_name
plugin_id = datasource_provider_id.plugin_id
oauth_config = DatasourceProviderService().get_oauth_client(
tenant_id=tenant_id,
datasource_provider_id=datasource_provider_id,
)
if not oauth_config:
raise ValueError(f"No OAuth Client Config for {provider_id}")
context_id = OAuthProxyService.create_proxy_context(
user_id=current_user.id,
tenant_id=tenant_id,
plugin_id=plugin_id,
provider=provider_name,
credential_id=credential_id,
)
oauth_handler = OAuthHandler()
redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider_id}/datasource/callback"
authorization_url_response = oauth_handler.get_authorization_url(
tenant_id=tenant_id,
user_id=user.id,
plugin_id=plugin_id,
provider=provider_name,
redirect_uri=redirect_uri,
system_credentials=oauth_config,
)
response = make_response(jsonable_encoder(authorization_url_response))
response.set_cookie(
"context_id",
context_id,
httponly=True,
samesite="Lax",
max_age=OAuthProxyService.__MAX_AGE__,
)
return response
class DatasourceOAuthCallback(Resource):
@setup_required
def get(self, provider_id: str):
context_id = request.cookies.get("context_id") or request.args.get("context_id")
if not context_id:
raise Forbidden("context_id not found")
context = OAuthProxyService.use_proxy_context(context_id)
if context is None:
raise Forbidden("Invalid context_id")
user_id, tenant_id = context.get("user_id"), context.get("tenant_id")
datasource_provider_id = DatasourceProviderID(provider_id)
plugin_id = datasource_provider_id.plugin_id
datasource_provider_service = DatasourceProviderService()
oauth_client_params = datasource_provider_service.get_oauth_client(
tenant_id=tenant_id,
datasource_provider_id=datasource_provider_id,
)
if not oauth_client_params:
raise NotFound()
redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider_id}/datasource/callback"
oauth_handler = OAuthHandler()
oauth_response = oauth_handler.get_credentials(
tenant_id=tenant_id,
user_id=user_id,
plugin_id=plugin_id,
provider=datasource_provider_id.provider_name,
redirect_uri=redirect_uri,
system_credentials=oauth_client_params,
request=request,
)
credential_id = context.get("credential_id")
if credential_id:
datasource_provider_service.reauthorize_datasource_oauth_provider(
tenant_id=tenant_id,
provider_id=datasource_provider_id,
avatar_url=oauth_response.metadata.get("avatar_url") or None,
name=oauth_response.metadata.get("name") or None,
expire_at=oauth_response.expires_at,
credentials=dict(oauth_response.credentials),
credential_id=context.get("credential_id"),
)
else:
datasource_provider_service.add_datasource_oauth_provider(
tenant_id=tenant_id,
provider_id=datasource_provider_id,
avatar_url=oauth_response.metadata.get("avatar_url") or None,
name=oauth_response.metadata.get("name") or None,
expire_at=oauth_response.expires_at,
credentials=dict(oauth_response.credentials),
)
return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback")
class DatasourceAuth(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self, provider_id: str):
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument(
"name", type=StrLen(max_length=100), required=False, nullable=True, location="json", default=None
)
parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
args = parser.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
try:
datasource_provider_service.add_datasource_api_key_provider(
tenant_id=current_user.current_tenant_id,
provider_id=datasource_provider_id,
credentials=args["credentials"],
name=args["name"],
)
except CredentialsValidateFailedError as ex:
raise ValueError(str(ex))
return {"result": "success"}, 200
@setup_required
@login_required
@account_initialization_required
def get(self, provider_id: str):
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
datasources = datasource_provider_service.list_datasource_credentials(
tenant_id=current_user.current_tenant_id,
provider=datasource_provider_id.provider_name,
plugin_id=datasource_provider_id.plugin_id,
)
return {"result": datasources}, 200
class DatasourceAuthDeleteApi(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self, provider_id: str):
datasource_provider_id = DatasourceProviderID(provider_id)
plugin_id = datasource_provider_id.plugin_id
provider_name = datasource_provider_id.provider_name
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.remove_datasource_credentials(
tenant_id=current_user.current_tenant_id,
auth_id=args["credential_id"],
provider=provider_name,
plugin_id=plugin_id,
)
return {"result": "success"}, 200
class DatasourceAuthUpdateApi(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self, provider_id: str):
datasource_provider_id = DatasourceProviderID(provider_id)
parser = reqparse.RequestParser()
parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json")
parser.add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json")
parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
if not current_user.is_editor:
raise Forbidden()
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.update_datasource_credentials(
tenant_id=current_user.current_tenant_id,
auth_id=args["credential_id"],
provider=datasource_provider_id.provider_name,
plugin_id=datasource_provider_id.plugin_id,
credentials=args.get("credentials", {}),
name=args.get("name", None),
)
return {"result": "success"}, 201
class DatasourceAuthListApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self):
datasource_provider_service = DatasourceProviderService()
datasources = datasource_provider_service.get_all_datasource_credentials(
tenant_id=current_user.current_tenant_id
)
return {"result": jsonable_encoder(datasources)}, 200
class DatasourceHardCodeAuthListApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self):
datasource_provider_service = DatasourceProviderService()
datasources = datasource_provider_service.get_hard_code_datasource_credentials(
tenant_id=current_user.current_tenant_id
)
return {"result": jsonable_encoder(datasources)}, 200
class DatasourceAuthOauthCustomClient(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self, provider_id: str):
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("client_params", type=dict, required=False, nullable=True, location="json")
parser.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json")
args = parser.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.setup_oauth_custom_client_params(
tenant_id=current_user.current_tenant_id,
datasource_provider_id=datasource_provider_id,
client_params=args.get("client_params", {}),
enabled=args.get("enable_oauth_custom_client", False),
)
return {"result": "success"}, 200
@setup_required
@login_required
@account_initialization_required
def delete(self, provider_id: str):
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.remove_oauth_custom_client_params(
tenant_id=current_user.current_tenant_id,
datasource_provider_id=datasource_provider_id,
)
return {"result": "success"}, 200
class DatasourceAuthDefaultApi(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self, provider_id: str):
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("id", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.set_default_datasource_provider(
tenant_id=current_user.current_tenant_id,
datasource_provider_id=datasource_provider_id,
credential_id=args["id"],
)
return {"result": "success"}, 200
class DatasourceUpdateProviderNameApi(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self, provider_id: str):
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("name", type=StrLen(max_length=100), required=True, nullable=False, location="json")
parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.update_datasource_provider_name(
tenant_id=current_user.current_tenant_id,
datasource_provider_id=datasource_provider_id,
name=args["name"],
credential_id=args["credential_id"],
)
return {"result": "success"}, 200
api.add_resource(
DatasourcePluginOAuthAuthorizationUrl,
"/oauth/plugin/<path:provider_id>/datasource/get-authorization-url",
)
api.add_resource(
DatasourceOAuthCallback,
"/oauth/plugin/<path:provider_id>/datasource/callback",
)
api.add_resource(
DatasourceAuth,
"/auth/plugin/datasource/<path:provider_id>",
)
api.add_resource(
DatasourceAuthUpdateApi,
"/auth/plugin/datasource/<path:provider_id>/update",
)
api.add_resource(
DatasourceAuthDeleteApi,
"/auth/plugin/datasource/<path:provider_id>/delete",
)
api.add_resource(
DatasourceAuthListApi,
"/auth/plugin/datasource/list",
)
api.add_resource(
DatasourceHardCodeAuthListApi,
"/auth/plugin/datasource/default-list",
)
api.add_resource(
DatasourceAuthOauthCustomClient,
"/auth/plugin/datasource/<path:provider_id>/custom-client",
)
api.add_resource(
DatasourceAuthDefaultApi,
"/auth/plugin/datasource/<path:provider_id>/default",
)
api.add_resource(
DatasourceUpdateProviderNameApi,
"/auth/plugin/datasource/<path:provider_id>/update-name",
)
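Taken together, the routes above implement a two-step OAuth flow for datasource plugins: the console first fetches an authorization URL (which also sets an httponly context_id cookie), the user authorizes at the provider, and the provider redirects to the callback, which persists the credentials and redirects back to the console web UI. A rough client-side sketch; the host, token, and provider_id value are assumptions:
import requests

BASE = "https://dify.example.com/console/api"  # assumption: console API prefix
PROVIDER_ID = "langgenius/notion_datasource/notion_datasource"  # assumption: plugin_id/provider_name form

session = requests.Session()
session.headers["Authorization"] = "Bearer <console token>"  # assumption: console auth scheme

# Step 1: request the authorization URL; the response also sets the `context_id`
# cookie that DatasourceOAuthCallback later consumes via OAuthProxyService.
resp = session.get(f"{BASE}/oauth/plugin/{PROVIDER_ID}/datasource/get-authorization-url")
resp.raise_for_status()
authorization_url_response = resp.json()  # exact shape depends on the plugin's OAuthHandler

# Step 2: the user opens the returned URL in a browser and authorizes the app.
# Step 3: the provider redirects to .../datasource/callback, which stores (or
# re-authorizes) the credential and finally redirects to CONSOLE_WEB_URL/oauth-callback.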

View File

@ -0,0 +1,57 @@
from flask_restx import ( # type: ignore
Resource, # type: ignore
reqparse,
)
from werkzeug.exceptions import Forbidden
from controllers.console import api
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import current_user, login_required
from models import Account
from models.dataset import Pipeline
from services.rag_pipeline.rag_pipeline import RagPipelineService
class DataSourceContentPreviewApi(Resource):
@setup_required
@login_required
@account_initialization_required
@get_rag_pipeline
def post(self, pipeline: Pipeline, node_id: str):
"""
Run datasource content preview
"""
if not isinstance(current_user, Account):
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
parser.add_argument("datasource_type", type=str, required=True, location="json")
parser.add_argument("credential_id", type=str, required=False, location="json")
args = parser.parse_args()
inputs = args.get("inputs")
if inputs is None:
raise ValueError("missing inputs")
datasource_type = args.get("datasource_type")
if datasource_type is None:
raise ValueError("missing datasource_type")
rag_pipeline_service = RagPipelineService()
preview_content = rag_pipeline_service.run_datasource_node_preview(
pipeline=pipeline,
node_id=node_id,
user_inputs=inputs,
account=current_user,
datasource_type=datasource_type,
is_published=True,
credential_id=args.get("credential_id"),
)
return preview_content, 200
api.add_resource(
DataSourceContentPreviewApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/preview",
)
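A short sketch of invoking the preview route registered above. The inputs, datasource_type, and credential_id fields mirror the parser arguments in the handler; the datasource_type value, host, and auth header are assumptions.
import requests

BASE = "https://dify.example.com/console/api"  # assumption: console API prefix
PIPELINE_ID = "<pipeline uuid>"
NODE_ID = "<datasource node id>"

payload = {
    "inputs": {},  # user inputs for the datasource node
    "datasource_type": "online_document",  # assumption: a DatasourceProviderType value
    "credential_id": "<credential uuid>",  # optional
}
resp = requests.post(
    f"{BASE}/rag/pipelines/{PIPELINE_ID}/workflows/published/datasource/nodes/{NODE_ID}/preview",
    json=payload,
    headers={"Authorization": "Bearer <console token>"},  # assumption: console auth scheme
    timeout=60,
)
resp.raise_for_status()
print(resp.json())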

View File

@ -0,0 +1,164 @@
import logging
from flask import request
from flask_restx import Resource, reqparse
from sqlalchemy.orm import Session
from controllers.console import api
from controllers.console.wraps import (
account_initialization_required,
enterprise_license_required,
knowledge_pipeline_publish_enabled,
setup_required,
)
from extensions.ext_database import db
from libs.login import login_required
from models.dataset import PipelineCustomizedTemplate
from services.entities.knowledge_entities.rag_pipeline_entities import PipelineTemplateInfoEntity
from services.rag_pipeline.rag_pipeline import RagPipelineService
logger = logging.getLogger(__name__)
def _validate_name(name):
if not name or len(name) < 1 or len(name) > 40:
raise ValueError("Name must be between 1 to 40 characters.")
return name
def _validate_description_length(description):
if len(description) > 400:
raise ValueError("Description cannot exceed 400 characters.")
return description
class PipelineTemplateListApi(Resource):
@setup_required
@login_required
@account_initialization_required
@enterprise_license_required
def get(self):
type = request.args.get("type", default="built-in", type=str)
language = request.args.get("language", default="en-US", type=str)
# get pipeline templates
pipeline_templates = RagPipelineService.get_pipeline_templates(type, language)
return pipeline_templates, 200
class PipelineTemplateDetailApi(Resource):
@setup_required
@login_required
@account_initialization_required
@enterprise_license_required
def get(self, template_id: str):
type = request.args.get("type", default="built-in", type=str)
rag_pipeline_service = RagPipelineService()
pipeline_template = rag_pipeline_service.get_pipeline_template_detail(template_id, type)
return pipeline_template, 200
class CustomizedPipelineTemplateApi(Resource):
@setup_required
@login_required
@account_initialization_required
@enterprise_license_required
def patch(self, template_id: str):
parser = reqparse.RequestParser()
parser.add_argument(
"name",
nullable=False,
required=True,
help="Name must be between 1 to 40 characters.",
type=_validate_name,
)
parser.add_argument(
"description",
type=str,
nullable=True,
required=False,
default="",
)
parser.add_argument(
"icon_info",
type=dict,
location="json",
nullable=True,
)
args = parser.parse_args()
pipeline_template_info = PipelineTemplateInfoEntity(**args)
RagPipelineService.update_customized_pipeline_template(template_id, pipeline_template_info)
return 200
@setup_required
@login_required
@account_initialization_required
@enterprise_license_required
def delete(self, template_id: str):
RagPipelineService.delete_customized_pipeline_template(template_id)
return 200
@setup_required
@login_required
@account_initialization_required
@enterprise_license_required
def post(self, template_id: str):
with Session(db.engine) as session:
template = (
session.query(PipelineCustomizedTemplate).filter(PipelineCustomizedTemplate.id == template_id).first()
)
if not template:
raise ValueError("Customized pipeline template not found.")
return {"data": template.yaml_content}, 200
class PublishCustomizedPipelineTemplateApi(Resource):
@setup_required
@login_required
@account_initialization_required
@enterprise_license_required
@knowledge_pipeline_publish_enabled
def post(self, pipeline_id: str):
parser = reqparse.RequestParser()
parser.add_argument(
"name",
nullable=False,
required=True,
help="Name must be between 1 to 40 characters.",
type=_validate_name,
)
parser.add_argument(
"description",
type=str,
nullable=True,
required=False,
default="",
)
parser.add_argument(
"icon_info",
type=dict,
location="json",
nullable=True,
)
args = parser.parse_args()
rag_pipeline_service = RagPipelineService()
rag_pipeline_service.publish_customized_pipeline_template(pipeline_id, args)
return {"result": "success"}
api.add_resource(
PipelineTemplateListApi,
"/rag/pipeline/templates",
)
api.add_resource(
PipelineTemplateDetailApi,
"/rag/pipeline/templates/<string:template_id>",
)
api.add_resource(
CustomizedPipelineTemplateApi,
"/rag/pipeline/customized/templates/<string:template_id>",
)
api.add_resource(
PublishCustomizedPipelineTemplateApi,
"/rag/pipelines/<string:pipeline_id>/customized/publish",
)
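For orientation, a minimal sketch of browsing the template endpoints registered above. The type and language query parameters mirror the handler defaults; the host and auth header are assumptions.
import requests

BASE = "https://dify.example.com/console/api"  # assumption: console API prefix
HEADERS = {"Authorization": "Bearer <console token>"}  # assumption: console auth scheme

# List built-in templates in English (both values are the handler defaults).
templates = requests.get(
    f"{BASE}/rag/pipeline/templates",
    params={"type": "built-in", "language": "en-US"},
    headers=HEADERS,
    timeout=10,
)
templates.raise_for_status()

# Fetch a single template's detail by id.
detail = requests.get(
    f"{BASE}/rag/pipeline/templates/<template id>",
    params={"type": "built-in"},
    headers=HEADERS,
    timeout=10,
)
detail.raise_for_status()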

View File

@ -0,0 +1,114 @@
from flask_login import current_user  # type: ignore
from flask_restx import Resource, marshal, reqparse # type: ignore
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
import services
from controllers.console import api
from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_rate_limit_check,
setup_required,
)
from extensions.ext_database import db
from fields.dataset_fields import dataset_detail_fields
from libs.login import login_required
from models.dataset import DatasetPermissionEnum
from services.dataset_service import DatasetPermissionService, DatasetService
from services.entities.knowledge_entities.rag_pipeline_entities import IconInfo, RagPipelineDatasetCreateEntity
from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService
def _validate_name(name):
if not name or len(name) < 1 or len(name) > 40:
raise ValueError("Name must be between 1 to 40 characters.")
return name
def _validate_description_length(description):
if len(description) > 400:
raise ValueError("Description cannot exceed 400 characters.")
return description
class CreateRagPipelineDatasetApi(Resource):
@setup_required
@login_required
@account_initialization_required
@cloud_edition_billing_rate_limit_check("knowledge")
def post(self):
parser = reqparse.RequestParser()
parser.add_argument(
"yaml_content",
type=str,
nullable=False,
required=True,
help="yaml_content is required.",
)
args = parser.parse_args()
# The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
if not current_user.is_dataset_editor:
raise Forbidden()
rag_pipeline_dataset_create_entity = RagPipelineDatasetCreateEntity(
name="",
description="",
icon_info=IconInfo(
icon="📙",
icon_background="#FFF4ED",
icon_type="emoji",
),
permission=DatasetPermissionEnum.ONLY_ME,
partial_member_list=None,
yaml_content=args["yaml_content"],
)
try:
with Session(db.engine) as session:
rag_pipeline_dsl_service = RagPipelineDslService(session)
import_info = rag_pipeline_dsl_service.create_rag_pipeline_dataset(
tenant_id=current_user.current_tenant_id,
rag_pipeline_dataset_create_entity=rag_pipeline_dataset_create_entity,
)
if rag_pipeline_dataset_create_entity.permission == "partial_members":
DatasetPermissionService.update_partial_member_list(
current_user.current_tenant_id,
import_info["dataset_id"],
rag_pipeline_dataset_create_entity.partial_member_list,
)
except services.errors.dataset.DatasetNameDuplicateError:
raise DatasetNameDuplicateError()
return import_info, 201
class CreateEmptyRagPipelineDatasetApi(Resource):
@setup_required
@login_required
@account_initialization_required
@cloud_edition_billing_rate_limit_check("knowledge")
def post(self):
# The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
if not current_user.is_dataset_editor:
raise Forbidden()
dataset = DatasetService.create_empty_rag_pipeline_dataset(
tenant_id=current_user.current_tenant_id,
rag_pipeline_dataset_create_entity=RagPipelineDatasetCreateEntity(
name="",
description="",
icon_info=IconInfo(
icon="📙",
icon_background="#FFF4ED",
icon_type="emoji",
),
permission=DatasetPermissionEnum.ONLY_ME,
partial_member_list=None,
),
)
return marshal(dataset, dataset_detail_fields), 201
api.add_resource(CreateRagPipelineDatasetApi, "/rag/pipeline/dataset")
api.add_resource(CreateEmptyRagPipelineDatasetApi, "/rag/pipeline/empty-dataset")
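A hedged sketch of creating a pipeline-backed dataset from DSL via the route above. Only yaml_content is accepted by the parser; name, description, icon, and permission are filled in server-side with the defaults shown in CreateRagPipelineDatasetApi. The host, auth header, and local file path are assumptions.
import requests

BASE = "https://dify.example.com/console/api"  # assumption: console API prefix

with open("knowledge_pipeline.yml", encoding="utf-8") as f:  # assumption: a local DSL file
    yaml_content = f.read()

resp = requests.post(
    f"{BASE}/rag/pipeline/dataset",
    json={"yaml_content": yaml_content},
    headers={"Authorization": "Bearer <console token>"},  # assumption: console auth scheme
    timeout=30,
)
resp.raise_for_status()  # 201 with the import info on success
print(resp.json())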

View File

@ -0,0 +1,389 @@
import logging
from typing import Any, NoReturn
from flask import Response
from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.console import api
from controllers.console.app.error import (
DraftWorkflowNotExist,
)
from controllers.console.app.workflow_draft_variable import (
_WORKFLOW_DRAFT_VARIABLE_FIELDS,
_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS,
)
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import InvalidArgumentError, NotFoundError
from core.variables.segment_group import SegmentGroup
from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.variables.types import SegmentType
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from extensions.ext_database import db
from factories.file_factory import build_from_mapping, build_from_mappings
from factories.variable_factory import build_segment_with_type
from libs.login import current_user, login_required
from models.account import Account
from models.dataset import Pipeline
from models.workflow import WorkflowDraftVariable
from services.rag_pipeline.rag_pipeline import RagPipelineService
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
logger = logging.getLogger(__name__)
def _convert_values_to_json_serializable_object(value: Segment) -> Any:
if isinstance(value, FileSegment):
return value.value.model_dump()
elif isinstance(value, ArrayFileSegment):
return [i.model_dump() for i in value.value]
elif isinstance(value, SegmentGroup):
return [_convert_values_to_json_serializable_object(i) for i in value.value]
else:
return value.value
def _serialize_var_value(variable: WorkflowDraftVariable) -> Any:
value = variable.get_value()
# create a copy of the value to avoid affecting the model cache.
value = value.model_copy(deep=True)
# Refresh the url signature before returning it to client.
if isinstance(value, FileSegment):
file = value.value
file.remote_url = file.generate_url()
elif isinstance(value, ArrayFileSegment):
files = value.value
for file in files:
file.remote_url = file.generate_url()
return _convert_values_to_json_serializable_object(value)
def _create_pagination_parser():
parser = reqparse.RequestParser()
parser.add_argument(
"page",
type=inputs.int_range(1, 100_000),
required=False,
default=1,
location="args",
help="the page of data requested",
)
parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
return parser
def _get_items(var_list: WorkflowDraftVariableList) -> list[WorkflowDraftVariable]:
return var_list.variables
_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS = {
"items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS), attribute=_get_items),
"total": fields.Raw(),
}
_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS = {
"items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_FIELDS), attribute=_get_items),
}
def _api_prerequisite(f):
"""Common prerequisites for all draft workflow variable APIs.
It ensures the following conditions are satisfied:
- Dify has been properly set up.
- The request user has logged in and initialized.
- The requested app is a workflow or a chat flow.
- The request user has the edit permission for the app.
"""
@setup_required
@login_required
@account_initialization_required
@get_rag_pipeline
def wrapper(*args, **kwargs):
if not isinstance(current_user, Account) or not current_user.is_editor:
raise Forbidden()
return f(*args, **kwargs)
return wrapper
class RagPipelineVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
def get(self, pipeline: Pipeline):
"""
Get draft workflow variables
"""
parser = _create_pagination_parser()
args = parser.parse_args()
# fetch draft workflow by app_model
rag_pipeline_service = RagPipelineService()
workflow_exist = rag_pipeline_service.is_workflow_exist(pipeline=pipeline)
if not workflow_exist:
raise DraftWorkflowNotExist()
# fetch draft workflow by app_model
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
workflow_vars = draft_var_srv.list_variables_without_values(
app_id=pipeline.id,
page=args.page,
limit=args.limit,
)
return workflow_vars
@_api_prerequisite
def delete(self, pipeline: Pipeline):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
draft_var_srv.delete_workflow_variables(pipeline.id)
db.session.commit()
return Response("", 204)
def validate_node_id(node_id: str) -> NoReturn | None:
if node_id in [
CONVERSATION_VARIABLE_NODE_ID,
SYSTEM_VARIABLE_NODE_ID,
]:
# NOTE(QuantumGhost): While we store the system and conversation variables as node variables
# with specific `node_id` in the database, we still want to keep the APIs separate. By disallowing
# access to system and conversation variables in `WorkflowDraftNodeVariableListApi`,
# we mitigate the risk of API users depending on the implementation details of the API.
#
# ref: [Hyrum's Law](https://www.hyrumslaw.com/)
raise InvalidArgumentError(
f"invalid node_id, please use correspond api for conversation and system variables, node_id={node_id}",
)
return None
class RagPipelineNodeVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, pipeline: Pipeline, node_id: str):
validate_node_id(node_id)
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
node_vars = draft_var_srv.list_node_variables(pipeline.id, node_id)
return node_vars
@_api_prerequisite
def delete(self, pipeline: Pipeline, node_id: str):
validate_node_id(node_id)
srv = WorkflowDraftVariableService(db.session())
srv.delete_node_variables(pipeline.id, node_id)
db.session.commit()
return Response("", 204)
class RagPipelineVariableApi(Resource):
_PATCH_NAME_FIELD = "name"
_PATCH_VALUE_FIELD = "value"
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
def get(self, pipeline: Pipeline, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != pipeline.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
return variable
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
def patch(self, pipeline: Pipeline, variable_id: str):
# Request payload for file types:
#
# Local File:
#
# {
# "type": "image",
# "transfer_method": "local_file",
# "url": "",
# "upload_file_id": "daded54f-72c7-4f8e-9d18-9b0abdd9f190"
# }
#
# Remote File:
#
#
# {
# "type": "image",
# "transfer_method": "remote_url",
# "url": "http://127.0.0.1:5001/files/1602650a-4fe4-423c-85a2-af76c083e3c4/file-preview?timestamp=1750041099&nonce=...&sign=...=",
# "upload_file_id": "1602650a-4fe4-423c-85a2-af76c083e3c4"
# }
parser = reqparse.RequestParser()
parser.add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json")
# Parse 'value' field as-is to maintain its original data structure
parser.add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json")
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
args = parser.parse_args(strict=True)
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != pipeline.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
new_name = args.get(self._PATCH_NAME_FIELD, None)
raw_value = args.get(self._PATCH_VALUE_FIELD, None)
if new_name is None and raw_value is None:
return variable
new_value = None
if raw_value is not None:
if variable.value_type == SegmentType.FILE:
if not isinstance(raw_value, dict):
raise InvalidArgumentError(description=f"expected dict for file, got {type(raw_value)}")
raw_value = build_from_mapping(mapping=raw_value, tenant_id=pipeline.tenant_id)
elif variable.value_type == SegmentType.ARRAY_FILE:
if not isinstance(raw_value, list):
raise InvalidArgumentError(description=f"expected list for files, got {type(raw_value)}")
if len(raw_value) > 0 and not isinstance(raw_value[0], dict):
raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}")
raw_value = build_from_mappings(mappings=raw_value, tenant_id=pipeline.tenant_id)
new_value = build_segment_with_type(variable.value_type, raw_value)
draft_var_srv.update_variable(variable, name=new_name, value=new_value)
db.session.commit()
return variable
@_api_prerequisite
def delete(self, pipeline: Pipeline, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != pipeline.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
draft_var_srv.delete_variable(variable)
db.session.commit()
return Response("", 204)
class RagPipelineVariableResetApi(Resource):
@_api_prerequisite
def put(self, pipeline: Pipeline, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
rag_pipeline_service = RagPipelineService()
draft_workflow = rag_pipeline_service.get_draft_workflow(pipeline=pipeline)
if draft_workflow is None:
raise NotFoundError(
f"Draft workflow not found, pipeline_id={pipeline.id}",
)
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != pipeline.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
resetted = draft_var_srv.reset_variable(draft_workflow, variable)
db.session.commit()
if resetted is None:
return Response("", 204)
else:
return marshal(resetted, _WORKFLOW_DRAFT_VARIABLE_FIELDS)
def _get_variable_list(pipeline: Pipeline, node_id) -> WorkflowDraftVariableList:
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
if node_id == CONVERSATION_VARIABLE_NODE_ID:
draft_vars = draft_var_srv.list_conversation_variables(pipeline.id)
elif node_id == SYSTEM_VARIABLE_NODE_ID:
draft_vars = draft_var_srv.list_system_variables(pipeline.id)
else:
draft_vars = draft_var_srv.list_node_variables(app_id=pipeline.id, node_id=node_id)
return draft_vars
class RagPipelineSystemVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, pipeline: Pipeline):
return _get_variable_list(pipeline, SYSTEM_VARIABLE_NODE_ID)
class RagPipelineEnvironmentVariableCollectionApi(Resource):
@_api_prerequisite
def get(self, pipeline: Pipeline):
"""
Get draft workflow environment variables
"""
# fetch draft workflow by app_model
rag_pipeline_service = RagPipelineService()
workflow = rag_pipeline_service.get_draft_workflow(pipeline=pipeline)
if workflow is None:
raise DraftWorkflowNotExist()
env_vars = workflow.environment_variables
env_vars_list = []
for v in env_vars:
env_vars_list.append(
{
"id": v.id,
"type": "env",
"name": v.name,
"description": v.description,
"selector": v.selector,
"value_type": v.value_type.value,
"value": v.value,
# Do not track edited for env vars.
"edited": False,
"visible": True,
"editable": True,
}
)
return {"items": env_vars_list}
api.add_resource(
RagPipelineVariableCollectionApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables",
)
api.add_resource(
RagPipelineNodeVariableCollectionApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/variables",
)
api.add_resource(
RagPipelineVariableApi, "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables/<uuid:variable_id>"
)
api.add_resource(
RagPipelineVariableResetApi, "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables/<uuid:variable_id>/reset"
)
api.add_resource(
RagPipelineSystemVariableCollectionApi, "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/system-variables"
)
api.add_resource(
RagPipelineEnvironmentVariableCollectionApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/environment-variables",
)
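The payload comment in RagPipelineVariableApi.patch above documents the accepted file shapes; a small sketch of patching a FILE-type draft variable with the local_file form. IDs, host, and the auth header are placeholders and assumptions.
import requests

BASE = "https://dify.example.com/console/api"  # assumption: console API prefix
PIPELINE_ID = "<pipeline uuid>"
VARIABLE_ID = "<variable uuid>"

resp = requests.patch(
    f"{BASE}/rag/pipelines/{PIPELINE_ID}/workflows/draft/variables/{VARIABLE_ID}",
    json={
        "value": {
            "type": "image",
            "transfer_method": "local_file",
            "url": "",
            "upload_file_id": "<upload file uuid>",
        }
    },
    headers={"Authorization": "Bearer <console token>"},  # assumption: console auth scheme
    timeout=10,
)
resp.raise_for_status()
print(resp.json())  # the updated variable, marshalled with _WORKFLOW_DRAFT_VARIABLE_FIELDS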

View File

@ -0,0 +1,147 @@
from typing import cast
from flask_login import current_user # type: ignore
from flask_restx import Resource, marshal_with, reqparse # type: ignore
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.console import api
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import (
account_initialization_required,
setup_required,
)
from extensions.ext_database import db
from fields.rag_pipeline_fields import pipeline_import_check_dependencies_fields, pipeline_import_fields
from libs.login import login_required
from models import Account
from models.dataset import Pipeline
from services.app_dsl_service import ImportStatus
from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService
class RagPipelineImportApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(pipeline_import_fields)
def post(self):
# Check user role first
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("mode", type=str, required=True, location="json")
parser.add_argument("yaml_content", type=str, location="json")
parser.add_argument("yaml_url", type=str, location="json")
parser.add_argument("name", type=str, location="json")
parser.add_argument("description", type=str, location="json")
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
parser.add_argument("icon_background", type=str, location="json")
parser.add_argument("pipeline_id", type=str, location="json")
args = parser.parse_args()
# Create service with session
with Session(db.engine) as session:
import_service = RagPipelineDslService(session)
# Import app
account = cast(Account, current_user)
result = import_service.import_rag_pipeline(
account=account,
import_mode=args["mode"],
yaml_content=args.get("yaml_content"),
yaml_url=args.get("yaml_url"),
pipeline_id=args.get("pipeline_id"),
dataset_name=args.get("name"),
)
session.commit()
# Return appropriate status code based on result
status = result.status
if status == ImportStatus.FAILED.value:
return result.model_dump(mode="json"), 400
elif status == ImportStatus.PENDING.value:
return result.model_dump(mode="json"), 202
return result.model_dump(mode="json"), 200
class RagPipelineImportConfirmApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(pipeline_import_fields)
def post(self, import_id):
# Check user role first
if not current_user.is_editor:
raise Forbidden()
# Create service with session
with Session(db.engine) as session:
import_service = RagPipelineDslService(session)
# Confirm import
account = cast(Account, current_user)
result = import_service.confirm_import(import_id=import_id, account=account)
session.commit()
# Return appropriate status code based on result
if result.status == ImportStatus.FAILED.value:
return result.model_dump(mode="json"), 400
return result.model_dump(mode="json"), 200
class RagPipelineImportCheckDependenciesApi(Resource):
@setup_required
@login_required
@get_rag_pipeline
@account_initialization_required
@marshal_with(pipeline_import_check_dependencies_fields)
def get(self, pipeline: Pipeline):
if not current_user.is_editor:
raise Forbidden()
with Session(db.engine) as session:
import_service = RagPipelineDslService(session)
result = import_service.check_dependencies(pipeline=pipeline)
return result.model_dump(mode="json"), 200
class RagPipelineExportApi(Resource):
@setup_required
@login_required
@get_rag_pipeline
@account_initialization_required
def get(self, pipeline: Pipeline):
if not current_user.is_editor:
raise Forbidden()
# Add include_secret params
parser = reqparse.RequestParser()
parser.add_argument("include_secret", type=bool, default=False, location="args")
args = parser.parse_args()
with Session(db.engine) as session:
export_service = RagPipelineDslService(session)
result = export_service.export_rag_pipeline_dsl(pipeline=pipeline, include_secret=args["include_secret"])
return {"data": result}, 200
# Import Rag Pipeline
api.add_resource(
RagPipelineImportApi,
"/rag/pipelines/imports",
)
api.add_resource(
RagPipelineImportConfirmApi,
"/rag/pipelines/imports/<string:import_id>/confirm",
)
api.add_resource(
RagPipelineImportCheckDependenciesApi,
"/rag/pipelines/imports/<string:pipeline_id>/check-dependencies",
)
api.add_resource(
RagPipelineExportApi,
"/rag/pipelines/<string:pipeline_id>/exports",
)
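A rough end-to-end sketch of the import flow above: POST the DSL, then confirm if the result is pending. The route paths come from the registrations above; the mode value, the result field names (id, status), and the host/auth details are assumptions.
import requests

BASE = "https://dify.example.com/console/api"  # assumption: console API prefix
HEADERS = {"Authorization": "Bearer <console token>"}  # assumption: console auth scheme

resp = requests.post(
    f"{BASE}/rag/pipelines/imports",
    json={
        "mode": "yaml-content",  # assumption: same import modes as the app DSL importer
        "yaml_content": "<pipeline DSL yaml>",
    },
    headers=HEADERS,
    timeout=30,
)
result = resp.json()

if result.get("status") == "pending":  # assumption: ImportStatus.PENDING serializes to "pending"
    # A pending import (HTTP 202) must be confirmed before it is applied.
    confirm = requests.post(
        f"{BASE}/rag/pipelines/imports/{result['id']}/confirm",  # assumption: result exposes its id
        headers=HEADERS,
        timeout=30,
    )
    confirm.raise_for_status()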

File diff suppressed because it is too large

View File

@ -0,0 +1,47 @@
from collections.abc import Callable
from functools import wraps
from typing import Optional
from controllers.console.datasets.error import PipelineNotFoundError
from extensions.ext_database import db
from libs.login import current_user
from models.account import Account
from models.dataset import Pipeline
def get_rag_pipeline(
view: Optional[Callable] = None,
):
def decorator(view_func):
@wraps(view_func)
def decorated_view(*args, **kwargs):
if not kwargs.get("pipeline_id"):
raise ValueError("missing pipeline_id in path parameters")
if not isinstance(current_user, Account):
raise ValueError("current_user is not an account")
pipeline_id = kwargs.get("pipeline_id")
pipeline_id = str(pipeline_id)
del kwargs["pipeline_id"]
pipeline = (
db.session.query(Pipeline)
.filter(Pipeline.id == pipeline_id, Pipeline.tenant_id == current_user.current_tenant_id)
.first()
)
if not pipeline:
raise PipelineNotFoundError()
kwargs["pipeline"] = pipeline
return view_func(*args, **kwargs)
return decorated_view
if view is None:
return decorator
else:
return decorator(view)

View File

@ -20,6 +20,7 @@ from core.errors.error import (
QuotaExceededError,
)
from core.model_runtime.errors.invoke import InvokeError
from core.workflow.graph_engine.manager import GraphEngineManager
from libs import helper
from libs.login import current_user
from models.model import AppMode, InstalledApp
@ -78,6 +79,11 @@ class InstalledAppWorkflowTaskStopApi(InstalledAppResource):
raise NotWorkflowAppError()
assert current_user is not None
AppQueueManager.set_stop_flag(task_id, InvokeFrom.EXPLORE, current_user.id)
# Stop using both mechanisms for backward compatibility
# Legacy stop flag mechanism (without user check)
AppQueueManager.set_stop_flag_no_user_check(task_id)
# New graph engine command channel mechanism
GraphEngineManager.send_stop_command(task_id)
return {"result": "success"}

View File

@ -20,6 +20,7 @@ from controllers.console.wraps import (
cloud_edition_billing_resource_check,
setup_required,
)
from extensions.ext_database import db
from fields.file_fields import file_fields, upload_config_fields
from libs.login import login_required
from services.file_service import FileService
@ -68,7 +69,7 @@ class FileApi(Resource):
source = None
try:
upload_file = FileService.upload_file(
upload_file = FileService(db.engine).upload_file(
filename=file.filename,
content=file.read(),
mimetype=file.mimetype,
@ -89,7 +90,7 @@ class FilePreviewApi(Resource):
@account_initialization_required
def get(self, file_id):
file_id = str(file_id)
text = FileService.get_file_preview(file_id)
text = FileService(db.engine).get_file_preview(file_id)
return {"content": text}

View File

@ -14,6 +14,7 @@ from controllers.common.errors import (
)
from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from extensions.ext_database import db
from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields
from models.account import Account
from services.file_service import FileService
@ -61,7 +62,7 @@ class RemoteFileUploadApi(Resource):
try:
user = cast(Account, current_user)
upload_file = FileService.upload_file(
upload_file = FileService(db.engine).upload_file(
filename=file_info.filename,
content=content,
mimetype=file_info.mimetype,

View File

@ -0,0 +1,35 @@
import logging
from flask_restx import Resource
from controllers.console import api
from controllers.console.wraps import (
account_initialization_required,
setup_required,
)
from core.schemas.schema_manager import SchemaManager
from libs.login import login_required
logger = logging.getLogger(__name__)
class SpecSchemaDefinitionsApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self):
"""
Get system JSON Schema definitions specification
Used for frontend component type mapping
"""
try:
schema_manager = SchemaManager()
schema_definitions = schema_manager.get_all_schema_definitions()
return schema_definitions, 200
except Exception:
logger.exception("Failed to get schema definitions from local registry")
# Return empty array as fallback
return [], 200
api.add_resource(SpecSchemaDefinitionsApi, "/spec/schema-definitions")

View File

@ -21,11 +21,11 @@ from core.mcp.auth.auth_provider import OAuthClientProvider
from core.mcp.error import MCPAuthError, MCPError
from core.mcp.mcp_client import MCPClient
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.entities.plugin import ToolProviderID
from core.plugin.impl.oauth import OAuthHandler
from core.tools.entities.tool_entities import CredentialType
from libs.helper import StrLen, alphanumeric, uuid_value
from libs.login import login_required
from models.provider_ids import ToolProviderID
from services.plugin.oauth_service import OAuthProxyService
from services.tools.api_tools_manage_service import ApiToolManageService
from services.tools.builtin_tools_manage_service import BuiltinToolManageService

View File

@ -211,7 +211,7 @@ class WebappLogoWorkspaceApi(Resource):
raise UnsupportedFileTypeError()
try:
upload_file = FileService.upload_file(
upload_file = FileService(db.engine).upload_file(
filename=file.filename,
content=file.read(),
mimetype=file.mimetype,

View File

@ -261,3 +261,14 @@ def is_allow_transfer_owner(view):
abort(403)
return decorated
def knowledge_pipeline_publish_enabled(view):
@wraps(view)
def decorated(*args, **kwargs):
features = FeatureService.get_features(current_user.current_tenant_id)
if features.knowledge_pipeline.publish_enabled:
return view(*args, **kwargs)
abort(403)
return decorated

View File

@ -7,6 +7,7 @@ from werkzeug.exceptions import NotFound
import services
from controllers.common.errors import UnsupportedFileTypeError
from controllers.files import files_ns
from extensions.ext_database import db
from services.account_service import TenantService
from services.file_service import FileService
@ -28,7 +29,7 @@ class ImagePreviewApi(Resource):
return {"content": "Invalid request."}, 400
try:
generator, mimetype = FileService.get_image_preview(
generator, mimetype = FileService(db.engine).get_image_preview(
file_id=file_id,
timestamp=timestamp,
nonce=nonce,
@ -57,7 +58,7 @@ class FilePreviewApi(Resource):
return {"content": "Invalid request."}, 400
try:
generator, upload_file = FileService.get_file_generator_by_file_id(
generator, upload_file = FileService(db.engine).get_file_generator_by_file_id(
file_id=file_id,
timestamp=args["timestamp"],
nonce=args["nonce"],
@ -108,7 +109,7 @@ class WorkspaceWebappLogoApi(Resource):
raise NotFound("webapp logo is not found")
try:
generator, mimetype = FileService.get_public_image_preview(
generator, mimetype = FileService(db.engine).get_public_image_preview(
webapp_logo_file_id,
)
except services.errors.file.UnsupportedFileTypeError:

View File

@ -8,7 +8,7 @@ from controllers.common.errors import UnsupportedFileTypeError
from controllers.files import files_ns
from core.tools.signature import verify_tool_file_signature
from core.tools.tool_file_manager import ToolFileManager
from models import db as global_db
from extensions.ext_database import db as global_db
@files_ns.route("/tools/<uuid:file_id>.<string:extension>")

View File

@ -12,8 +12,9 @@ from controllers.common.errors import (
)
from controllers.service_api import service_api_ns
from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token
from extensions.ext_database import db
from fields.file_fields import build_file_model
from models.model import App, EndUser
from models import App, EndUser
from services.file_service import FileService
@ -52,7 +53,7 @@ class FileApi(Resource):
raise FilenameNotExistsError
try:
upload_file = FileService.upload_file(
upload_file = FileService(db.engine).upload_file(
filename=file.filename,
content=file.read(),
mimetype=file.mimetype,

View File

@ -26,7 +26,8 @@ from core.errors.error import (
)
from core.helper.trace_id_helper import get_external_trace_id
from core.model_runtime.errors.invoke import InvokeError
from core.workflow.entities.workflow_execution import WorkflowExecutionStatus
from core.workflow.enums import WorkflowExecutionStatus
from core.workflow.graph_engine.manager import GraphEngineManager
from extensions.ext_database import db
from fields.workflow_app_log_fields import build_workflow_app_log_pagination_model
from libs import helper
@ -262,7 +263,12 @@ class WorkflowTaskStopApi(Resource):
if app_mode != AppMode.WORKFLOW:
raise NotWorkflowAppError()
AppQueueManager.set_stop_flag(task_id, InvokeFrom.SERVICE_API, end_user.id)
# Stop the task using both mechanisms for backward compatibility
# Legacy stop flag mechanism (without user check)
AppQueueManager.set_stop_flag_no_user_check(task_id)
# New graph engine command channel mechanism
GraphEngineManager.send_stop_command(task_id)
return {"result": "success"}

View File

@ -13,13 +13,13 @@ from controllers.service_api.wraps import (
validate_dataset_token,
)
from core.model_runtime.entities.model_entities import ModelType
from core.plugin.entities.plugin import ModelProviderID
from core.provider_manager import ProviderManager
from fields.dataset_fields import dataset_detail_fields
from fields.tag_fields import build_dataset_tag_fields
from libs.login import current_user
from models.account import Account
from models.dataset import Dataset, DatasetPermissionEnum
from models.provider_ids import ModelProviderID
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
from services.entities.knowledge_entities.knowledge_entities import RetrievalModel
from services.tag_service import TagService

View File

@ -123,7 +123,7 @@ class DocumentAddByTextApi(DatasetApiResource):
args.get("retrieval_model").get("reranking_model").get("reranking_model_name"),
)
upload_file = FileService.upload_text(text=str(text), text_name=str(name))
upload_file = FileService(db.engine).upload_text(text=str(text), text_name=str(name))
data_source = {
"type": "upload_file",
"info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}},
@ -133,6 +133,9 @@ class DocumentAddByTextApi(DatasetApiResource):
# validate args
DocumentService.document_create_args_validate(knowledge_config)
if not current_user:
raise ValueError("current_user is required")
try:
documents, batch = DocumentService.save_document_with_dataset_id(
dataset=dataset,
@ -198,7 +201,7 @@ class DocumentUpdateByTextApi(DatasetApiResource):
name = args.get("name")
if text is None or name is None:
raise ValueError("Both text and name must be strings.")
upload_file = FileService.upload_text(text=str(text), text_name=str(name))
upload_file = FileService(db.engine).upload_text(text=str(text), text_name=str(name))
data_source = {
"type": "upload_file",
"info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}},
@ -298,7 +301,7 @@ class DocumentAddByFileApi(DatasetApiResource):
if not file.filename:
raise FilenameNotExistsError
upload_file = FileService.upload_file(
upload_file = FileService(db.engine).upload_file(
filename=file.filename,
content=file.read(),
mimetype=file.mimetype,
@ -387,7 +390,7 @@ class DocumentUpdateByFileApi(DatasetApiResource):
raise FilenameNotExistsError
try:
upload_file = FileService.upload_file(
upload_file = FileService(db.engine).upload_file(
filename=file.filename,
content=file.read(),
mimetype=file.mimetype,

View File

@ -11,6 +11,7 @@ from controllers.common.errors import (
)
from controllers.web import web_ns
from controllers.web.wraps import WebApiResource
from extensions.ext_database import db
from fields.file_fields import build_file_model
from services.file_service import FileService
@ -68,7 +69,7 @@ class FileApi(WebApiResource):
source = None
try:
upload_file = FileService.upload_file(
upload_file = FileService(db.engine).upload_file(
filename=file.filename,
content=file.read(),
mimetype=file.mimetype,

View File

@ -14,6 +14,7 @@ from controllers.web import web_ns
from controllers.web.wraps import WebApiResource
from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from extensions.ext_database import db
from fields.file_fields import build_file_with_signed_url_model, build_remote_file_info_model
from services.file_service import FileService
@ -119,7 +120,7 @@ class RemoteFileUploadApi(WebApiResource):
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
try:
upload_file = FileService.upload_file(
upload_file = FileService(db.engine).upload_file(
filename=file_info.filename,
content=content,
mimetype=file_info.mimetype,

View File

@ -21,6 +21,7 @@ from core.errors.error import (
QuotaExceededError,
)
from core.model_runtime.errors.invoke import InvokeError
from core.workflow.graph_engine.manager import GraphEngineManager
from libs import helper
from models.model import App, AppMode, EndUser
from services.app_generate_service import AppGenerateService
@ -110,7 +111,12 @@ class WorkflowTaskStopApi(WebApiResource):
if app_mode != AppMode.WORKFLOW:
raise NotWorkflowAppError()
AppQueueManager.set_stop_flag(task_id, InvokeFrom.WEB_APP, end_user.id)
# Stop the task via both mechanisms for backward compatibility
# Legacy stop flag mechanism (without user check)
AppQueueManager.set_stop_flag_no_user_check(task_id)
# New graph engine command channel mechanism
GraphEngineManager.send_stop_command(task_id)
return {"result": "success"}

View File

@ -90,7 +90,9 @@ class BaseAgentRunner(AppRunner):
tenant_id=tenant_id,
dataset_ids=app_config.dataset.dataset_ids if app_config.dataset else [],
retrieve_config=app_config.dataset.retrieve_config if app_config.dataset else None,
return_resource=app_config.additional_features.show_retrieve_source,
return_resource=(
app_config.additional_features.show_retrieve_source if app_config.additional_features else False
),
invoke_from=application_generate_entity.invoke_from,
hit_callback=hit_callback,
user_id=user_id,

View File

@ -4,8 +4,8 @@ from typing import Any
from core.app.app_config.entities import ModelConfigEntity
from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType
from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory
from core.plugin.entities.plugin import ModelProviderID
from core.provider_manager import ProviderManager
from models.provider_ids import ModelProviderID
class ModelConfigManager:

View File

@ -114,9 +114,9 @@ class VariableEntity(BaseModel):
hide: bool = False
max_length: Optional[int] = None
options: Sequence[str] = Field(default_factory=list)
allowed_file_types: Sequence[FileType] = Field(default_factory=list)
allowed_file_extensions: Sequence[str] = Field(default_factory=list)
allowed_file_upload_methods: Sequence[FileTransferMethod] = Field(default_factory=list)
allowed_file_types: Optional[Sequence[FileType]] = Field(default_factory=list)
allowed_file_extensions: Optional[Sequence[str]] = Field(default_factory=list)
allowed_file_upload_methods: Optional[Sequence[FileTransferMethod]] = Field(default_factory=list)
@field_validator("description", mode="before")
@classmethod
@ -129,6 +129,16 @@ class VariableEntity(BaseModel):
return v or []
class RagPipelineVariableEntity(VariableEntity):
"""
Rag Pipeline Variable Entity.
"""
tooltips: Optional[str] = None
placeholder: Optional[str] = None
belong_to_node_id: str
class ExternalDataVariableEntity(BaseModel):
"""
External Data Variable Entity.
@ -288,7 +298,7 @@ class AppConfig(BaseModel):
tenant_id: str
app_id: str
app_mode: AppMode
additional_features: AppAdditionalFeatures
additional_features: Optional[AppAdditionalFeatures] = None
variables: list[VariableEntity] = []
sensitive_word_avoidance: Optional[SensitiveWordAvoidanceEntity] = None
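With additional_features now optional on AppConfig, call sites guard the access as the agent and chat runners do later in this diff. A small sketch of that guard, assuming app_config is an AppConfig instance:
    from core.app.app_config.entities import AppConfig

    def resolve_show_retrieve_source(app_config: AppConfig) -> bool:
        # Fall back to False when no additional features are configured.
        if app_config.additional_features is None:
            return False
        return app_config.additional_features.show_retrieve_source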

View File

@ -1,4 +1,6 @@
from core.app.app_config.entities import VariableEntity
import re
from core.app.app_config.entities import RagPipelineVariableEntity, VariableEntity
from models.workflow import Workflow
@ -20,3 +22,48 @@ class WorkflowVariablesConfigManager:
variables.append(VariableEntity.model_validate(variable))
return variables
@classmethod
def convert_rag_pipeline_variable(cls, workflow: Workflow, start_node_id: str) -> list[RagPipelineVariableEntity]:
"""
Convert rag pipeline variables bound to the given datasource node into entities.
:param workflow: workflow instance
:param start_node_id: id of the datasource (start) node
"""
variables = []
# get second step node
rag_pipeline_variables = workflow.rag_pipeline_variables
if not rag_pipeline_variables:
return []
variables_map = {item["variable"]: item for item in rag_pipeline_variables}
# get datasource node data
datasource_node_data = None
datasource_nodes = workflow.graph_dict.get("nodes", [])
for datasource_node in datasource_nodes:
if datasource_node.get("id") == start_node_id:
datasource_node_data = datasource_node.get("data", {})
break
if datasource_node_data:
datasource_parameters = datasource_node_data.get("datasource_parameters", {})
for _, value in datasource_parameters.items():
if value.get("value") and isinstance(value.get("value"), str):
pattern = r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z0-9_][a-zA-Z0-9_]{0,29}){1,10})#\}\}"
match = re.match(pattern, value["value"])
if match:
full_path = match.group(1)
last_part = full_path.split(".")[-1]
variables_map.pop(last_part, None)
if value.get("value") and isinstance(value.get("value"), list):
last_part = value.get("value")[-1]
variables_map.pop(last_part, None)
all_second_step_variables = list(variables_map.values())
for item in all_second_step_variables:
if item.get("belong_to_node_id") == start_node_id or item.get("belong_to_node_id") == "shared":
variables.append(RagPipelineVariableEntity.model_validate(item))
return variables
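For reference, the reference syntax handled by the pattern above looks like {{#node_id.variable#}}. A small sketch of how a matched datasource parameter removes its variable from the user-facing form; the example value is hypothetical, the regex is the one used above:
    import re

    PATTERN = r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z0-9_][a-zA-Z0-9_]{0,29}){1,10})#\}\}"

    value = "{{#start_node.file_url#}}"  # hypothetical datasource parameter value
    match = re.match(PATTERN, value)
    if match:
        full_path = match.group(1)            # "start_node.file_url"
        last_part = full_path.split(".")[-1]  # "file_url": already bound to the
        print(last_part)                      # datasource, so it is dropped from the map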

View File

@ -154,7 +154,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
if invoke_from == InvokeFrom.DEBUGGER:
# always enable retriever resource in debugger mode
app_config.additional_features.show_retrieve_source = True
app_config.additional_features.show_retrieve_source = True # type: ignore
workflow_run_id = str(uuid.uuid4())
# init application generate entity
@ -467,7 +467,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
workflow_execution_repository=workflow_execution_repository,
workflow_node_execution_repository=workflow_node_execution_repository,
stream=stream,
draft_var_saver_factory=self._get_draft_var_saver_factory(invoke_from),
draft_var_saver_factory=self._get_draft_var_saver_factory(invoke_from, account=user),
)
return AdvancedChatAppGenerateResponseConverter.convert(response=response, invoke_from=invoke_from)

View File

@ -1,11 +1,11 @@
import logging
import time
from collections.abc import Mapping
from typing import Any, Optional, cast
from sqlalchemy import select
from sqlalchemy.orm import Session
from configs import dify_config
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfig
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner
@ -23,16 +23,17 @@ from core.app.features.annotation_reply.annotation_reply import AnnotationReplyF
from core.moderation.base import ModerationError
from core.moderation.input_moderation import InputModeration
from core.variables.variables import VariableUnion
from core.workflow.callbacks import WorkflowCallback, WorkflowLoggingCallback
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities import GraphRuntimeState, VariablePool
from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel
from core.workflow.system_variable import SystemVariable
from core.workflow.variable_loader import VariableLoader
from core.workflow.workflow_entry import WorkflowEntry
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models import Workflow
from models.enums import UserFrom
from models.model import App, Conversation, Message, MessageAnnotation
from models.workflow import ConversationVariable, WorkflowType
from models.workflow import ConversationVariable
logger = logging.getLogger(__name__)
@ -78,23 +79,29 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
if not app_record:
raise ValueError("App not found")
workflow_callbacks: list[WorkflowCallback] = []
if dify_config.DEBUG:
workflow_callbacks.append(WorkflowLoggingCallback())
if self.application_generate_entity.single_iteration_run:
# if only single iteration run is requested
graph_runtime_state = GraphRuntimeState(
variable_pool=VariablePool.empty(),
start_at=time.time(),
)
graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration(
workflow=self._workflow,
node_id=self.application_generate_entity.single_iteration_run.node_id,
user_inputs=dict(self.application_generate_entity.single_iteration_run.inputs),
graph_runtime_state=graph_runtime_state,
)
elif self.application_generate_entity.single_loop_run:
# if only single loop run is requested
graph_runtime_state = GraphRuntimeState(
variable_pool=VariablePool.empty(),
start_at=time.time(),
)
graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop(
workflow=self._workflow,
node_id=self.application_generate_entity.single_loop_run.node_id,
user_inputs=dict(self.application_generate_entity.single_loop_run.inputs),
graph_runtime_state=graph_runtime_state,
)
else:
inputs = self.application_generate_entity.inputs
@ -146,16 +153,27 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
)
# init graph
graph = self._init_graph(graph_config=self._workflow.graph_dict)
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.time())
graph = self._init_graph(
graph_config=self._workflow.graph_dict,
graph_runtime_state=graph_runtime_state,
workflow_id=self._workflow.id,
tenant_id=self._workflow.tenant_id,
user_id=self.application_generate_entity.user_id,
)
db.session.close()
# RUN WORKFLOW
# Create Redis command channel for this workflow execution
task_id = self.application_generate_entity.task_id
channel_key = f"workflow:{task_id}:commands"
command_channel = RedisChannel(redis_client, channel_key)
workflow_entry = WorkflowEntry(
tenant_id=self._workflow.tenant_id,
app_id=self._workflow.app_id,
workflow_id=self._workflow.id,
workflow_type=WorkflowType.value_of(self._workflow.type),
graph=graph,
graph_config=self._workflow.graph_dict,
user_id=self.application_generate_entity.user_id,
@ -166,12 +184,11 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
),
invoke_from=self.application_generate_entity.invoke_from,
call_depth=self.application_generate_entity.call_depth,
variable_pool=variable_pool,
graph_runtime_state=graph_runtime_state,
command_channel=command_channel,
)
generator = workflow_entry.run(
callbacks=workflow_callbacks,
)
generator = workflow_entry.run()
for event in generator:
self._handle_event(workflow_entry, event)
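The runner now builds a per-task Redis command channel and hands it to WorkflowEntry together with the pre-built GraphRuntimeState. A minimal sketch of just the channel wiring, with a placeholder task id:
    from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel
    from extensions.ext_redis import redis_client

    task_id = "example-task-id"  # placeholder; the runner uses application_generate_entity.task_id
    channel_key = f"workflow:{task_id}:commands"
    command_channel = RedisChannel(redis_client, channel_key)
    # WorkflowEntry(..., command_channel=command_channel) then lets commands sent
    # for this task (such as the stop command above) reach the running engine.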

View File

@ -31,14 +31,9 @@ from core.app.entities.queue_entities import (
QueueMessageReplaceEvent,
QueueNodeExceptionEvent,
QueueNodeFailedEvent,
QueueNodeInIterationFailedEvent,
QueueNodeInLoopFailedEvent,
QueueNodeRetryEvent,
QueueNodeStartedEvent,
QueueNodeSucceededEvent,
QueueParallelBranchRunFailedEvent,
QueueParallelBranchRunStartedEvent,
QueueParallelBranchRunSucceededEvent,
QueuePingEvent,
QueueRetrieverResourcesEvent,
QueueStopEvent,
@ -65,8 +60,8 @@ from core.app.task_pipeline.message_cycle_manager import MessageCycleManager
from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.model_runtime.entities.llm_entities import LLMUsage
from core.ops.ops_trace_manager import TraceQueueManager
from core.workflow.entities.workflow_execution import WorkflowExecutionStatus, WorkflowType
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.entities import GraphRuntimeState
from core.workflow.enums import WorkflowExecutionStatus, WorkflowType
from core.workflow.nodes import NodeType
from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
@ -387,9 +382,7 @@ class AdvancedChatAppGenerateTaskPipeline:
def _handle_node_failed_events(
self,
event: Union[
QueueNodeFailedEvent, QueueNodeInIterationFailedEvent, QueueNodeInLoopFailedEvent, QueueNodeExceptionEvent
],
event: Union[QueueNodeFailedEvent, QueueNodeExceptionEvent],
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle various node failure events."""
@ -434,32 +427,6 @@ class AdvancedChatAppGenerateTaskPipeline:
answer=delta_text, message_id=self._message_id, from_variable_selector=event.from_variable_selector
)
def _handle_parallel_branch_started_event(
self, event: QueueParallelBranchRunStartedEvent, **kwargs
) -> Generator[StreamResponse, None, None]:
"""Handle parallel branch started events."""
self._ensure_workflow_initialized()
parallel_start_resp = self._workflow_response_converter.workflow_parallel_branch_start_to_stream_response(
task_id=self._application_generate_entity.task_id,
workflow_execution_id=self._workflow_run_id,
event=event,
)
yield parallel_start_resp
def _handle_parallel_branch_finished_events(
self, event: Union[QueueParallelBranchRunSucceededEvent, QueueParallelBranchRunFailedEvent], **kwargs
) -> Generator[StreamResponse, None, None]:
"""Handle parallel branch finished events."""
self._ensure_workflow_initialized()
parallel_finish_resp = self._workflow_response_converter.workflow_parallel_branch_finished_to_stream_response(
task_id=self._application_generate_entity.task_id,
workflow_execution_id=self._workflow_run_id,
event=event,
)
yield parallel_finish_resp
def _handle_iteration_start_event(
self, event: QueueIterationStartEvent, **kwargs
) -> Generator[StreamResponse, None, None]:
@ -751,8 +718,6 @@ class AdvancedChatAppGenerateTaskPipeline:
QueueNodeRetryEvent: self._handle_node_retry_event,
QueueNodeStartedEvent: self._handle_node_started_event,
QueueNodeSucceededEvent: self._handle_node_succeeded_event,
# Parallel branch events
QueueParallelBranchRunStartedEvent: self._handle_parallel_branch_started_event,
# Iteration events
QueueIterationStartEvent: self._handle_iteration_start_event,
QueueIterationNextEvent: self._handle_iteration_next_event,
@ -800,8 +765,6 @@ class AdvancedChatAppGenerateTaskPipeline:
event,
(
QueueNodeFailedEvent,
QueueNodeInIterationFailedEvent,
QueueNodeInLoopFailedEvent,
QueueNodeExceptionEvent,
),
):
@ -814,17 +777,6 @@ class AdvancedChatAppGenerateTaskPipeline:
)
return
# Handle parallel branch finished events with isinstance check
if isinstance(event, (QueueParallelBranchRunSucceededEvent, QueueParallelBranchRunFailedEvent)):
yield from self._handle_parallel_branch_finished_events(
event,
graph_runtime_state=graph_runtime_state,
tts_publisher=tts_publisher,
trace_manager=trace_manager,
queue_message=queue_message,
)
return
# For unhandled events, we continue (original behavior)
return
@ -848,11 +800,6 @@ class AdvancedChatAppGenerateTaskPipeline:
graph_runtime_state = event.graph_runtime_state
yield from self._handle_workflow_started_event(event)
case QueueTextChunkEvent():
yield from self._handle_text_chunk_event(
event, tts_publisher=tts_publisher, queue_message=queue_message
)
case QueueErrorEvent():
yield from self._handle_error_event(event)
break
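The pipeline routes most queue events through a handler table keyed by event class, with a match statement for the few events that need extra state. A self-contained sketch of the table-dispatch idea, using simplified stand-in event classes rather than the real Queue* entities:
    from collections.abc import Generator

    class PingEvent: ...

    class TextChunkEvent:
        def __init__(self, text: str) -> None:
            self.text = text

    def handle_ping(event: PingEvent) -> Generator[str, None, None]:
        yield "ping"

    def handle_text_chunk(event: TextChunkEvent) -> Generator[str, None, None]:
        yield event.text

    HANDLERS = {PingEvent: handle_ping, TextChunkEvent: handle_text_chunk}

    def dispatch(event) -> Generator[str, None, None]:
        handler = HANDLERS.get(type(event))
        if handler is None:
            return  # unhandled events are skipped, mirroring the pipeline's fallthrough
        yield from handler(event)

    # Example: list(dispatch(TextChunkEvent("hello"))) -> ["hello"]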

View File

@ -6,7 +6,7 @@ from sqlalchemy.orm import Session
from core.app.app_config.entities import VariableEntityType
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file import File, FileUploadConfig
from core.workflow.nodes.enums import NodeType
from core.workflow.enums import NodeType
from core.workflow.repositories.draft_variable_repository import (
DraftVariableSaver,
DraftVariableSaverFactory,
@ -14,6 +14,7 @@ from core.workflow.repositories.draft_variable_repository import (
)
from factories import file_factory
from libs.orjson import orjson_dumps
from models import Account, EndUser
from services.workflow_draft_variable_service import DraftVariableSaver as DraftVariableSaverImpl
if TYPE_CHECKING:
@ -182,8 +183,9 @@ class BaseAppGenerator:
@final
@staticmethod
def _get_draft_var_saver_factory(invoke_from: InvokeFrom) -> DraftVariableSaverFactory:
def _get_draft_var_saver_factory(invoke_from: InvokeFrom, account: Account | EndUser) -> DraftVariableSaverFactory:
if invoke_from == InvokeFrom.DEBUGGER:
assert isinstance(account, Account)
def draft_var_saver_factory(
session: Session,
@ -200,6 +202,7 @@ class BaseAppGenerator:
node_type=node_type,
node_execution_id=node_execution_id,
enclosing_node_id=enclosing_node_id,
user=account,
)
else:

View File

@ -126,6 +126,21 @@ class AppQueueManager:
stopped_cache_key = cls._generate_stopped_cache_key(task_id)
redis_client.setex(stopped_cache_key, 600, 1)
@classmethod
def set_stop_flag_no_user_check(cls, task_id: str) -> None:
"""
Set task stop flag without user permission check.
This method allows stopping workflows without user context.
:param task_id: The task ID to stop
:return:
"""
if not task_id:
return
stopped_cache_key = cls._generate_stopped_cache_key(task_id)
redis_client.setex(stopped_cache_key, 600, 1)
def _is_stopped(self) -> bool:
"""
Check if task is stopped

View File

@ -164,7 +164,9 @@ class ChatAppRunner(AppRunner):
config=app_config.dataset,
query=query,
invoke_from=application_generate_entity.invoke_from,
show_retrieve_source=app_config.additional_features.show_retrieve_source,
show_retrieve_source=(
app_config.additional_features.show_retrieve_source if app_config.additional_features else False
),
hit_callback=hit_callback,
memory=memory,
message_id=message.id,

View File

@ -1,7 +1,7 @@
import time
from collections.abc import Mapping, Sequence
from datetime import UTC, datetime
from typing import Any, Optional, Union, cast
from typing import Any, Optional, Union
from sqlalchemy.orm import Session
@ -16,14 +16,9 @@ from core.app.entities.queue_entities import (
QueueLoopStartEvent,
QueueNodeExceptionEvent,
QueueNodeFailedEvent,
QueueNodeInIterationFailedEvent,
QueueNodeInLoopFailedEvent,
QueueNodeRetryEvent,
QueueNodeStartedEvent,
QueueNodeSucceededEvent,
QueueParallelBranchRunFailedEvent,
QueueParallelBranchRunStartedEvent,
QueueParallelBranchRunSucceededEvent,
)
from core.app.entities.task_entities import (
AgentLogStreamResponse,
@ -36,24 +31,23 @@ from core.app.entities.task_entities import (
NodeFinishStreamResponse,
NodeRetryStreamResponse,
NodeStartStreamResponse,
ParallelBranchFinishedStreamResponse,
ParallelBranchStartStreamResponse,
WorkflowFinishStreamResponse,
WorkflowStartStreamResponse,
)
from core.file import FILE_MODEL_IDENTITY, File
from core.plugin.impl.datasource import PluginDatasourceManager
from core.tools.entities.tool_entities import ToolProviderType
from core.tools.tool_manager import ToolManager
from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.workflow.entities.workflow_execution import WorkflowExecution
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution, WorkflowNodeExecutionStatus
from core.workflow.nodes import NodeType
from core.workflow.nodes.tool.entities import ToolNodeData
from core.workflow.entities import WorkflowExecution, WorkflowNodeExecution
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.workflow_type_encoder import WorkflowRuntimeTypeConverter
from libs.datetime_utils import naive_utc_now
from models import (
Account,
EndUser,
)
from services.variable_truncator import VariableTruncator
class WorkflowResponseConverter:
@ -65,6 +59,7 @@ class WorkflowResponseConverter:
) -> None:
self._application_generate_entity = application_generate_entity
self._user = user
self._truncator = VariableTruncator.default()
def workflow_start_to_stream_response(
self,
@ -156,7 +151,8 @@ class WorkflowResponseConverter:
title=workflow_node_execution.title,
index=workflow_node_execution.index,
predecessor_node_id=workflow_node_execution.predecessor_node_id,
inputs=workflow_node_execution.inputs,
inputs=workflow_node_execution.get_response_inputs(),
inputs_truncated=workflow_node_execution.inputs_truncated,
created_at=int(workflow_node_execution.created_at.timestamp()),
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
@ -171,11 +167,19 @@ class WorkflowResponseConverter:
# extras logic
if event.node_type == NodeType.TOOL:
node_data = cast(ToolNodeData, event.node_data)
response.data.extras["icon"] = ToolManager.get_tool_icon(
tenant_id=self._application_generate_entity.app_config.tenant_id,
provider_type=node_data.provider_type,
provider_id=node_data.provider_id,
provider_type=ToolProviderType(event.provider_type),
provider_id=event.provider_id,
)
elif event.node_type == NodeType.DATASOURCE:
manager = PluginDatasourceManager()
provider_entity = manager.fetch_datasource_provider(
self._application_generate_entity.app_config.tenant_id,
event.provider_id,
)
response.data.extras["icon"] = provider_entity.declaration.identity.generate_datasource_icon_url(
self._application_generate_entity.app_config.tenant_id
)
return response
@ -183,11 +187,7 @@ class WorkflowResponseConverter:
def workflow_node_finish_to_stream_response(
self,
*,
event: QueueNodeSucceededEvent
| QueueNodeFailedEvent
| QueueNodeInIterationFailedEvent
| QueueNodeInLoopFailedEvent
| QueueNodeExceptionEvent,
event: QueueNodeSucceededEvent | QueueNodeFailedEvent | QueueNodeExceptionEvent,
task_id: str,
workflow_node_execution: WorkflowNodeExecution,
) -> Optional[NodeFinishStreamResponse]:
@ -210,9 +210,12 @@ class WorkflowResponseConverter:
index=workflow_node_execution.index,
title=workflow_node_execution.title,
predecessor_node_id=workflow_node_execution.predecessor_node_id,
inputs=workflow_node_execution.inputs,
process_data=workflow_node_execution.process_data,
outputs=json_converter.to_json_encodable(workflow_node_execution.outputs),
inputs=workflow_node_execution.get_response_inputs(),
inputs_truncated=workflow_node_execution.inputs_truncated,
process_data=workflow_node_execution.get_response_process_data(),
process_data_truncated=workflow_node_execution.process_data_truncated,
outputs=json_converter.to_json_encodable(workflow_node_execution.get_response_outputs()),
outputs_truncated=workflow_node_execution.outputs_truncated,
status=workflow_node_execution.status,
error=workflow_node_execution.error,
elapsed_time=workflow_node_execution.elapsed_time,
@ -221,9 +224,6 @@ class WorkflowResponseConverter:
finished_at=int(workflow_node_execution.finished_at.timestamp()),
files=self.fetch_files_from_node_outputs(workflow_node_execution.outputs or {}),
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
iteration_id=event.in_iteration_id,
loop_id=event.in_loop_id,
),
@ -255,9 +255,12 @@ class WorkflowResponseConverter:
index=workflow_node_execution.index,
title=workflow_node_execution.title,
predecessor_node_id=workflow_node_execution.predecessor_node_id,
inputs=workflow_node_execution.inputs,
process_data=workflow_node_execution.process_data,
outputs=json_converter.to_json_encodable(workflow_node_execution.outputs),
inputs=workflow_node_execution.get_response_inputs(),
inputs_truncated=workflow_node_execution.inputs_truncated,
process_data=workflow_node_execution.get_response_process_data(),
process_data_truncated=workflow_node_execution.process_data_truncated,
outputs=json_converter.to_json_encodable(workflow_node_execution.get_response_outputs()),
outputs_truncated=workflow_node_execution.outputs_truncated,
status=workflow_node_execution.status,
error=workflow_node_execution.error,
elapsed_time=workflow_node_execution.elapsed_time,
@ -275,50 +278,6 @@ class WorkflowResponseConverter:
),
)
def workflow_parallel_branch_start_to_stream_response(
self,
*,
task_id: str,
workflow_execution_id: str,
event: QueueParallelBranchRunStartedEvent,
) -> ParallelBranchStartStreamResponse:
return ParallelBranchStartStreamResponse(
task_id=task_id,
workflow_run_id=workflow_execution_id,
data=ParallelBranchStartStreamResponse.Data(
parallel_id=event.parallel_id,
parallel_branch_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
iteration_id=event.in_iteration_id,
loop_id=event.in_loop_id,
created_at=int(time.time()),
),
)
def workflow_parallel_branch_finished_to_stream_response(
self,
*,
task_id: str,
workflow_execution_id: str,
event: QueueParallelBranchRunSucceededEvent | QueueParallelBranchRunFailedEvent,
) -> ParallelBranchFinishedStreamResponse:
return ParallelBranchFinishedStreamResponse(
task_id=task_id,
workflow_run_id=workflow_execution_id,
data=ParallelBranchFinishedStreamResponse.Data(
parallel_id=event.parallel_id,
parallel_branch_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
iteration_id=event.in_iteration_id,
loop_id=event.in_loop_id,
status="succeeded" if isinstance(event, QueueParallelBranchRunSucceededEvent) else "failed",
error=event.error if isinstance(event, QueueParallelBranchRunFailedEvent) else None,
created_at=int(time.time()),
),
)
def workflow_iteration_start_to_stream_response(
self,
*,
@ -326,6 +285,7 @@ class WorkflowResponseConverter:
workflow_execution_id: str,
event: QueueIterationStartEvent,
) -> IterationNodeStartStreamResponse:
new_inputs, truncated = self._truncator.truncate_variable_mapping(event.inputs or {})
return IterationNodeStartStreamResponse(
task_id=task_id,
workflow_run_id=workflow_execution_id,
@ -333,13 +293,12 @@ class WorkflowResponseConverter:
id=event.node_id,
node_id=event.node_id,
node_type=event.node_type.value,
title=event.node_data.title,
title=event.node_title,
created_at=int(time.time()),
extras={},
inputs=event.inputs or {},
inputs=new_inputs,
inputs_truncated=truncated,
metadata=event.metadata or {},
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
),
)
@ -357,15 +316,10 @@ class WorkflowResponseConverter:
id=event.node_id,
node_id=event.node_id,
node_type=event.node_type.value,
title=event.node_data.title,
title=event.node_title,
index=event.index,
pre_iteration_output=event.output,
created_at=int(time.time()),
extras={},
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parallel_mode_run_id=event.parallel_mode_run_id,
duration=event.duration,
),
)
@ -377,6 +331,11 @@ class WorkflowResponseConverter:
event: QueueIterationCompletedEvent,
) -> IterationNodeCompletedStreamResponse:
json_converter = WorkflowRuntimeTypeConverter()
new_inputs, inputs_truncated = self._truncator.truncate_variable_mapping(event.inputs or {})
new_outputs, outputs_truncated = self._truncator.truncate_variable_mapping(
json_converter.to_json_encodable(event.outputs) or {}
)
return IterationNodeCompletedStreamResponse(
task_id=task_id,
workflow_run_id=workflow_execution_id,
@ -384,11 +343,13 @@ class WorkflowResponseConverter:
id=event.node_id,
node_id=event.node_id,
node_type=event.node_type.value,
title=event.node_data.title,
outputs=json_converter.to_json_encodable(event.outputs),
title=event.node_title,
outputs=new_outputs,
outputs_truncated=outputs_truncated,
created_at=int(time.time()),
extras={},
inputs=event.inputs or {},
inputs=new_inputs,
inputs_truncated=inputs_truncated,
status=WorkflowNodeExecutionStatus.SUCCEEDED
if event.error is None
else WorkflowNodeExecutionStatus.FAILED,
@ -398,14 +359,13 @@ class WorkflowResponseConverter:
execution_metadata=event.metadata,
finished_at=int(time.time()),
steps=event.steps,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
),
)
def workflow_loop_start_to_stream_response(
self, *, task_id: str, workflow_execution_id: str, event: QueueLoopStartEvent
) -> LoopNodeStartStreamResponse:
new_inputs, truncated = self._truncator.truncate_variable_mapping(event.inputs or {})
return LoopNodeStartStreamResponse(
task_id=task_id,
workflow_run_id=workflow_execution_id,
@ -413,10 +373,11 @@ class WorkflowResponseConverter:
id=event.node_id,
node_id=event.node_id,
node_type=event.node_type.value,
title=event.node_data.title,
title=event.node_title,
created_at=int(time.time()),
extras={},
inputs=event.inputs or {},
inputs=new_inputs,
inputs_truncated=truncated,
metadata=event.metadata or {},
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
@ -437,15 +398,16 @@ class WorkflowResponseConverter:
id=event.node_id,
node_id=event.node_id,
node_type=event.node_type.value,
title=event.node_data.title,
title=event.node_title,
index=event.index,
pre_loop_output=event.output,
# The `pre_loop_output` field is not utilized by the frontend.
# Previously, it was assigned the value of `event.output`.
pre_loop_output={},
created_at=int(time.time()),
extras={},
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parallel_mode_run_id=event.parallel_mode_run_id,
duration=event.duration,
),
)
@ -456,6 +418,11 @@ class WorkflowResponseConverter:
workflow_execution_id: str,
event: QueueLoopCompletedEvent,
) -> LoopNodeCompletedStreamResponse:
json_converter = WorkflowRuntimeTypeConverter()
new_inputs, inputs_truncated = self._truncator.truncate_variable_mapping(event.inputs or {})
new_outputs, outputs_truncated = self._truncator.truncate_variable_mapping(
json_converter.to_json_encodable(event.outputs) or {}
)
return LoopNodeCompletedStreamResponse(
task_id=task_id,
workflow_run_id=workflow_execution_id,
@ -463,11 +430,13 @@ class WorkflowResponseConverter:
id=event.node_id,
node_id=event.node_id,
node_type=event.node_type.value,
title=event.node_data.title,
outputs=WorkflowRuntimeTypeConverter().to_json_encodable(event.outputs),
title=event.node_title,
outputs=new_outputs,
outputs_truncated=outputs_truncated,
created_at=int(time.time()),
extras={},
inputs=event.inputs or {},
inputs=new_inputs,
inputs_truncated=inputs_truncated,
status=WorkflowNodeExecutionStatus.SUCCEEDED
if event.error is None
else WorkflowNodeExecutionStatus.FAILED,
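Every start/next/completed response above now passes its inputs and outputs through the shared truncator. A minimal sketch of that call pattern, using the VariableTruncator API shown in this file with a placeholder mapping:
    from services.variable_truncator import VariableTruncator

    truncator = VariableTruncator.default()
    inputs = {"query": "a very long user input ..."}  # placeholder mapping
    new_inputs, truncated = truncator.truncate_variable_mapping(inputs)
    # `new_inputs` goes into the stream response payload; `truncated` feeds the
    # new *_truncated flags (inputs_truncated / outputs_truncated).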

View File

@ -0,0 +1,95 @@
from collections.abc import Generator
from typing import cast
from core.app.apps.base_app_generate_response_converter import AppGenerateResponseConverter
from core.app.entities.task_entities import (
AppStreamResponse,
ErrorStreamResponse,
NodeFinishStreamResponse,
NodeStartStreamResponse,
PingStreamResponse,
WorkflowAppBlockingResponse,
WorkflowAppStreamResponse,
)
class WorkflowAppGenerateResponseConverter(AppGenerateResponseConverter):
_blocking_response_type = WorkflowAppBlockingResponse
@classmethod
def convert_blocking_full_response(cls, blocking_response: WorkflowAppBlockingResponse) -> dict: # type: ignore[override]
"""
Convert blocking full response.
:param blocking_response: blocking response
:return:
"""
return dict(blocking_response.to_dict())
@classmethod
def convert_blocking_simple_response(cls, blocking_response: WorkflowAppBlockingResponse) -> dict: # type: ignore[override]
"""
Convert blocking simple response.
:param blocking_response: blocking response
:return:
"""
return cls.convert_blocking_full_response(blocking_response)
@classmethod
def convert_stream_full_response(
cls, stream_response: Generator[AppStreamResponse, None, None]
) -> Generator[dict | str, None, None]:
"""
Convert stream full response.
:param stream_response: stream response
:return:
"""
for chunk in stream_response:
chunk = cast(WorkflowAppStreamResponse, chunk)
sub_stream_response = chunk.stream_response
if isinstance(sub_stream_response, PingStreamResponse):
yield "ping"
continue
response_chunk = {
"event": sub_stream_response.event.value,
"workflow_run_id": chunk.workflow_run_id,
}
if isinstance(sub_stream_response, ErrorStreamResponse):
data = cls._error_to_stream_response(sub_stream_response.err)
response_chunk.update(data)
else:
response_chunk.update(sub_stream_response.to_dict())
yield response_chunk
@classmethod
def convert_stream_simple_response(
cls, stream_response: Generator[AppStreamResponse, None, None]
) -> Generator[dict | str, None, None]:
"""
Convert stream simple response.
:param stream_response: stream response
:return:
"""
for chunk in stream_response:
chunk = cast(WorkflowAppStreamResponse, chunk)
sub_stream_response = chunk.stream_response
if isinstance(sub_stream_response, PingStreamResponse):
yield "ping"
continue
response_chunk = {
"event": sub_stream_response.event.value,
"workflow_run_id": chunk.workflow_run_id,
}
if isinstance(sub_stream_response, ErrorStreamResponse):
data = cls._error_to_stream_response(sub_stream_response.err)
response_chunk.update(data)
elif isinstance(sub_stream_response, NodeStartStreamResponse | NodeFinishStreamResponse):
response_chunk.update(sub_stream_response.to_ignore_detail_dict())
else:
response_chunk.update(sub_stream_response.to_dict())
yield response_chunk
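A consumer of convert_stream_simple_response sees bare "ping" strings for keep-alives and dicts for everything else. An illustrative sketch of turning that output into server-sent events; the SSE framing below is an assumption for illustration, not the project's actual serializer:
    import json
    from collections.abc import Generator, Iterable

    def to_sse(chunks: Iterable[dict | str]) -> Generator[str, None, None]:
        for chunk in chunks:
            if isinstance(chunk, str):
                # keep-alive pings are yielded as bare strings by the converter
                yield f"event: {chunk}\n\n"
            else:
                yield f"data: {json.dumps(chunk, ensure_ascii=False)}\n\n"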

View File

@ -0,0 +1,66 @@
from core.app.app_config.base_app_config_manager import BaseAppConfigManager
from core.app.app_config.common.sensitive_word_avoidance.manager import SensitiveWordAvoidanceConfigManager
from core.app.app_config.entities import RagPipelineVariableEntity, WorkflowUIBasedAppConfig
from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
from core.app.app_config.features.text_to_speech.manager import TextToSpeechConfigManager
from core.app.app_config.workflow_ui_based_app.variables.manager import WorkflowVariablesConfigManager
from models.dataset import Pipeline
from models.model import AppMode
from models.workflow import Workflow
class PipelineConfig(WorkflowUIBasedAppConfig):
"""
Pipeline Config Entity.
"""
rag_pipeline_variables: list[RagPipelineVariableEntity] = []
class PipelineConfigManager(BaseAppConfigManager):
@classmethod
def get_pipeline_config(cls, pipeline: Pipeline, workflow: Workflow, start_node_id: str) -> PipelineConfig:
pipeline_config = PipelineConfig(
tenant_id=pipeline.tenant_id,
app_id=pipeline.id,
app_mode=AppMode.RAG_PIPELINE,
workflow_id=workflow.id,
rag_pipeline_variables=WorkflowVariablesConfigManager.convert_rag_pipeline_variable(
workflow=workflow, start_node_id=start_node_id
),
)
return pipeline_config
@classmethod
def config_validate(cls, tenant_id: str, config: dict, only_structure_validate: bool = False) -> dict:
"""
Validate for pipeline config
:param tenant_id: tenant id
:param config: app model config args
:param only_structure_validate: only validate the structure of the config
"""
related_config_keys = []
# file upload validation
config, current_related_config_keys = FileUploadConfigManager.validate_and_set_defaults(config=config)
related_config_keys.extend(current_related_config_keys)
# text_to_speech
config, current_related_config_keys = TextToSpeechConfigManager.validate_and_set_defaults(config)
related_config_keys.extend(current_related_config_keys)
# moderation validation
config, current_related_config_keys = SensitiveWordAvoidanceConfigManager.validate_and_set_defaults(
tenant_id=tenant_id, config=config, only_structure_validate=only_structure_validate
)
related_config_keys.extend(current_related_config_keys)
related_config_keys = list(set(related_config_keys))
# Filter out extra parameters
filtered_config = {key: config.get(key) for key in related_config_keys}
return filtered_config
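config_validate keeps only the keys claimed by the feature managers it ran. A tiny sketch of that filtering step with hypothetical key names and values:
    related_config_keys = ["file_upload", "text_to_speech", "sensitive_word_avoidance"]  # hypothetical
    config = {"file_upload": {"enabled": True}, "text_to_speech": {"enabled": False}, "unknown_key": 1}

    # Anything a feature manager did not claim is dropped; missing keys become None.
    filtered_config = {key: config.get(key) for key in related_config_keys}
    # {'file_upload': {'enabled': True}, 'text_to_speech': {'enabled': False}, 'sensitive_word_avoidance': None}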

View File

@ -0,0 +1,808 @@
import contextvars
import datetime
import json
import logging
import secrets
import threading
import time
import uuid
from collections.abc import Generator, Mapping
from typing import Any, Literal, Optional, Union, cast, overload
from flask import Flask, current_app
from pydantic import ValidationError
from sqlalchemy import select
from sqlalchemy.orm import Session, sessionmaker
import contexts
from configs import dify_config
from core.app.apps.base_app_generator import BaseAppGenerator
from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
from core.app.apps.exc import GenerateTaskStoppedError
from core.app.apps.pipeline.pipeline_config_manager import PipelineConfigManager
from core.app.apps.pipeline.pipeline_queue_manager import PipelineQueueManager
from core.app.apps.pipeline.pipeline_runner import PipelineRunner
from core.app.apps.workflow.generate_response_converter import WorkflowAppGenerateResponseConverter
from core.app.apps.workflow.generate_task_pipeline import WorkflowAppGenerateTaskPipeline
from core.app.entities.app_invoke_entities import InvokeFrom, RagPipelineGenerateEntity
from core.app.entities.task_entities import WorkflowAppBlockingResponse, WorkflowAppStreamResponse
from core.datasource.entities.datasource_entities import (
DatasourceProviderType,
OnlineDriveBrowseFilesRequest,
)
from core.datasource.online_drive.online_drive_plugin import OnlineDriveDatasourcePlugin
from core.entities.knowledge_entities import PipelineDataset, PipelineDocument
from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.rag.index_processor.constant.built_in_field import BuiltInField
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader
from extensions.ext_database import db
from libs.flask_utils import preserve_flask_contexts
from models import Account, EndUser, Workflow, WorkflowNodeExecutionTriggeredFrom
from models.dataset import Document, DocumentPipelineExecutionLog, Pipeline
from models.enums import WorkflowRunTriggeredFrom
from models.model import AppMode
from services.dataset_service import DocumentService
from services.datasource_provider_service import DatasourceProviderService
from services.workflow_draft_variable_service import DraftVarLoader, WorkflowDraftVariableService
from tasks.rag_pipeline.rag_pipeline_run_task import rag_pipeline_run_task
logger = logging.getLogger(__name__)
class PipelineGenerator(BaseAppGenerator):
@overload
def generate(
self,
*,
pipeline: Pipeline,
workflow: Workflow,
user: Union[Account, EndUser],
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[True],
call_depth: int,
workflow_thread_pool_id: Optional[str],
) -> Mapping[str, Any] | Generator[Mapping | str, None, None] | None: ...
@overload
def generate(
self,
*,
pipeline: Pipeline,
workflow: Workflow,
user: Union[Account, EndUser],
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: Literal[False],
call_depth: int,
workflow_thread_pool_id: Optional[str],
) -> Mapping[str, Any]: ...
@overload
def generate(
self,
*,
pipeline: Pipeline,
workflow: Workflow,
user: Union[Account, EndUser],
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool,
call_depth: int,
workflow_thread_pool_id: Optional[str],
) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None]]: ...
def generate(
self,
*,
pipeline: Pipeline,
workflow: Workflow,
user: Union[Account, EndUser],
args: Mapping[str, Any],
invoke_from: InvokeFrom,
streaming: bool = True,
call_depth: int = 0,
workflow_thread_pool_id: Optional[str] = None,
) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None], None]:
# Add null check for dataset
with Session(db.engine) as session:
dataset = pipeline.retrieve_dataset(session)
if not dataset:
raise ValueError("Pipeline dataset is required")
inputs: Mapping[str, Any] = args["inputs"]
start_node_id: str = args["start_node_id"]
datasource_type: str = args["datasource_type"]
datasource_info_list: list[Mapping[str, Any]] = self._format_datasource_info_list(
datasource_type, args["datasource_info_list"], pipeline, workflow, start_node_id, user
)
batch = time.strftime("%Y%m%d%H%M%S") + str(secrets.randbelow(900000) + 100000)
# convert to app config
pipeline_config = PipelineConfigManager.get_pipeline_config(
pipeline=pipeline, workflow=workflow, start_node_id=start_node_id
)
documents = []
if invoke_from == InvokeFrom.PUBLISHED:
for datasource_info in datasource_info_list:
position = DocumentService.get_documents_position(dataset.id)
document = self._build_document(
tenant_id=pipeline.tenant_id,
dataset_id=dataset.id,
built_in_field_enabled=dataset.built_in_field_enabled,
datasource_type=datasource_type,
datasource_info=datasource_info,
created_from="rag-pipeline",
position=position,
account=user,
batch=batch,
document_form=dataset.chunk_structure,
)
db.session.add(document)
documents.append(document)
db.session.commit()
# run in child thread
for i, datasource_info in enumerate(datasource_info_list):
workflow_run_id = str(uuid.uuid4())
document_id = None
if invoke_from == InvokeFrom.PUBLISHED:
document_id = documents[i].id
document_pipeline_execution_log = DocumentPipelineExecutionLog(
document_id=document_id,
datasource_type=datasource_type,
datasource_info=json.dumps(datasource_info),
datasource_node_id=start_node_id,
input_data=inputs,
pipeline_id=pipeline.id,
created_by=user.id,
)
db.session.add(document_pipeline_execution_log)
db.session.commit()
application_generate_entity = RagPipelineGenerateEntity(
task_id=str(uuid.uuid4()),
app_config=pipeline_config,
pipeline_config=pipeline_config,
datasource_type=datasource_type,
datasource_info=datasource_info,
dataset_id=dataset.id,
start_node_id=start_node_id,
batch=batch,
document_id=document_id,
inputs=self._prepare_user_inputs(
user_inputs=inputs,
variables=pipeline_config.rag_pipeline_variables,
tenant_id=pipeline.tenant_id,
strict_type_validation=(invoke_from == InvokeFrom.SERVICE_API),
),
files=[],
user_id=user.id,
stream=streaming,
invoke_from=invoke_from,
call_depth=call_depth,
workflow_execution_id=workflow_run_id,
)
contexts.plugin_tool_providers.set({})
contexts.plugin_tool_providers_lock.set(threading.Lock())
if invoke_from == InvokeFrom.DEBUGGER:
workflow_triggered_from = WorkflowRunTriggeredFrom.RAG_PIPELINE_DEBUGGING
else:
workflow_triggered_from = WorkflowRunTriggeredFrom.RAG_PIPELINE_RUN
# Create workflow node execution repository
session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
session_factory=session_factory,
user=user,
app_id=application_generate_entity.app_config.app_id,
triggered_from=workflow_triggered_from,
)
workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
session_factory=session_factory,
user=user,
app_id=application_generate_entity.app_config.app_id,
triggered_from=WorkflowNodeExecutionTriggeredFrom.RAG_PIPELINE_RUN,
)
if invoke_from == InvokeFrom.DEBUGGER:
return self._generate(
flask_app=current_app._get_current_object(), # type: ignore
context=contextvars.copy_context(),
pipeline=pipeline,
workflow_id=workflow.id,
user=user,
application_generate_entity=application_generate_entity,
invoke_from=invoke_from,
workflow_execution_repository=workflow_execution_repository,
workflow_node_execution_repository=workflow_node_execution_repository,
streaming=streaming,
workflow_thread_pool_id=workflow_thread_pool_id,
)
else:
rag_pipeline_run_task.delay( # type: ignore
pipeline_id=pipeline.id,
user_id=user.id,
tenant_id=pipeline.tenant_id,
workflow_id=workflow.id,
streaming=streaming,
workflow_execution_id=workflow_run_id,
workflow_thread_pool_id=workflow_thread_pool_id,
application_generate_entity=application_generate_entity.model_dump(),
)
# return batch, dataset, documents
return {
"batch": batch,
"dataset": PipelineDataset(
id=dataset.id,
name=dataset.name,
description=dataset.description,
chunk_structure=dataset.chunk_structure,
).model_dump(),
"documents": [
PipelineDocument(
id=document.id,
position=document.position,
data_source_type=document.data_source_type,
data_source_info=json.loads(document.data_source_info) if document.data_source_info else None,
name=document.name,
indexing_status=document.indexing_status,
error=document.error,
enabled=document.enabled,
).model_dump()
for document in documents
],
}
def _generate(
self,
*,
flask_app: Flask,
context: contextvars.Context,
pipeline: Pipeline,
workflow_id: str,
user: Union[Account, EndUser],
application_generate_entity: RagPipelineGenerateEntity,
invoke_from: InvokeFrom,
workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository,
streaming: bool = True,
variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER,
workflow_thread_pool_id: Optional[str] = None,
) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]:
"""
Generate App response.
:param pipeline: Pipeline
:param workflow_id: workflow id
:param user: account or end user
:param application_generate_entity: application generate entity
:param invoke_from: invoke from source
:param workflow_execution_repository: repository for workflow execution
:param workflow_node_execution_repository: repository for workflow node execution
:param streaming: is stream
:param workflow_thread_pool_id: workflow thread pool id
"""
with preserve_flask_contexts(flask_app, context_vars=context):
# init queue manager
workflow = db.session.query(Workflow).filter(Workflow.id == workflow_id).first()
if not workflow:
raise ValueError(f"Workflow not found: {workflow_id}")
queue_manager = PipelineQueueManager(
task_id=application_generate_entity.task_id,
user_id=application_generate_entity.user_id,
invoke_from=application_generate_entity.invoke_from,
app_mode=AppMode.RAG_PIPELINE,
)
context = contextvars.copy_context()
# new thread
worker_thread = threading.Thread(
target=self._generate_worker,
kwargs={
"flask_app": current_app._get_current_object(), # type: ignore
"context": context,
"queue_manager": queue_manager,
"application_generate_entity": application_generate_entity,
"workflow_thread_pool_id": workflow_thread_pool_id,
"variable_loader": variable_loader,
},
)
worker_thread.start()
draft_var_saver_factory = self._get_draft_var_saver_factory(
invoke_from,
user,
)
# return response or stream generator
response = self._handle_response(
application_generate_entity=application_generate_entity,
workflow=workflow,
queue_manager=queue_manager,
user=user,
workflow_execution_repository=workflow_execution_repository,
workflow_node_execution_repository=workflow_node_execution_repository,
stream=streaming,
draft_var_saver_factory=draft_var_saver_factory,
)
return WorkflowAppGenerateResponseConverter.convert(response=response, invoke_from=invoke_from)
def single_iteration_generate(
self,
pipeline: Pipeline,
workflow: Workflow,
node_id: str,
user: Account | EndUser,
args: Mapping[str, Any],
streaming: bool = True,
) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]:
"""
Generate App response.
:param pipeline: Pipeline
:param workflow: Workflow
:param node_id: the node id
:param user: account or end user
:param args: request args
:param streaming: is streamed
"""
if not node_id:
raise ValueError("node_id is required")
if args.get("inputs") is None:
raise ValueError("inputs is required")
# convert to app config
pipeline_config = PipelineConfigManager.get_pipeline_config(
pipeline=pipeline, workflow=workflow, start_node_id=args.get("start_node_id", "shared")
)
with Session(db.engine) as session:
dataset = pipeline.retrieve_dataset(session)
if not dataset:
raise ValueError("Pipeline dataset is required")
# init application generate entity - use RagPipelineGenerateEntity instead
application_generate_entity = RagPipelineGenerateEntity(
task_id=str(uuid.uuid4()),
app_config=pipeline_config,
pipeline_config=pipeline_config,
datasource_type=args.get("datasource_type", ""),
datasource_info=args.get("datasource_info", {}),
dataset_id=dataset.id,
batch=args.get("batch", ""),
document_id=args.get("document_id"),
inputs={},
files=[],
user_id=user.id,
stream=streaming,
invoke_from=InvokeFrom.DEBUGGER,
call_depth=0,
workflow_execution_id=str(uuid.uuid4()),
)
contexts.plugin_tool_providers.set({})
contexts.plugin_tool_providers_lock.set(threading.Lock())
# Create workflow node execution repository
session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
session_factory=session_factory,
user=user,
app_id=application_generate_entity.app_config.app_id,
triggered_from=WorkflowRunTriggeredFrom.RAG_PIPELINE_DEBUGGING,
)
workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
session_factory=session_factory,
user=user,
app_id=application_generate_entity.app_config.app_id,
triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
)
draft_var_srv = WorkflowDraftVariableService(db.session())
draft_var_srv.prefill_conversation_variable_default_values(workflow)
var_loader = DraftVarLoader(
engine=db.engine,
app_id=application_generate_entity.app_config.app_id,
tenant_id=application_generate_entity.app_config.tenant_id,
)
return self._generate(
flask_app=current_app._get_current_object(), # type: ignore
pipeline=pipeline,
workflow_id=workflow.id,
user=user,
invoke_from=InvokeFrom.DEBUGGER,
application_generate_entity=application_generate_entity,
workflow_execution_repository=workflow_execution_repository,
workflow_node_execution_repository=workflow_node_execution_repository,
streaming=streaming,
variable_loader=var_loader,
)
def single_loop_generate(
self,
pipeline: Pipeline,
workflow: Workflow,
node_id: str,
user: Account | EndUser,
args: Mapping[str, Any],
streaming: bool = True,
) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]:
"""
Generate App response.
:param pipeline: Pipeline
:param workflow: Workflow
:param node_id: the node id
:param user: account or end user
:param args: request args
:param streaming: is streamed
"""
if not node_id:
raise ValueError("node_id is required")
if args.get("inputs") is None:
raise ValueError("inputs is required")
with Session(db.engine) as session:
dataset = pipeline.retrieve_dataset(session)
if not dataset:
raise ValueError("Pipeline dataset is required")
# convert to app config
pipeline_config = PipelineConfigManager.get_pipeline_config(
pipeline=pipeline, workflow=workflow, start_node_id=args.get("start_node_id", "shared")
)
# init application generate entity
application_generate_entity = RagPipelineGenerateEntity(
task_id=str(uuid.uuid4()),
app_config=pipeline_config,
pipeline_config=pipeline_config,
datasource_type=args.get("datasource_type", ""),
datasource_info=args.get("datasource_info", {}),
batch=args.get("batch", ""),
document_id=args.get("document_id"),
dataset_id=dataset.id,
inputs={},
files=[],
user_id=user.id,
stream=streaming,
invoke_from=InvokeFrom.DEBUGGER,
extras={"auto_generate_conversation_name": False},
single_loop_run=RagPipelineGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]),
workflow_execution_id=str(uuid.uuid4()),
)
contexts.plugin_tool_providers.set({})
contexts.plugin_tool_providers_lock.set(threading.Lock())
# Create workflow node execution repository
session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
session_factory=session_factory,
user=user,
app_id=application_generate_entity.app_config.app_id,
triggered_from=WorkflowRunTriggeredFrom.RAG_PIPELINE_DEBUGGING,
)
workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
session_factory=session_factory,
user=user,
app_id=application_generate_entity.app_config.app_id,
triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
)
draft_var_srv = WorkflowDraftVariableService(db.session())
draft_var_srv.prefill_conversation_variable_default_values(workflow)
var_loader = DraftVarLoader(
engine=db.engine,
app_id=application_generate_entity.app_config.app_id,
tenant_id=application_generate_entity.app_config.tenant_id,
)
return self._generate(
flask_app=current_app._get_current_object(), # type: ignore
pipeline=pipeline,
workflow_id=workflow.id,
user=user,
invoke_from=InvokeFrom.DEBUGGER,
application_generate_entity=application_generate_entity,
workflow_execution_repository=workflow_execution_repository,
workflow_node_execution_repository=workflow_node_execution_repository,
streaming=streaming,
variable_loader=var_loader,
)
def _generate_worker(
self,
flask_app: Flask,
application_generate_entity: RagPipelineGenerateEntity,
queue_manager: AppQueueManager,
context: contextvars.Context,
variable_loader: VariableLoader,
workflow_thread_pool_id: Optional[str] = None,
) -> None:
"""
Generate worker in a new thread.
:param flask_app: Flask app
:param application_generate_entity: application generate entity
:param queue_manager: queue manager
:param workflow_thread_pool_id: workflow thread pool id
:return:
"""
with preserve_flask_contexts(flask_app, context_vars=context):
try:
with Session(db.engine, expire_on_commit=False) as session:
workflow = session.scalar(
select(Workflow).where(
Workflow.tenant_id == application_generate_entity.app_config.tenant_id,
Workflow.app_id == application_generate_entity.app_config.app_id,
Workflow.id == application_generate_entity.app_config.workflow_id,
)
)
if workflow is None:
raise ValueError("Workflow not found")
# Determine system_user_id based on invocation source
is_external_api_call = application_generate_entity.invoke_from in {
InvokeFrom.WEB_APP,
InvokeFrom.SERVICE_API,
}
if is_external_api_call:
# For external API calls, use end user's session ID
end_user = session.scalar(
select(EndUser).where(EndUser.id == application_generate_entity.user_id)
)
system_user_id = end_user.session_id if end_user else ""
else:
# For internal calls, use the original user ID
system_user_id = application_generate_entity.user_id
# workflow app
runner = PipelineRunner(
application_generate_entity=application_generate_entity,
queue_manager=queue_manager,
workflow_thread_pool_id=workflow_thread_pool_id,
variable_loader=variable_loader,
workflow=workflow,
system_user_id=system_user_id,
)
runner.run()
except GenerateTaskStoppedError:
pass
except InvokeAuthorizationError:
queue_manager.publish_error(
InvokeAuthorizationError("Incorrect API key provided"), PublishFrom.APPLICATION_MANAGER
)
except ValidationError as e:
logger.exception("Validation Error when generating")
queue_manager.publish_error(e, PublishFrom.APPLICATION_MANAGER)
except ValueError as e:
if dify_config.DEBUG:
logger.exception("Error when generating")
queue_manager.publish_error(e, PublishFrom.APPLICATION_MANAGER)
except Exception as e:
logger.exception("Unknown Error when generating")
queue_manager.publish_error(e, PublishFrom.APPLICATION_MANAGER)
finally:
db.session.close()
def _handle_response(
self,
application_generate_entity: RagPipelineGenerateEntity,
workflow: Workflow,
queue_manager: AppQueueManager,
user: Union[Account, EndUser],
workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository,
draft_var_saver_factory: DraftVariableSaverFactory,
stream: bool = False,
) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
"""
Handle response.
:param application_generate_entity: application generate entity
:param workflow: workflow
:param queue_manager: queue manager
:param user: account or end user
:param stream: is stream
:param workflow_node_execution_repository: optional repository for workflow node execution
:return:
"""
# init generate task pipeline
generate_task_pipeline = WorkflowAppGenerateTaskPipeline(
application_generate_entity=application_generate_entity,
workflow=workflow,
queue_manager=queue_manager,
user=user,
stream=stream,
workflow_node_execution_repository=workflow_node_execution_repository,
workflow_execution_repository=workflow_execution_repository,
draft_var_saver_factory=draft_var_saver_factory,
)
try:
return generate_task_pipeline.process()
except ValueError as e:
if len(e.args) > 0 and e.args[0] == "I/O operation on closed file.": # ignore this error
raise GenerateTaskStoppedError()
else:
logger.exception(
"Fails to process generate task pipeline, task_id: %r",
application_generate_entity.task_id,
)
raise e
def _build_document(
self,
tenant_id: str,
dataset_id: str,
built_in_field_enabled: bool,
datasource_type: str,
datasource_info: Mapping[str, Any],
created_from: str,
position: int,
account: Union[Account, EndUser],
batch: str,
document_form: str,
):
if datasource_type == "local_file":
name = datasource_info.get("name", "untitled")
elif datasource_type == "online_document":
name = datasource_info.get("page", {}).get("page_name", "untitled")
elif datasource_type == "website_crawl":
name = datasource_info.get("title", "untitled")
elif datasource_type == "online_drive":
name = datasource_info.get("name", "untitled")
else:
raise ValueError(f"Unsupported datasource type: {datasource_type}")
document = Document(
tenant_id=tenant_id,
dataset_id=dataset_id,
position=position,
data_source_type=datasource_type,
data_source_info=json.dumps(datasource_info),
batch=batch,
name=name,
created_from=created_from,
created_by=account.id,
doc_form=document_form,
)
doc_metadata = {}
if built_in_field_enabled:
doc_metadata = {
BuiltInField.document_name: name,
BuiltInField.uploader: account.name,
BuiltInField.upload_date: datetime.datetime.now(datetime.UTC).strftime("%Y-%m-%d %H:%M:%S"),
BuiltInField.last_update_date: datetime.datetime.now(datetime.UTC).strftime("%Y-%m-%d %H:%M:%S"),
BuiltInField.source: datasource_type,
}
if doc_metadata:
document.doc_metadata = doc_metadata
return document
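An illustrative sketch (not part of the diff) of the per-type name resolution in _build_document above, expressed as a lookup table. The field names mirror the branches in the method; resolve_document_name itself is a hypothetical helper for illustration only.

from collections.abc import Mapping
from typing import Any

def resolve_document_name(datasource_type: str, datasource_info: Mapping[str, Any]) -> str:
    # one resolver per supported datasource type
    resolvers = {
        "local_file": lambda info: info.get("name", "untitled"),
        "online_document": lambda info: info.get("page", {}).get("page_name", "untitled"),
        "website_crawl": lambda info: info.get("title", "untitled"),
        "online_drive": lambda info: info.get("name", "untitled"),
    }
    try:
        return resolvers[datasource_type](datasource_info)
    except KeyError:
        raise ValueError(f"Unsupported datasource type: {datasource_type}") from None

print(resolve_document_name("online_document", {"page": {"page_name": "Spec"}}))  # -> Spec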
def _format_datasource_info_list(
self,
datasource_type: str,
datasource_info_list: list[Mapping[str, Any]],
pipeline: Pipeline,
workflow: Workflow,
start_node_id: str,
user: Union[Account, EndUser],
) -> list[Mapping[str, Any]]:
"""
Format datasource info list.
"""
if datasource_type == "online_drive":
all_files = []
datasource_node_data = None
datasource_nodes = workflow.graph_dict.get("nodes", [])
for datasource_node in datasource_nodes:
if datasource_node.get("id") == start_node_id:
datasource_node_data = datasource_node.get("data", {})
break
if not datasource_node_data:
raise ValueError("Datasource node data not found")
from core.datasource.datasource_manager import DatasourceManager
datasource_runtime = DatasourceManager.get_datasource_runtime(
provider_id=f"{datasource_node_data.get('plugin_id')}/{datasource_node_data.get('provider_name')}",
datasource_name=datasource_node_data.get("datasource_name"),
tenant_id=pipeline.tenant_id,
datasource_type=DatasourceProviderType(datasource_type),
)
datasource_provider_service = DatasourceProviderService()
credentials = datasource_provider_service.get_datasource_credentials(
tenant_id=pipeline.tenant_id,
provider=datasource_node_data.get("provider_name"),
plugin_id=datasource_node_data.get("plugin_id"),
credential_id=datasource_node_data.get("credential_id"),
)
if credentials:
datasource_runtime.runtime.credentials = credentials
datasource_runtime = cast(OnlineDriveDatasourcePlugin, datasource_runtime)
for datasource_info in datasource_info_list:
if datasource_info.get("id") and datasource_info.get("type") == "folder":
# get all files in the folder
self._get_files_in_folder(
datasource_runtime,
datasource_info.get("id", ""),
datasource_info.get("bucket", None),
user.id,
all_files,
datasource_info,
None,
)
else:
all_files.append(
{
"id": datasource_info.get("id", ""),
"name": datasource_info.get("name", "untitled"),
"bucket": datasource_info.get("bucket", None),
}
)
return all_files
else:
return datasource_info_list
def _get_files_in_folder(
self,
datasource_runtime: OnlineDriveDatasourcePlugin,
prefix: str,
bucket: Optional[str],
user_id: str,
all_files: list,
datasource_info: Mapping[str, Any],
next_page_parameters: Optional[dict] = None,
):
"""
Get files in a folder.
"""
result_generator = datasource_runtime.online_drive_browse_files(
user_id=user_id,
request=OnlineDriveBrowseFilesRequest(
bucket=bucket,
prefix=prefix,
max_keys=20,
next_page_parameters=next_page_parameters,
),
provider_type=datasource_runtime.datasource_provider_type(),
)
is_truncated = False
for result in result_generator:
for files in result.result:
for file in files.files:
if file.type == "folder":
self._get_files_in_folder(
datasource_runtime,
file.id,
bucket,
user_id,
all_files,
datasource_info,
None,
)
else:
all_files.append(
{
"id": file.id,
"name": file.name,
"bucket": bucket,
}
)
is_truncated = files.is_truncated
next_page_parameters = files.next_page_parameters
if is_truncated:
self._get_files_in_folder(
datasource_runtime, prefix, bucket, user_id, all_files, datasource_info, next_page_parameters
)
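A standalone sketch (not part of the diff) of the traversal pattern in _get_files_in_folder above: recurse into folders, and when a page is truncated, re-request the same prefix with the returned pagination token. FakeDrive and its page table are stand-ins for the online drive plugin.

from typing import Optional

class FakeDrive:
    def __init__(self, pages: dict[tuple[str, int], tuple[list[dict], Optional[int]]]):
        self._pages = pages
    def browse(self, prefix: str, page: int = 0) -> tuple[list[dict], Optional[int]]:
        # returns (entries, next_page) where next_page is None when the listing is complete
        return self._pages[(prefix, page)]

def collect_files(drive: FakeDrive, prefix: str, out: list[dict], page: int = 0) -> None:
    entries, next_page = drive.browse(prefix, page)
    for entry in entries:
        if entry["type"] == "folder":
            collect_files(drive, entry["id"], out)  # descend into the sub-folder
        else:
            out.append(entry)
    if next_page is not None:  # truncated page: fetch the next one for the same prefix
        collect_files(drive, prefix, out, next_page)

drive = FakeDrive({
    ("root", 0): ([{"id": "a", "type": "file"}, {"id": "docs", "type": "folder"}], 1),
    ("root", 1): ([{"id": "b", "type": "file"}], None),
    ("docs", 0): ([{"id": "c", "type": "file"}], None),
})
files: list[dict] = []
collect_files(drive, "root", files)
print([f["id"] for f in files])  # -> ['a', 'c', 'b']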

View File

@ -0,0 +1,45 @@
from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
from core.app.apps.exc import GenerateTaskStoppedError
from core.app.entities.app_invoke_entities import InvokeFrom
from core.app.entities.queue_entities import (
AppQueueEvent,
QueueErrorEvent,
QueueMessageEndEvent,
QueueStopEvent,
QueueWorkflowFailedEvent,
QueueWorkflowPartialSuccessEvent,
QueueWorkflowSucceededEvent,
WorkflowQueueMessage,
)
class PipelineQueueManager(AppQueueManager):
def __init__(self, task_id: str, user_id: str, invoke_from: InvokeFrom, app_mode: str) -> None:
super().__init__(task_id, user_id, invoke_from)
self._app_mode = app_mode
def _publish(self, event: AppQueueEvent, pub_from: PublishFrom) -> None:
"""
Publish event to queue
:param event:
:param pub_from:
:return:
"""
message = WorkflowQueueMessage(task_id=self._task_id, app_mode=self._app_mode, event=event)
self._q.put(message)
if isinstance(
event,
QueueStopEvent
| QueueErrorEvent
| QueueMessageEndEvent
| QueueWorkflowSucceededEvent
| QueueWorkflowFailedEvent
| QueueWorkflowPartialSuccessEvent,
):
self.stop_listen()
if pub_from == PublishFrom.APPLICATION_MANAGER and self._is_stopped():
raise GenerateTaskStoppedError()
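A standalone sketch (not part of the diff) of the "publish, then stop listening on terminal events" behaviour of PipelineQueueManager above, using plain stand-in event names rather than the real queue entity classes.

import queue

TERMINAL_EVENTS = ("stop", "error", "message_end", "succeeded", "failed", "partial_success")

class MiniQueueManager:
    def __init__(self) -> None:
        self._q: "queue.Queue[dict]" = queue.Queue()
        self._listening = True
    def publish(self, event_type: str, payload: dict | None = None) -> None:
        self._q.put({"event": event_type, "payload": payload or {}})
        if event_type in TERMINAL_EVENTS:
            self._listening = False  # mirrors stop_listen()
    def listen(self):
        # drain everything that was published, then stop once a terminal event closed the queue
        while self._listening or not self._q.empty():
            yield self._q.get()

qm = MiniQueueManager()
qm.publish("workflow_started")
qm.publish("succeeded")
print([m["event"] for m in qm.listen()])  # -> ['workflow_started', 'succeeded']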

View File

@ -0,0 +1,280 @@
import logging
import time
from typing import Optional, cast
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.apps.pipeline.pipeline_config_manager import PipelineConfig
from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner
from core.app.entities.app_invoke_entities import (
InvokeFrom,
RagPipelineGenerateEntity,
)
from core.variables.variables import RAGPipelineVariable, RAGPipelineVariableInput
from core.workflow.entities.graph_init_params import GraphInitParams
from core.workflow.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.graph import Graph
from core.workflow.graph_events import GraphEngineEvent, GraphRunFailedEvent
from core.workflow.nodes.node_factory import DifyNodeFactory
from core.workflow.system_variable import SystemVariable
from core.workflow.variable_loader import VariableLoader
from core.workflow.workflow_entry import WorkflowEntry
from extensions.ext_database import db
from models.dataset import Document, Pipeline
from models.enums import UserFrom
from models.model import EndUser
from models.workflow import Workflow
logger = logging.getLogger(__name__)
class PipelineRunner(WorkflowBasedAppRunner):
"""
Pipeline Application Runner
"""
def __init__(
self,
application_generate_entity: RagPipelineGenerateEntity,
queue_manager: AppQueueManager,
variable_loader: VariableLoader,
workflow: Workflow,
system_user_id: str,
workflow_thread_pool_id: Optional[str] = None,
) -> None:
"""
:param application_generate_entity: application generate entity
:param queue_manager: application queue manager
:param workflow_thread_pool_id: workflow thread pool id
"""
super().__init__(
queue_manager=queue_manager,
variable_loader=variable_loader,
app_id=application_generate_entity.app_config.app_id,
)
self.application_generate_entity = application_generate_entity
self.workflow_thread_pool_id = workflow_thread_pool_id
self._workflow = workflow
self._sys_user_id = system_user_id
def _get_app_id(self) -> str:
return self.application_generate_entity.app_config.app_id
def run(self) -> None:
"""
Run application
"""
app_config = self.application_generate_entity.app_config
app_config = cast(PipelineConfig, app_config)
user_id = None
if self.application_generate_entity.invoke_from in {InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API}:
end_user = db.session.query(EndUser).filter(EndUser.id == self.application_generate_entity.user_id).first()
if end_user:
user_id = end_user.session_id
else:
user_id = self.application_generate_entity.user_id
pipeline = db.session.query(Pipeline).filter(Pipeline.id == app_config.app_id).first()
if not pipeline:
raise ValueError("Pipeline not found")
workflow = self.get_workflow(pipeline=pipeline, workflow_id=app_config.workflow_id)
if not workflow:
raise ValueError("Workflow not initialized")
db.session.close()
# if only single iteration run is requested
if self.application_generate_entity.single_iteration_run:
graph_runtime_state = GraphRuntimeState(
variable_pool=VariablePool.empty(),
start_at=time.time(),
)
graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration(
workflow=workflow,
node_id=self.application_generate_entity.single_iteration_run.node_id,
user_inputs=self.application_generate_entity.single_iteration_run.inputs,
graph_runtime_state=graph_runtime_state,
)
elif self.application_generate_entity.single_loop_run:
graph_runtime_state = GraphRuntimeState(
variable_pool=VariablePool.empty(),
start_at=time.time(),
)
# if only single loop run is requested
graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop(
workflow=workflow,
node_id=self.application_generate_entity.single_loop_run.node_id,
user_inputs=self.application_generate_entity.single_loop_run.inputs,
graph_runtime_state=graph_runtime_state,
)
else:
inputs = self.application_generate_entity.inputs
files = self.application_generate_entity.files
# Create a variable pool.
system_inputs = SystemVariable(
files=files,
user_id=user_id,
app_id=app_config.app_id,
workflow_id=app_config.workflow_id,
workflow_execution_id=self.application_generate_entity.workflow_execution_id,
document_id=self.application_generate_entity.document_id,
batch=self.application_generate_entity.batch,
dataset_id=self.application_generate_entity.dataset_id,
datasource_type=self.application_generate_entity.datasource_type,
datasource_info=self.application_generate_entity.datasource_info,
invoke_from=self.application_generate_entity.invoke_from.value,
)
rag_pipeline_variables = []
if workflow.rag_pipeline_variables:
for v in workflow.rag_pipeline_variables:
rag_pipeline_variable = RAGPipelineVariable(**v)
if (
rag_pipeline_variable.belong_to_node_id
in (self.application_generate_entity.start_node_id, "shared")
) and rag_pipeline_variable.variable in inputs:
rag_pipeline_variables.append(
RAGPipelineVariableInput(
variable=rag_pipeline_variable,
value=inputs[rag_pipeline_variable.variable],
)
)
variable_pool = VariablePool(
system_variables=system_inputs,
user_inputs=inputs,
environment_variables=workflow.environment_variables,
conversation_variables=[],
rag_pipeline_variables=rag_pipeline_variables,
)
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
# init graph
graph = self._init_rag_pipeline_graph(
graph_runtime_state=graph_runtime_state,
start_node_id=self.application_generate_entity.start_node_id,
workflow=workflow,
)
# RUN WORKFLOW
workflow_entry = WorkflowEntry(
tenant_id=workflow.tenant_id,
app_id=workflow.app_id,
workflow_id=workflow.id,
graph=graph,
graph_config=workflow.graph_dict,
user_id=self.application_generate_entity.user_id,
user_from=(
UserFrom.ACCOUNT
if self.application_generate_entity.invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER}
else UserFrom.END_USER
),
invoke_from=self.application_generate_entity.invoke_from,
call_depth=self.application_generate_entity.call_depth,
graph_runtime_state=graph_runtime_state,
)
generator = workflow_entry.run()
for event in generator:
self._update_document_status(
event, self.application_generate_entity.document_id, self.application_generate_entity.dataset_id
)
self._handle_event(workflow_entry, event)
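A standalone sketch (not part of the diff) of the variable filtering in run() above: a declared pipeline variable is kept only if it belongs to the start node (or is shared) and the caller actually supplied a value for it. Plain dicts stand in for RAGPipelineVariable, and select_pipeline_inputs is a hypothetical helper.

from collections.abc import Mapping
from typing import Any

def select_pipeline_inputs(
    declared: list[dict[str, str]],
    inputs: Mapping[str, Any],
    start_node_id: str,
) -> list[tuple[dict[str, str], Any]]:
    selected = []
    for var in declared:
        # keep variables scoped to the start node or shared across nodes, with a supplied value
        if var["belong_to_node_id"] in (start_node_id, "shared") and var["variable"] in inputs:
            selected.append((var, inputs[var["variable"]]))
    return selected

declared = [
    {"variable": "url", "belong_to_node_id": "datasource_1"},
    {"variable": "chunk_size", "belong_to_node_id": "shared"},
    {"variable": "token", "belong_to_node_id": "datasource_2"},
]
print(select_pipeline_inputs(declared, {"url": "https://example.com", "chunk_size": 512}, "datasource_1"))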
def get_workflow(self, pipeline: Pipeline, workflow_id: str) -> Optional[Workflow]:
"""
Get workflow
"""
# fetch workflow by workflow_id
workflow = (
db.session.query(Workflow)
.filter(
Workflow.tenant_id == pipeline.tenant_id, Workflow.app_id == pipeline.id, Workflow.id == workflow_id
)
.first()
)
# return workflow
return workflow
def _init_rag_pipeline_graph(
self, workflow: Workflow, graph_runtime_state: GraphRuntimeState, start_node_id: Optional[str] = None
) -> Graph:
"""
Init pipeline graph
"""
graph_config = workflow.graph_dict
if "nodes" not in graph_config or "edges" not in graph_config:
raise ValueError("nodes or edges not found in workflow graph")
if not isinstance(graph_config.get("nodes"), list):
raise ValueError("nodes in workflow graph must be a list")
if not isinstance(graph_config.get("edges"), list):
raise ValueError("edges in workflow graph must be a list")
# nodes = graph_config.get("nodes", [])
# edges = graph_config.get("edges", [])
# real_run_nodes = []
# real_edges = []
# exclude_node_ids = []
# for node in nodes:
# node_id = node.get("id")
# node_type = node.get("data", {}).get("type", "")
# if node_type == "datasource":
# if start_node_id != node_id:
# exclude_node_ids.append(node_id)
# continue
# real_run_nodes.append(node)
# for edge in edges:
# if edge.get("source") in exclude_node_ids:
# continue
# real_edges.append(edge)
# graph_config = dict(graph_config)
# graph_config["nodes"] = real_run_nodes
# graph_config["edges"] = real_edges
# init graph
# Create required parameters for Graph.init
graph_init_params = GraphInitParams(
tenant_id=workflow.tenant_id,
app_id=self._app_id,
workflow_id=workflow.id,
graph_config=graph_config,
user_id=self.application_generate_entity.user_id,
user_from=UserFrom.ACCOUNT.value,
invoke_from=InvokeFrom.SERVICE_API.value,
call_depth=0,
)
node_factory = DifyNodeFactory(
graph_init_params=graph_init_params,
graph_runtime_state=graph_runtime_state,
)
graph = Graph.init(graph_config=graph_config, node_factory=node_factory, root_node_id=start_node_id)
if not graph:
raise ValueError("graph not found in workflow")
return graph
def _update_document_status(self, event: GraphEngineEvent, document_id: str | None, dataset_id: str | None) -> None:
"""
Update document status
"""
if isinstance(event, GraphRunFailedEvent):
if document_id and dataset_id:
document = (
db.session.query(Document)
.filter(Document.id == document_id, Document.dataset_id == dataset_id)
.first()
)
if document:
document.indexing_status = "error"
document.error = event.error or "Unknown error"
db.session.add(document)
db.session.commit()
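A standalone sketch (not part of the diff) of the failure handling in _update_document_status above: when the graph run fails, the related document is marked as errored with the event's error message. The dataclasses are stand-ins for the real event and model types.

from dataclasses import dataclass

@dataclass
class GraphRunFailed:
    error: str | None = None

@dataclass
class Doc:
    indexing_status: str = "indexing"
    error: str | None = None

def update_document_status(event: object, document: Doc | None) -> None:
    # only a failed graph run flips the document into an error state
    if isinstance(event, GraphRunFailed) and document is not None:
        document.indexing_status = "error"
        document.error = event.error or "Unknown error"

doc = Doc()
update_document_status(GraphRunFailed(error="datasource timed out"), doc)
print(doc.indexing_status, "-", doc.error)  # -> error - datasource timed out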

View File

@ -3,7 +3,7 @@ import logging
import threading
import uuid
from collections.abc import Generator, Mapping, Sequence
from typing import Any, Literal, Optional, Union, overload
from typing import Any, Literal, Union, overload
from flask import Flask, current_app
from pydantic import ValidationError
@ -53,7 +53,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom,
streaming: Literal[True],
call_depth: int,
workflow_thread_pool_id: Optional[str],
) -> Generator[Mapping | str, None, None]: ...
@overload
@ -67,7 +66,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom,
streaming: Literal[False],
call_depth: int,
workflow_thread_pool_id: Optional[str],
) -> Mapping[str, Any]: ...
@overload
@ -81,7 +79,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom,
streaming: bool,
call_depth: int,
workflow_thread_pool_id: Optional[str],
) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None]]: ...
def generate(
@ -94,7 +91,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom,
streaming: bool = True,
call_depth: int = 0,
workflow_thread_pool_id: Optional[str] = None,
) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None]]:
files: Sequence[Mapping[str, Any]] = args.get("files") or []
@ -186,7 +182,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
workflow_execution_repository=workflow_execution_repository,
workflow_node_execution_repository=workflow_node_execution_repository,
streaming=streaming,
workflow_thread_pool_id=workflow_thread_pool_id,
)
def _generate(
@ -200,7 +195,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository,
streaming: bool = True,
workflow_thread_pool_id: Optional[str] = None,
variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER,
) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]:
"""
@ -214,7 +208,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
:param workflow_execution_repository: repository for workflow execution
:param workflow_node_execution_repository: repository for workflow node execution
:param streaming: is stream
:param workflow_thread_pool_id: workflow thread pool id
"""
# init queue manager
queue_manager = WorkflowAppQueueManager(
@ -237,16 +230,13 @@ class WorkflowAppGenerator(BaseAppGenerator):
"application_generate_entity": application_generate_entity,
"queue_manager": queue_manager,
"context": context,
"workflow_thread_pool_id": workflow_thread_pool_id,
"variable_loader": variable_loader,
},
)
worker_thread.start()
draft_var_saver_factory = self._get_draft_var_saver_factory(
invoke_from,
)
draft_var_saver_factory = self._get_draft_var_saver_factory(invoke_from, user)
# return response or stream generator
response = self._handle_response(
@ -434,17 +424,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
queue_manager: AppQueueManager,
context: contextvars.Context,
variable_loader: VariableLoader,
workflow_thread_pool_id: Optional[str] = None,
) -> None:
"""
Generate worker in a new thread.
:param flask_app: Flask app
:param application_generate_entity: application generate entity
:param queue_manager: queue manager
:param workflow_thread_pool_id: workflow thread pool id
:return:
"""
with preserve_flask_contexts(flask_app, context_vars=context):
with Session(db.engine, expire_on_commit=False) as session:
workflow = session.scalar(
@ -474,7 +454,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
runner = WorkflowAppRunner(
application_generate_entity=application_generate_entity,
queue_manager=queue_manager,
workflow_thread_pool_id=workflow_thread_pool_id,
variable_loader=variable_loader,
workflow=workflow,
system_user_id=system_user_id,

View File

@ -1,7 +1,7 @@
import logging
from typing import Optional, cast
import time
from typing import cast
from configs import dify_config
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.apps.workflow.app_config_manager import WorkflowAppConfig
from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner
@ -9,13 +9,14 @@ from core.app.entities.app_invoke_entities import (
InvokeFrom,
WorkflowAppGenerateEntity,
)
from core.workflow.callbacks import WorkflowCallback, WorkflowLoggingCallback
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities import GraphRuntimeState, VariablePool
from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel
from core.workflow.system_variable import SystemVariable
from core.workflow.variable_loader import VariableLoader
from core.workflow.workflow_entry import WorkflowEntry
from extensions.ext_redis import redis_client
from models.enums import UserFrom
from models.workflow import Workflow, WorkflowType
from models.workflow import Workflow
logger = logging.getLogger(__name__)
@ -31,7 +32,6 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
application_generate_entity: WorkflowAppGenerateEntity,
queue_manager: AppQueueManager,
variable_loader: VariableLoader,
workflow_thread_pool_id: Optional[str] = None,
workflow: Workflow,
system_user_id: str,
) -> None:
@ -41,7 +41,6 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
app_id=application_generate_entity.app_config.app_id,
)
self.application_generate_entity = application_generate_entity
self.workflow_thread_pool_id = workflow_thread_pool_id
self._workflow = workflow
self._sys_user_id = system_user_id
@ -52,24 +51,30 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
app_config = self.application_generate_entity.app_config
app_config = cast(WorkflowAppConfig, app_config)
workflow_callbacks: list[WorkflowCallback] = []
if dify_config.DEBUG:
workflow_callbacks.append(WorkflowLoggingCallback())
# if only single iteration run is requested
if self.application_generate_entity.single_iteration_run:
# if only single iteration run is requested
graph_runtime_state = GraphRuntimeState(
variable_pool=VariablePool.empty(),
start_at=time.time(),
)
graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration(
workflow=self._workflow,
node_id=self.application_generate_entity.single_iteration_run.node_id,
user_inputs=self.application_generate_entity.single_iteration_run.inputs,
graph_runtime_state=graph_runtime_state,
)
elif self.application_generate_entity.single_loop_run:
# if only single loop run is requested
graph_runtime_state = GraphRuntimeState(
variable_pool=VariablePool.empty(),
start_at=time.time(),
)
graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop(
workflow=self._workflow,
node_id=self.application_generate_entity.single_loop_run.node_id,
user_inputs=self.application_generate_entity.single_loop_run.inputs,
graph_runtime_state=graph_runtime_state,
)
else:
inputs = self.application_generate_entity.inputs
@ -92,15 +97,26 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
conversation_variables=[],
)
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
# init graph
graph = self._init_graph(graph_config=self._workflow.graph_dict)
graph = self._init_graph(
graph_config=self._workflow.graph_dict,
graph_runtime_state=graph_runtime_state,
workflow_id=self._workflow.id,
tenant_id=self._workflow.tenant_id,
)
# RUN WORKFLOW
# Create Redis command channel for this workflow execution
task_id = self.application_generate_entity.task_id
channel_key = f"workflow:{task_id}:commands"
command_channel = RedisChannel(redis_client, channel_key)
workflow_entry = WorkflowEntry(
tenant_id=self._workflow.tenant_id,
app_id=self._workflow.app_id,
workflow_id=self._workflow.id,
workflow_type=WorkflowType.value_of(self._workflow.type),
graph=graph,
graph_config=self._workflow.graph_dict,
user_id=self.application_generate_entity.user_id,
@ -111,11 +127,11 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
),
invoke_from=self.application_generate_entity.invoke_from,
call_depth=self.application_generate_entity.call_depth,
variable_pool=variable_pool,
thread_pool_id=self.workflow_thread_pool_id,
graph_runtime_state=graph_runtime_state,
command_channel=command_channel,
)
generator = workflow_entry.run(callbacks=workflow_callbacks)
generator = workflow_entry.run()
for event in generator:
self._handle_event(workflow_entry, event)

View File

@ -2,7 +2,7 @@ import logging
import time
from collections.abc import Callable, Generator
from contextlib import contextmanager
from typing import Any, Optional, Union
from typing import Optional, Union
from sqlalchemy.orm import Session
@ -14,6 +14,7 @@ from core.app.entities.app_invoke_entities import (
WorkflowAppGenerateEntity,
)
from core.app.entities.queue_entities import (
AppQueueEvent,
MessageQueueMessage,
QueueAgentLogEvent,
QueueErrorEvent,
@ -25,14 +26,9 @@ from core.app.entities.queue_entities import (
QueueLoopStartEvent,
QueueNodeExceptionEvent,
QueueNodeFailedEvent,
QueueNodeInIterationFailedEvent,
QueueNodeInLoopFailedEvent,
QueueNodeRetryEvent,
QueueNodeStartedEvent,
QueueNodeSucceededEvent,
QueueParallelBranchRunFailedEvent,
QueueParallelBranchRunStartedEvent,
QueueParallelBranchRunSucceededEvent,
QueuePingEvent,
QueueStopEvent,
QueueTextChunkEvent,
@ -57,8 +53,8 @@ from core.app.entities.task_entities import (
from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.ops.ops_trace_manager import TraceQueueManager
from core.workflow.entities.workflow_execution import WorkflowExecution, WorkflowExecutionStatus, WorkflowType
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.entities import GraphRuntimeState, WorkflowExecution
from core.workflow.enums import WorkflowExecutionStatus, WorkflowType
from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
@ -349,9 +345,7 @@ class WorkflowAppGenerateTaskPipeline:
def _handle_node_failed_events(
self,
event: Union[
QueueNodeFailedEvent, QueueNodeInIterationFailedEvent, QueueNodeInLoopFailedEvent, QueueNodeExceptionEvent
],
event: Union[QueueNodeFailedEvent, QueueNodeExceptionEvent],
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle various node failure events."""
@ -370,32 +364,6 @@ class WorkflowAppGenerateTaskPipeline:
if node_failed_response:
yield node_failed_response
def _handle_parallel_branch_started_event(
self, event: QueueParallelBranchRunStartedEvent, **kwargs
) -> Generator[StreamResponse, None, None]:
"""Handle parallel branch started events."""
self._ensure_workflow_initialized()
parallel_start_resp = self._workflow_response_converter.workflow_parallel_branch_start_to_stream_response(
task_id=self._application_generate_entity.task_id,
workflow_execution_id=self._workflow_run_id,
event=event,
)
yield parallel_start_resp
def _handle_parallel_branch_finished_events(
self, event: Union[QueueParallelBranchRunSucceededEvent, QueueParallelBranchRunFailedEvent], **kwargs
) -> Generator[StreamResponse, None, None]:
"""Handle parallel branch finished events."""
self._ensure_workflow_initialized()
parallel_finish_resp = self._workflow_response_converter.workflow_parallel_branch_finished_to_stream_response(
task_id=self._application_generate_entity.task_id,
workflow_execution_id=self._workflow_run_id,
event=event,
)
yield parallel_finish_resp
def _handle_iteration_start_event(
self, event: QueueIterationStartEvent, **kwargs
) -> Generator[StreamResponse, None, None]:
@ -617,8 +585,6 @@ class WorkflowAppGenerateTaskPipeline:
QueueNodeRetryEvent: self._handle_node_retry_event,
QueueNodeStartedEvent: self._handle_node_started_event,
QueueNodeSucceededEvent: self._handle_node_succeeded_event,
# Parallel branch events
QueueParallelBranchRunStartedEvent: self._handle_parallel_branch_started_event,
# Iteration events
QueueIterationStartEvent: self._handle_iteration_start_event,
QueueIterationNextEvent: self._handle_iteration_next_event,
@ -633,7 +599,7 @@ class WorkflowAppGenerateTaskPipeline:
def _dispatch_event(
self,
event: Any,
event: AppQueueEvent,
*,
graph_runtime_state: Optional[GraphRuntimeState] = None,
tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
@ -660,8 +626,6 @@ class WorkflowAppGenerateTaskPipeline:
event,
(
QueueNodeFailedEvent,
QueueNodeInIterationFailedEvent,
QueueNodeInLoopFailedEvent,
QueueNodeExceptionEvent,
),
):
@ -674,17 +638,6 @@ class WorkflowAppGenerateTaskPipeline:
)
return
# Handle parallel branch finished events with isinstance check
if isinstance(event, (QueueParallelBranchRunSucceededEvent, QueueParallelBranchRunFailedEvent)):
yield from self._handle_parallel_branch_finished_events(
event,
graph_runtime_state=graph_runtime_state,
tts_publisher=tts_publisher,
trace_manager=trace_manager,
queue_message=queue_message,
)
return
# Handle workflow failed and stop events with isinstance check
if isinstance(event, (QueueWorkflowFailedEvent, QueueStopEvent)):
yield from self._handle_workflow_failed_and_stop_events(

View File

@ -2,6 +2,7 @@ from collections.abc import Mapping
from typing import Any, cast
from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
from core.app.entities.app_invoke_entities import InvokeFrom
from core.app.entities.queue_entities import (
AppQueueEvent,
QueueAgentLogEvent,
@ -13,14 +14,9 @@ from core.app.entities.queue_entities import (
QueueLoopStartEvent,
QueueNodeExceptionEvent,
QueueNodeFailedEvent,
QueueNodeInIterationFailedEvent,
QueueNodeInLoopFailedEvent,
QueueNodeRetryEvent,
QueueNodeStartedEvent,
QueueNodeSucceededEvent,
QueueParallelBranchRunFailedEvent,
QueueParallelBranchRunStartedEvent,
QueueParallelBranchRunSucceededEvent,
QueueRetrieverResourcesEvent,
QueueTextChunkEvent,
QueueWorkflowFailedEvent,
@ -28,42 +24,39 @@ from core.app.entities.queue_entities import (
QueueWorkflowStartedEvent,
QueueWorkflowSucceededEvent,
)
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey
from core.workflow.graph_engine.entities.event import (
AgentLogEvent,
from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool
from core.workflow.graph import Graph
from core.workflow.graph_events import (
GraphEngineEvent,
GraphRunFailedEvent,
GraphRunPartialSucceededEvent,
GraphRunStartedEvent,
GraphRunSucceededEvent,
IterationRunFailedEvent,
IterationRunNextEvent,
IterationRunStartedEvent,
IterationRunSucceededEvent,
LoopRunFailedEvent,
LoopRunNextEvent,
LoopRunStartedEvent,
LoopRunSucceededEvent,
NodeInIterationFailedEvent,
NodeInLoopFailedEvent,
NodeRunAgentLogEvent,
NodeRunExceptionEvent,
NodeRunFailedEvent,
NodeRunIterationFailedEvent,
NodeRunIterationNextEvent,
NodeRunIterationStartedEvent,
NodeRunIterationSucceededEvent,
NodeRunLoopFailedEvent,
NodeRunLoopNextEvent,
NodeRunLoopStartedEvent,
NodeRunLoopSucceededEvent,
NodeRunRetrieverResourceEvent,
NodeRunRetryEvent,
NodeRunStartedEvent,
NodeRunStreamChunkEvent,
NodeRunSucceededEvent,
ParallelBranchRunFailedEvent,
ParallelBranchRunStartedEvent,
ParallelBranchRunSucceededEvent,
)
from core.workflow.graph_engine.entities.graph import Graph
from core.workflow.graph_events.graph import GraphRunAbortedEvent
from core.workflow.nodes import NodeType
from core.workflow.nodes.node_factory import DifyNodeFactory
from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING
from core.workflow.system_variable import SystemVariable
from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, load_into_variable_pool
from core.workflow.workflow_entry import WorkflowEntry
from models.enums import UserFrom
from models.workflow import Workflow
@ -79,7 +72,14 @@ class WorkflowBasedAppRunner:
self._variable_loader = variable_loader
self._app_id = app_id
def _init_graph(self, graph_config: Mapping[str, Any]) -> Graph:
def _init_graph(
self,
graph_config: Mapping[str, Any],
graph_runtime_state: GraphRuntimeState,
workflow_id: str = "",
tenant_id: str = "",
user_id: str = "",
) -> Graph:
"""
Init graph
"""
@ -91,8 +91,28 @@ class WorkflowBasedAppRunner:
if not isinstance(graph_config.get("edges"), list):
raise ValueError("edges in workflow graph must be a list")
# Create required parameters for Graph.init
graph_init_params = GraphInitParams(
tenant_id=tenant_id or "",
app_id=self._app_id,
workflow_id=workflow_id,
graph_config=graph_config,
user_id=user_id,
user_from=UserFrom.ACCOUNT.value,
invoke_from=InvokeFrom.SERVICE_API.value,
call_depth=0,
)
# Use the provided graph_runtime_state for consistent state management
node_factory = DifyNodeFactory(
graph_init_params=graph_init_params,
graph_runtime_state=graph_runtime_state,
)
# init graph
graph = Graph.init(graph_config=graph_config)
graph = Graph.init(graph_config=graph_config, node_factory=node_factory)
if not graph:
raise ValueError("graph not found in workflow")
@ -104,6 +124,7 @@ class WorkflowBasedAppRunner:
workflow: Workflow,
node_id: str,
user_inputs: dict,
graph_runtime_state: GraphRuntimeState,
) -> tuple[Graph, VariablePool]:
"""
Get variable pool of single iteration
@ -145,8 +166,25 @@ class WorkflowBasedAppRunner:
graph_config["edges"] = edge_configs
# Create required parameters for Graph.init
graph_init_params = GraphInitParams(
tenant_id=workflow.tenant_id,
app_id=self._app_id,
workflow_id=workflow.id,
graph_config=graph_config,
user_id="",
user_from=UserFrom.ACCOUNT.value,
invoke_from=InvokeFrom.SERVICE_API.value,
call_depth=0,
)
node_factory = DifyNodeFactory(
graph_init_params=graph_init_params,
graph_runtime_state=graph_runtime_state,
)
# init graph
graph = Graph.init(graph_config=graph_config, root_node_id=node_id)
graph = Graph.init(graph_config=graph_config, node_factory=node_factory, root_node_id=node_id)
if not graph:
raise ValueError("graph not found in workflow")
@ -201,6 +239,7 @@ class WorkflowBasedAppRunner:
workflow: Workflow,
node_id: str,
user_inputs: dict,
graph_runtime_state: GraphRuntimeState,
) -> tuple[Graph, VariablePool]:
"""
Get variable pool of single loop
@ -242,8 +281,25 @@ class WorkflowBasedAppRunner:
graph_config["edges"] = edge_configs
# Create required parameters for Graph.init
graph_init_params = GraphInitParams(
tenant_id=workflow.tenant_id,
app_id=self._app_id,
workflow_id=workflow.id,
graph_config=graph_config,
user_id="",
user_from=UserFrom.ACCOUNT.value,
invoke_from=InvokeFrom.SERVICE_API.value,
call_depth=0,
)
node_factory = DifyNodeFactory(
graph_init_params=graph_init_params,
graph_runtime_state=graph_runtime_state,
)
# init graph
graph = Graph.init(graph_config=graph_config, root_node_id=node_id)
graph = Graph.init(graph_config=graph_config, node_factory=node_factory, root_node_id=node_id)
if not graph:
raise ValueError("graph not found in workflow")
@ -310,29 +366,21 @@ class WorkflowBasedAppRunner:
)
elif isinstance(event, GraphRunFailedEvent):
self._publish_event(QueueWorkflowFailedEvent(error=event.error, exceptions_count=event.exceptions_count))
elif isinstance(event, GraphRunAbortedEvent):
self._publish_event(QueueWorkflowFailedEvent(error=event.reason or "Unknown error", exceptions_count=0))
elif isinstance(event, NodeRunRetryEvent):
node_run_result = event.route_node_state.node_run_result
inputs: Mapping[str, Any] | None = {}
process_data: Mapping[str, Any] | None = {}
outputs: Mapping[str, Any] | None = {}
execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = {}
if node_run_result:
inputs = node_run_result.inputs
process_data = node_run_result.process_data
outputs = node_run_result.outputs
execution_metadata = node_run_result.metadata
node_run_result = event.node_run_result
inputs = node_run_result.inputs
process_data = node_run_result.process_data
outputs = node_run_result.outputs
execution_metadata = node_run_result.metadata
self._publish_event(
QueueNodeRetryEvent(
node_execution_id=event.id,
node_id=event.node_id,
node_title=event.node_title,
node_type=event.node_type,
node_data=event.node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
start_at=event.start_at,
node_run_index=event.route_node_state.index,
predecessor_node_id=event.predecessor_node_id,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
@ -343,6 +391,8 @@ class WorkflowBasedAppRunner:
error=event.error,
execution_metadata=execution_metadata,
retry_index=event.retry_index,
provider_type=event.provider_type,
provider_id=event.provider_id,
)
)
elif isinstance(event, NodeRunStartedEvent):
@ -350,44 +400,30 @@ class WorkflowBasedAppRunner:
QueueNodeStartedEvent(
node_execution_id=event.id,
node_id=event.node_id,
node_title=event.node_title,
node_type=event.node_type,
node_data=event.node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
start_at=event.route_node_state.start_at,
node_run_index=event.route_node_state.index,
start_at=event.start_at,
predecessor_node_id=event.predecessor_node_id,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
parallel_mode_run_id=event.parallel_mode_run_id,
agent_strategy=event.agent_strategy,
provider_type=event.provider_type,
provider_id=event.provider_id,
)
)
elif isinstance(event, NodeRunSucceededEvent):
node_run_result = event.route_node_state.node_run_result
if node_run_result:
inputs = node_run_result.inputs
process_data = node_run_result.process_data
outputs = node_run_result.outputs
execution_metadata = node_run_result.metadata
else:
inputs = {}
process_data = {}
outputs = {}
execution_metadata = {}
node_run_result = event.node_run_result
inputs = node_run_result.inputs
process_data = node_run_result.process_data
outputs = node_run_result.outputs
execution_metadata = node_run_result.metadata
self._publish_event(
QueueNodeSucceededEvent(
node_execution_id=event.id,
node_id=event.node_id,
node_type=event.node_type,
node_data=event.node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
start_at=event.route_node_state.start_at,
start_at=event.start_at,
inputs=inputs,
process_data=process_data,
outputs=outputs,
@ -396,34 +432,18 @@ class WorkflowBasedAppRunner:
in_loop_id=event.in_loop_id,
)
)
elif isinstance(event, NodeRunFailedEvent):
self._publish_event(
QueueNodeFailedEvent(
node_execution_id=event.id,
node_id=event.node_id,
node_type=event.node_type,
node_data=event.node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
start_at=event.route_node_state.start_at,
inputs=event.route_node_state.node_run_result.inputs
if event.route_node_state.node_run_result
else {},
process_data=event.route_node_state.node_run_result.process_data
if event.route_node_state.node_run_result
else {},
outputs=event.route_node_state.node_run_result.outputs or {}
if event.route_node_state.node_run_result
else {},
error=event.route_node_state.node_run_result.error
if event.route_node_state.node_run_result and event.route_node_state.node_run_result.error
else "Unknown error",
execution_metadata=event.route_node_state.node_run_result.metadata
if event.route_node_state.node_run_result
else {},
start_at=event.start_at,
inputs=event.node_run_result.inputs,
process_data=event.node_run_result.process_data,
outputs=event.node_run_result.outputs,
error=event.node_run_result.error or "Unknown error",
execution_metadata=event.node_run_result.metadata,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
)
@ -434,93 +454,21 @@ class WorkflowBasedAppRunner:
node_execution_id=event.id,
node_id=event.node_id,
node_type=event.node_type,
node_data=event.node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
start_at=event.route_node_state.start_at,
inputs=event.route_node_state.node_run_result.inputs
if event.route_node_state.node_run_result
else {},
process_data=event.route_node_state.node_run_result.process_data
if event.route_node_state.node_run_result
else {},
outputs=event.route_node_state.node_run_result.outputs
if event.route_node_state.node_run_result
else {},
error=event.route_node_state.node_run_result.error
if event.route_node_state.node_run_result and event.route_node_state.node_run_result.error
else "Unknown error",
execution_metadata=event.route_node_state.node_run_result.metadata
if event.route_node_state.node_run_result
else {},
start_at=event.start_at,
inputs=event.node_run_result.inputs,
process_data=event.node_run_result.process_data,
outputs=event.node_run_result.outputs,
error=event.node_run_result.error or "Unknown error",
execution_metadata=event.node_run_result.metadata,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
)
)
elif isinstance(event, NodeInIterationFailedEvent):
self._publish_event(
QueueNodeInIterationFailedEvent(
node_execution_id=event.id,
node_id=event.node_id,
node_type=event.node_type,
node_data=event.node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
start_at=event.route_node_state.start_at,
inputs=event.route_node_state.node_run_result.inputs
if event.route_node_state.node_run_result
else {},
process_data=event.route_node_state.node_run_result.process_data
if event.route_node_state.node_run_result
else {},
outputs=event.route_node_state.node_run_result.outputs or {}
if event.route_node_state.node_run_result
else {},
execution_metadata=event.route_node_state.node_run_result.metadata
if event.route_node_state.node_run_result
else {},
in_iteration_id=event.in_iteration_id,
error=event.error,
)
)
elif isinstance(event, NodeInLoopFailedEvent):
self._publish_event(
QueueNodeInLoopFailedEvent(
node_execution_id=event.id,
node_id=event.node_id,
node_type=event.node_type,
node_data=event.node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
start_at=event.route_node_state.start_at,
inputs=event.route_node_state.node_run_result.inputs
if event.route_node_state.node_run_result
else {},
process_data=event.route_node_state.node_run_result.process_data
if event.route_node_state.node_run_result
else {},
outputs=event.route_node_state.node_run_result.outputs or {}
if event.route_node_state.node_run_result
else {},
execution_metadata=event.route_node_state.node_run_result.metadata
if event.route_node_state.node_run_result
else {},
in_loop_id=event.in_loop_id,
error=event.error,
)
)
elif isinstance(event, NodeRunStreamChunkEvent):
self._publish_event(
QueueTextChunkEvent(
text=event.chunk_content,
from_variable_selector=event.from_variable_selector,
text=event.chunk,
from_variable_selector=list(event.selector),
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
)
@ -533,10 +481,10 @@ class WorkflowBasedAppRunner:
in_loop_id=event.in_loop_id,
)
)
elif isinstance(event, AgentLogEvent):
elif isinstance(event, NodeRunAgentLogEvent):
self._publish_event(
QueueAgentLogEvent(
id=event.id,
id=event.message_id,
label=event.label,
node_execution_id=event.node_execution_id,
parent_id=event.parent_id,
@ -547,51 +495,13 @@ class WorkflowBasedAppRunner:
node_id=event.node_id,
)
)
elif isinstance(event, ParallelBranchRunStartedEvent):
self._publish_event(
QueueParallelBranchRunStartedEvent(
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
)
)
elif isinstance(event, ParallelBranchRunSucceededEvent):
self._publish_event(
QueueParallelBranchRunSucceededEvent(
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
)
)
elif isinstance(event, ParallelBranchRunFailedEvent):
self._publish_event(
QueueParallelBranchRunFailedEvent(
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
error=event.error,
)
)
elif isinstance(event, IterationRunStartedEvent):
elif isinstance(event, NodeRunIterationStartedEvent):
self._publish_event(
QueueIterationStartEvent(
node_execution_id=event.iteration_id,
node_id=event.iteration_node_id,
node_type=event.iteration_node_type,
node_data=event.iteration_node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
node_execution_id=event.id,
node_id=event.node_id,
node_type=event.node_type,
node_title=event.node_title,
start_at=event.start_at,
node_run_index=workflow_entry.graph_engine.graph_runtime_state.node_run_steps,
inputs=event.inputs,
@ -599,55 +509,41 @@ class WorkflowBasedAppRunner:
metadata=event.metadata,
)
)
elif isinstance(event, IterationRunNextEvent):
elif isinstance(event, NodeRunIterationNextEvent):
self._publish_event(
QueueIterationNextEvent(
node_execution_id=event.iteration_id,
node_id=event.iteration_node_id,
node_type=event.iteration_node_type,
node_data=event.iteration_node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
node_execution_id=event.id,
node_id=event.node_id,
node_type=event.node_type,
node_title=event.node_title,
index=event.index,
node_run_index=workflow_entry.graph_engine.graph_runtime_state.node_run_steps,
output=event.pre_iteration_output,
parallel_mode_run_id=event.parallel_mode_run_id,
duration=event.duration,
)
)
elif isinstance(event, (IterationRunSucceededEvent | IterationRunFailedEvent)):
elif isinstance(event, (NodeRunIterationSucceededEvent | NodeRunIterationFailedEvent)):
self._publish_event(
QueueIterationCompletedEvent(
node_execution_id=event.iteration_id,
node_id=event.iteration_node_id,
node_type=event.iteration_node_type,
node_data=event.iteration_node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
node_execution_id=event.id,
node_id=event.node_id,
node_type=event.node_type,
node_title=event.node_title,
start_at=event.start_at,
node_run_index=workflow_entry.graph_engine.graph_runtime_state.node_run_steps,
inputs=event.inputs,
outputs=event.outputs,
metadata=event.metadata,
steps=event.steps,
error=event.error if isinstance(event, IterationRunFailedEvent) else None,
error=event.error if isinstance(event, NodeRunIterationFailedEvent) else None,
)
)
elif isinstance(event, LoopRunStartedEvent):
elif isinstance(event, NodeRunLoopStartedEvent):
self._publish_event(
QueueLoopStartEvent(
node_execution_id=event.loop_id,
node_id=event.loop_node_id,
node_type=event.loop_node_type,
node_data=event.loop_node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
node_execution_id=event.id,
node_id=event.node_id,
node_type=event.node_type,
node_title=event.node_title,
start_at=event.start_at,
node_run_index=workflow_entry.graph_engine.graph_runtime_state.node_run_steps,
inputs=event.inputs,
@ -655,42 +551,32 @@ class WorkflowBasedAppRunner:
metadata=event.metadata,
)
)
elif isinstance(event, LoopRunNextEvent):
elif isinstance(event, NodeRunLoopNextEvent):
self._publish_event(
QueueLoopNextEvent(
node_execution_id=event.loop_id,
node_id=event.loop_node_id,
node_type=event.loop_node_type,
node_data=event.loop_node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
node_execution_id=event.id,
node_id=event.node_id,
node_type=event.node_type,
node_title=event.node_title,
index=event.index,
node_run_index=workflow_entry.graph_engine.graph_runtime_state.node_run_steps,
output=event.pre_loop_output,
parallel_mode_run_id=event.parallel_mode_run_id,
duration=event.duration,
)
)
elif isinstance(event, (LoopRunSucceededEvent | LoopRunFailedEvent)):
elif isinstance(event, (NodeRunLoopSucceededEvent | NodeRunLoopFailedEvent)):
self._publish_event(
QueueLoopCompletedEvent(
node_execution_id=event.loop_id,
node_id=event.loop_node_id,
node_type=event.loop_node_type,
node_data=event.loop_node_data,
parallel_id=event.parallel_id,
parallel_start_node_id=event.parallel_start_node_id,
parent_parallel_id=event.parent_parallel_id,
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
node_execution_id=event.id,
node_id=event.node_id,
node_type=event.node_type,
node_title=event.node_title,
start_at=event.start_at,
node_run_index=workflow_entry.graph_engine.graph_runtime_state.node_run_steps,
inputs=event.inputs,
outputs=event.outputs,
metadata=event.metadata,
steps=event.steps,
error=event.error if isinstance(event, LoopRunFailedEvent) else None,
error=event.error if isinstance(event, NodeRunLoopFailedEvent) else None,
)
)

View File

@ -1,9 +1,12 @@
from collections.abc import Mapping, Sequence
from enum import StrEnum
from typing import Any, Optional
from typing import TYPE_CHECKING, Any, Optional
from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator
if TYPE_CHECKING:
from core.ops.ops_trace_manager import TraceQueueManager
from constants import UUID_NIL
from core.app.app_config.entities import EasyUIBasedAppConfig, WorkflowUIBasedAppConfig
from core.entities.provider_configuration import ProviderModelBundle
@ -35,6 +38,7 @@ class InvokeFrom(StrEnum):
# DEBUGGER indicates that this invocation is from
# the workflow (or chatflow) edit page.
DEBUGGER = "debugger"
PUBLISHED = "published"
@classmethod
def value_of(cls, value: str):
@ -113,8 +117,7 @@ class AppGenerateEntity(BaseModel):
extras: dict[str, Any] = Field(default_factory=dict)
# tracing instance
# Using Any to avoid circular import with TraceQueueManager
trace_manager: Optional[Any] = None
trace_manager: Optional["TraceQueueManager"] = None
class EasyUIBasedAppGenerateEntity(AppGenerateEntity):
@ -240,3 +243,53 @@ class WorkflowAppGenerateEntity(AppGenerateEntity):
inputs: dict
single_loop_run: Optional[SingleLoopRunEntity] = None
class RagPipelineGenerateEntity(WorkflowAppGenerateEntity):
"""
RAG Pipeline Application Generate Entity.
"""
# pipeline config
pipeline_config: WorkflowUIBasedAppConfig
datasource_type: str
datasource_info: Mapping[str, Any]
dataset_id: str
batch: str
document_id: Optional[str] = None
start_node_id: Optional[str] = None
class SingleIterationRunEntity(BaseModel):
"""
Single Iteration Run Entity.
"""
node_id: str
inputs: dict
single_iteration_run: Optional[SingleIterationRunEntity] = None
class SingleLoopRunEntity(BaseModel):
"""
Single Loop Run Entity.
"""
node_id: str
inputs: dict
single_loop_run: Optional[SingleLoopRunEntity] = None
# Import TraceQueueManager at runtime to resolve forward references
from core.ops.ops_trace_manager import TraceQueueManager
# Rebuild models that use forward references
AppGenerateEntity.model_rebuild()
EasyUIBasedAppGenerateEntity.model_rebuild()
ConversationAppGenerateEntity.model_rebuild()
ChatAppGenerateEntity.model_rebuild()
CompletionAppGenerateEntity.model_rebuild()
AgentChatAppGenerateEntity.model_rebuild()
AdvancedChatAppGenerateEntity.model_rebuild()
WorkflowAppGenerateEntity.model_rebuild()
RagPipelineGenerateEntity.model_rebuild()

View File

@ -7,11 +7,9 @@ from pydantic import BaseModel
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk
from core.rag.entities.citation_metadata import RetrievalSourceMetadata
from core.workflow.entities.node_entities import AgentNodeStrategyInit
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.entities import AgentNodeStrategyInit, GraphRuntimeState
from core.workflow.enums import WorkflowNodeExecutionMetadataKey
from core.workflow.nodes import NodeType
from core.workflow.nodes.base import BaseNodeData
class QueueEvent(StrEnum):
@ -43,9 +41,6 @@ class QueueEvent(StrEnum):
ANNOTATION_REPLY = "annotation_reply"
AGENT_THOUGHT = "agent_thought"
MESSAGE_FILE = "message_file"
PARALLEL_BRANCH_RUN_STARTED = "parallel_branch_run_started"
PARALLEL_BRANCH_RUN_SUCCEEDED = "parallel_branch_run_succeeded"
PARALLEL_BRANCH_RUN_FAILED = "parallel_branch_run_failed"
AGENT_LOG = "agent_log"
ERROR = "error"
PING = "ping"
@ -80,15 +75,7 @@ class QueueIterationStartEvent(AppQueueEvent):
node_execution_id: str
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
"""parent parallel start node id if node is in parallel"""
node_title: str
start_at: datetime
node_run_index: int
@ -108,20 +95,9 @@ class QueueIterationNextEvent(AppQueueEvent):
node_execution_id: str
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
"""parent parallel start node id if node is in parallel"""
parallel_mode_run_id: Optional[str] = None
"""iteration run in parallel mode run id"""
node_title: str
node_run_index: int
output: Optional[Any] = None # output for the current iteration
duration: Optional[float] = None
class QueueIterationCompletedEvent(AppQueueEvent):
@ -134,15 +110,7 @@ class QueueIterationCompletedEvent(AppQueueEvent):
node_execution_id: str
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
"""parent parallel start node id if node is in parallel"""
node_title: str
start_at: datetime
node_run_index: int
@ -163,7 +131,7 @@ class QueueLoopStartEvent(AppQueueEvent):
node_execution_id: str
node_id: str
node_type: NodeType
node_data: BaseNodeData
node_title: str
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
@ -191,7 +159,7 @@ class QueueLoopNextEvent(AppQueueEvent):
node_execution_id: str
node_id: str
node_type: NodeType
node_data: BaseNodeData
node_title: str
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
@ -204,7 +172,6 @@ class QueueLoopNextEvent(AppQueueEvent):
"""iteration run in parallel mode run id"""
node_run_index: int
output: Optional[Any] = None # output for the current loop
duration: Optional[float] = None
class QueueLoopCompletedEvent(AppQueueEvent):
@ -217,7 +184,7 @@ class QueueLoopCompletedEvent(AppQueueEvent):
node_execution_id: str
node_id: str
node_type: NodeType
node_data: BaseNodeData
node_title: str
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
@ -364,27 +331,24 @@ class QueueNodeStartedEvent(AppQueueEvent):
node_execution_id: str
node_id: str
node_title: str
node_type: NodeType
node_data: BaseNodeData
node_run_index: int = 1
node_run_index: int = 1 # FIXME(-LAN-): may not be used
predecessor_node_id: Optional[str] = None
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
"""loop id if node is in loop"""
start_at: datetime
parallel_mode_run_id: Optional[str] = None
"""iteration run in parallel mode run id"""
agent_strategy: Optional[AgentNodeStrategyInit] = None
# FIXME(-LAN-): only for ToolNode, need to refactor
provider_type: str # should be a core.tools.entities.tool_entities.ToolProviderType
provider_id: str
class QueueNodeSucceededEvent(AppQueueEvent):
"""
@ -396,7 +360,6 @@ class QueueNodeSucceededEvent(AppQueueEvent):
node_execution_id: str
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
@ -417,10 +380,6 @@ class QueueNodeSucceededEvent(AppQueueEvent):
execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
error: Optional[str] = None
"""single iteration duration map"""
iteration_duration_map: Optional[dict[str, float]] = None
"""single loop duration map"""
loop_duration_map: Optional[dict[str, float]] = None
class QueueAgentLogEvent(AppQueueEvent):
@ -454,72 +413,6 @@ class QueueNodeRetryEvent(QueueNodeStartedEvent):
retry_index: int # retry index
class QueueNodeInIterationFailedEvent(AppQueueEvent):
"""
QueueNodeInIterationFailedEvent entity
"""
event: QueueEvent = QueueEvent.NODE_FAILED
node_execution_id: str
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
"""loop id if node is in loop"""
start_at: datetime
inputs: Optional[Mapping[str, Any]] = None
process_data: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
error: str
class QueueNodeInLoopFailedEvent(AppQueueEvent):
"""
QueueNodeInLoopFailedEvent entity
"""
event: QueueEvent = QueueEvent.NODE_FAILED
node_execution_id: str
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
"""loop id if node is in loop"""
start_at: datetime
inputs: Optional[Mapping[str, Any]] = None
process_data: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
error: str
class QueueNodeExceptionEvent(AppQueueEvent):
"""
QueueNodeExceptionEvent entity
@ -530,7 +423,6 @@ class QueueNodeExceptionEvent(AppQueueEvent):
node_execution_id: str
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
@ -563,15 +455,7 @@ class QueueNodeFailedEvent(AppQueueEvent):
node_execution_id: str
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
@ -678,61 +562,3 @@ class WorkflowQueueMessage(QueueMessage):
"""
pass
class QueueParallelBranchRunStartedEvent(AppQueueEvent):
"""
QueueParallelBranchRunStartedEvent entity
"""
event: QueueEvent = QueueEvent.PARALLEL_BRANCH_RUN_STARTED
parallel_id: str
parallel_start_node_id: str
parent_parallel_id: Optional[str] = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
"""loop id if node is in loop"""
class QueueParallelBranchRunSucceededEvent(AppQueueEvent):
"""
QueueParallelBranchRunSucceededEvent entity
"""
event: QueueEvent = QueueEvent.PARALLEL_BRANCH_RUN_SUCCEEDED
parallel_id: str
parallel_start_node_id: str
parent_parallel_id: Optional[str] = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
"""loop id if node is in loop"""
class QueueParallelBranchRunFailedEvent(AppQueueEvent):
"""
QueueParallelBranchRunFailedEvent entity
"""
event: QueueEvent = QueueEvent.PARALLEL_BRANCH_RUN_FAILED
parallel_id: str
parallel_start_node_id: str
parent_parallel_id: Optional[str] = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
"""loop id if node is in loop"""
error: str

View File

@ -7,8 +7,8 @@ from pydantic import BaseModel, ConfigDict, Field
from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage
from core.model_runtime.utils.encoders import jsonable_encoder
from core.rag.entities.citation_metadata import RetrievalSourceMetadata
from core.workflow.entities.node_entities import AgentNodeStrategyInit
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
from core.workflow.entities import AgentNodeStrategyInit
from core.workflow.enums import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
class AnnotationReplyAccount(BaseModel):
@ -71,8 +71,6 @@ class StreamEvent(Enum):
NODE_STARTED = "node_started"
NODE_FINISHED = "node_finished"
NODE_RETRY = "node_retry"
PARALLEL_BRANCH_STARTED = "parallel_branch_started"
PARALLEL_BRANCH_FINISHED = "parallel_branch_finished"
ITERATION_STARTED = "iteration_started"
ITERATION_NEXT = "iteration_next"
ITERATION_COMPLETED = "iteration_completed"
@ -260,6 +258,7 @@ class NodeStartStreamResponse(StreamResponse):
index: int
predecessor_node_id: Optional[str] = None
inputs: Optional[Mapping[str, Any]] = None
inputs_truncated: bool = False
created_at: int
extras: dict = Field(default_factory=dict)
parallel_id: Optional[str] = None
@ -317,8 +316,11 @@ class NodeFinishStreamResponse(StreamResponse):
index: int
predecessor_node_id: Optional[str] = None
inputs: Optional[Mapping[str, Any]] = None
inputs_truncated: bool = False
process_data: Optional[Mapping[str, Any]] = None
process_data_truncated: bool = False
outputs: Optional[Mapping[str, Any]] = None
outputs_truncated: bool = False
status: str
error: Optional[str] = None
elapsed_time: float
@ -386,8 +388,11 @@ class NodeRetryStreamResponse(StreamResponse):
index: int
predecessor_node_id: Optional[str] = None
inputs: Optional[Mapping[str, Any]] = None
inputs_truncated: bool = False
process_data: Optional[Mapping[str, Any]] = None
process_data_truncated: bool = False
outputs: Optional[Mapping[str, Any]] = None
outputs_truncated: bool = False
status: str
error: Optional[str] = None
elapsed_time: float
@ -440,54 +445,6 @@ class NodeRetryStreamResponse(StreamResponse):
}
class ParallelBranchStartStreamResponse(StreamResponse):
"""
ParallelBranchStartStreamResponse entity
"""
class Data(BaseModel):
"""
Data entity
"""
parallel_id: str
parallel_branch_id: str
parent_parallel_id: Optional[str] = None
parent_parallel_start_node_id: Optional[str] = None
iteration_id: Optional[str] = None
loop_id: Optional[str] = None
created_at: int
event: StreamEvent = StreamEvent.PARALLEL_BRANCH_STARTED
workflow_run_id: str
data: Data
class ParallelBranchFinishedStreamResponse(StreamResponse):
"""
ParallelBranchFinishedStreamResponse entity
"""
class Data(BaseModel):
"""
Data entity
"""
parallel_id: str
parallel_branch_id: str
parent_parallel_id: Optional[str] = None
parent_parallel_start_node_id: Optional[str] = None
iteration_id: Optional[str] = None
loop_id: Optional[str] = None
status: str
error: Optional[str] = None
created_at: int
event: StreamEvent = StreamEvent.PARALLEL_BRANCH_FINISHED
workflow_run_id: str
data: Data
class IterationNodeStartStreamResponse(StreamResponse):
"""
NodeStartStreamResponse entity
@ -506,8 +463,7 @@ class IterationNodeStartStreamResponse(StreamResponse):
extras: dict = Field(default_factory=dict)
metadata: Mapping = {}
inputs: Mapping = {}
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
inputs_truncated: bool = False
event: StreamEvent = StreamEvent.ITERATION_STARTED
workflow_run_id: str
@ -530,12 +486,7 @@ class IterationNodeNextStreamResponse(StreamResponse):
title: str
index: int
created_at: int
pre_iteration_output: Optional[Any] = None
extras: dict = Field(default_factory=dict)
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
parallel_mode_run_id: Optional[str] = None
duration: Optional[float] = None
event: StreamEvent = StreamEvent.ITERATION_NEXT
workflow_run_id: str
@ -557,9 +508,11 @@ class IterationNodeCompletedStreamResponse(StreamResponse):
node_type: str
title: str
outputs: Optional[Mapping] = None
outputs_truncated: bool = False
created_at: int
extras: Optional[dict] = None
inputs: Optional[Mapping] = None
inputs_truncated: bool = False
status: WorkflowNodeExecutionStatus
error: Optional[str] = None
elapsed_time: float
@ -567,8 +520,6 @@ class IterationNodeCompletedStreamResponse(StreamResponse):
execution_metadata: Optional[Mapping] = None
finished_at: int
steps: int
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
event: StreamEvent = StreamEvent.ITERATION_COMPLETED
workflow_run_id: str
@ -593,6 +544,7 @@ class LoopNodeStartStreamResponse(StreamResponse):
extras: dict = Field(default_factory=dict)
metadata: Mapping = {}
inputs: Mapping = {}
inputs_truncated: bool = False
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
@ -622,7 +574,6 @@ class LoopNodeNextStreamResponse(StreamResponse):
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
parallel_mode_run_id: Optional[str] = None
duration: Optional[float] = None
event: StreamEvent = StreamEvent.LOOP_NEXT
workflow_run_id: str
@ -644,9 +595,11 @@ class LoopNodeCompletedStreamResponse(StreamResponse):
node_type: str
title: str
outputs: Optional[Mapping] = None
outputs_truncated: bool = False
created_at: int
extras: Optional[dict] = None
inputs: Optional[Mapping] = None
inputs_truncated: bool = False
status: WorkflowNodeExecutionStatus
error: Optional[str] = None
elapsed_time: float

View File

@ -105,6 +105,14 @@ class DifyAgentCallbackHandler(BaseModel):
self.current_loop += 1
def on_datasource_start(self, datasource_name: str, datasource_inputs: Mapping[str, Any]) -> None:
"""Run on datasource start."""
if dify_config.DEBUG:
print_text(
"\n[on_datasource_start] DatasourceCall:" + datasource_name + "\n" + str(datasource_inputs) + "\n",
color=self.color,
)
@property
def ignore_agent(self) -> bool:
"""Whether to ignore agent callbacks."""

View File

@ -0,0 +1,41 @@
from abc import ABC, abstractmethod
from configs import dify_config
from core.datasource.__base.datasource_runtime import DatasourceRuntime
from core.datasource.entities.datasource_entities import (
DatasourceEntity,
DatasourceProviderType,
)
class DatasourcePlugin(ABC):
entity: DatasourceEntity
runtime: DatasourceRuntime
icon: str
def __init__(
self,
entity: DatasourceEntity,
runtime: DatasourceRuntime,
icon: str,
) -> None:
self.entity = entity
self.runtime = runtime
self.icon = icon
@abstractmethod
def datasource_provider_type(self) -> str:
"""
returns the type of the datasource provider
"""
return DatasourceProviderType.LOCAL_FILE
def fork_datasource_runtime(self, runtime: DatasourceRuntime) -> "DatasourcePlugin":
return self.__class__(
entity=self.entity.model_copy(),
runtime=runtime,
icon=self.icon,
)
def get_icon_url(self, tenant_id: str) -> str:
return f"{dify_config.CONSOLE_API_URL}/console/api/workspaces/current/plugin/icon?tenant_id={tenant_id}&filename={self.icon}" # noqa: E501

View File

@ -0,0 +1,118 @@
from abc import ABC, abstractmethod
from typing import Any
from core.datasource.__base.datasource_plugin import DatasourcePlugin
from core.datasource.entities.datasource_entities import DatasourceProviderEntityWithPlugin, DatasourceProviderType
from core.entities.provider_entities import ProviderConfig
from core.plugin.impl.tool import PluginToolManager
from core.tools.errors import ToolProviderCredentialValidationError
class DatasourcePluginProviderController(ABC):
entity: DatasourceProviderEntityWithPlugin
tenant_id: str
def __init__(self, entity: DatasourceProviderEntityWithPlugin, tenant_id: str) -> None:
self.entity = entity
self.tenant_id = tenant_id
@property
def need_credentials(self) -> bool:
"""
returns whether the provider needs credentials
:return: whether the provider needs credentials
"""
return self.entity.credentials_schema is not None and len(self.entity.credentials_schema) != 0
def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None:
"""
validate the credentials of the provider
"""
manager = PluginToolManager()
if not manager.validate_datasource_credentials(
tenant_id=self.tenant_id,
user_id=user_id,
provider=self.entity.identity.name,
credentials=credentials,
):
raise ToolProviderCredentialValidationError("Invalid credentials")
@property
def provider_type(self) -> DatasourceProviderType:
"""
returns the type of the provider
"""
return DatasourceProviderType.LOCAL_FILE
@abstractmethod
def get_datasource(self, datasource_name: str) -> DatasourcePlugin:
"""
return datasource with given name
"""
pass
def validate_credentials_format(self, credentials: dict[str, Any]) -> None:
"""
validate the format of the credentials of the provider and set the default value if needed
:param credentials: the credentials of the datasource provider
"""
if not self.entity.credentials_schema:
return
credentials_schema = dict[str, ProviderConfig]()
for credential in self.entity.credentials_schema:
credentials_schema[credential.name] = credential
credentials_need_to_validate: dict[str, ProviderConfig] = {}
for credential_name in credentials_schema:
credentials_need_to_validate[credential_name] = credentials_schema[credential_name]
for credential_name in credentials:
if credential_name not in credentials_need_to_validate:
raise ToolProviderCredentialValidationError(
f"credential {credential_name} not found in provider {self.entity.identity.name}"
)
# check type
credential_schema = credentials_need_to_validate[credential_name]
if not credential_schema.required and credentials[credential_name] is None:
continue
if credential_schema.type in {ProviderConfig.Type.SECRET_INPUT, ProviderConfig.Type.TEXT_INPUT}:
if not isinstance(credentials[credential_name], str):
raise ToolProviderCredentialValidationError(f"credential {credential_name} should be string")
elif credential_schema.type == ProviderConfig.Type.SELECT:
if not isinstance(credentials[credential_name], str):
raise ToolProviderCredentialValidationError(f"credential {credential_name} should be string")
options = credential_schema.options
if not isinstance(options, list):
raise ToolProviderCredentialValidationError(f"credential {credential_name} options should be list")
if credentials[credential_name] not in [x.value for x in options]:
raise ToolProviderCredentialValidationError(
f"credential {credential_name} should be one of {options}"
)
credentials_need_to_validate.pop(credential_name)
for credential_name in credentials_need_to_validate:
credential_schema = credentials_need_to_validate[credential_name]
if credential_schema.required:
raise ToolProviderCredentialValidationError(f"credential {credential_name} is required")
# the credential is not set currently, set the default value if needed
if credential_schema.default is not None:
default_value = credential_schema.default
# parse default value into the correct type
if credential_schema.type in {
ProviderConfig.Type.SECRET_INPUT,
ProviderConfig.Type.TEXT_INPUT,
ProviderConfig.Type.SELECT,
}:
default_value = str(default_value)
credentials[credential_name] = default_value

View File

@ -0,0 +1,42 @@
from typing import Any, Optional
from pydantic import BaseModel, Field
from core.app.entities.app_invoke_entities import InvokeFrom
from core.datasource.entities.datasource_entities import DatasourceInvokeFrom
class DatasourceRuntime(BaseModel):
"""
Meta data of a datasource call processing
"""
tenant_id: str
datasource_id: Optional[str] = None
invoke_from: Optional["InvokeFrom"] = None
datasource_invoke_from: Optional[DatasourceInvokeFrom] = None
credentials: dict[str, Any] = Field(default_factory=dict)
runtime_parameters: dict[str, Any] = Field(default_factory=dict)
class FakeDatasourceRuntime(DatasourceRuntime):
"""
Fake datasource runtime for testing
"""
def __init__(self):
super().__init__(
tenant_id="fake_tenant_id",
datasource_id="fake_datasource_id",
invoke_from=InvokeFrom.DEBUGGER,
datasource_invoke_from=DatasourceInvokeFrom.RAG_PIPELINE,
credentials={},
runtime_parameters={},
)

View File

@ -0,0 +1,243 @@
import base64
import hashlib
import hmac
import logging
import os
import time
from datetime import datetime
from mimetypes import guess_extension, guess_type
from typing import Optional, Union
from uuid import uuid4
import httpx
from configs import dify_config
from core.helper import ssrf_proxy
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.enums import CreatorUserRole
from models.model import MessageFile, UploadFile
from models.tools import ToolFile
logger = logging.getLogger(__name__)
class DatasourceFileManager:
@staticmethod
def sign_file(datasource_file_id: str, extension: str) -> str:
"""
sign file to get a temporary url
"""
base_url = dify_config.FILES_URL
file_preview_url = f"{base_url}/files/datasources/{datasource_file_id}{extension}"
timestamp = str(int(time.time()))
nonce = os.urandom(16).hex()
data_to_sign = f"file-preview|{datasource_file_id}|{timestamp}|{nonce}"
secret_key = dify_config.SECRET_KEY.encode() if dify_config.SECRET_KEY else b""
sign = hmac.new(secret_key, data_to_sign.encode(), hashlib.sha256).digest()
encoded_sign = base64.urlsafe_b64encode(sign).decode()
return f"{file_preview_url}?timestamp={timestamp}&nonce={nonce}&sign={encoded_sign}"
@staticmethod
def verify_file(datasource_file_id: str, timestamp: str, nonce: str, sign: str) -> bool:
"""
verify signature
"""
data_to_sign = f"file-preview|{datasource_file_id}|{timestamp}|{nonce}"
secret_key = dify_config.SECRET_KEY.encode() if dify_config.SECRET_KEY else b""
recalculated_sign = hmac.new(secret_key, data_to_sign.encode(), hashlib.sha256).digest()
recalculated_encoded_sign = base64.urlsafe_b64encode(recalculated_sign).decode()
# verify signature
if sign != recalculated_encoded_sign:
return False
current_time = int(time.time())
return current_time - int(timestamp) <= dify_config.FILES_ACCESS_TIMEOUT
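A self-contained sketch of the signing scheme implemented by sign_file and verify_file above, using a placeholder secret key and timeout instead of dify_config; it shows the sign/verify round trip without touching storage or the database.
import base64
import hashlib
import hmac
import os
import time

SECRET_KEY = b"example-secret"  # placeholder for dify_config.SECRET_KEY
FILES_ACCESS_TIMEOUT = 300  # placeholder for dify_config.FILES_ACCESS_TIMEOUT

def sign(file_id: str) -> tuple[str, str, str]:
    # Same payload layout as sign_file: "file-preview|<id>|<timestamp>|<nonce>"
    timestamp = str(int(time.time()))
    nonce = os.urandom(16).hex()
    payload = f"file-preview|{file_id}|{timestamp}|{nonce}"
    digest = hmac.new(SECRET_KEY, payload.encode(), hashlib.sha256).digest()
    return timestamp, nonce, base64.urlsafe_b64encode(digest).decode()

def verify(file_id: str, timestamp: str, nonce: str, signature: str) -> bool:
    payload = f"file-preview|{file_id}|{timestamp}|{nonce}"
    digest = hmac.new(SECRET_KEY, payload.encode(), hashlib.sha256).digest()
    if signature != base64.urlsafe_b64encode(digest).decode():
        return False
    return int(time.time()) - int(timestamp) <= FILES_ACCESS_TIMEOUT

ts, nonce, sig = sign("file-id-123")
assert verify("file-id-123", ts, nonce, sig)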
@staticmethod
def create_file_by_raw(
*,
user_id: str,
tenant_id: str,
conversation_id: Optional[str],
file_binary: bytes,
mimetype: str,
filename: Optional[str] = None,
) -> UploadFile:
extension = guess_extension(mimetype) or ".bin"
unique_name = uuid4().hex
unique_filename = f"{unique_name}{extension}"
# default just as before
present_filename = unique_filename
if filename is not None:
has_extension = len(filename.split(".")) > 1
# Add extension flexibly
present_filename = filename if has_extension else f"{filename}{extension}"
filepath = f"datasources/{tenant_id}/{unique_filename}"
storage.save(filepath, file_binary)
upload_file = UploadFile(
tenant_id=tenant_id,
storage_type=dify_config.STORAGE_TYPE,
key=filepath,
name=present_filename,
size=len(file_binary),
extension=extension,
mime_type=mimetype,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=user_id,
used=False,
hash=hashlib.sha3_256(file_binary).hexdigest(),
source_url="",
created_at=datetime.now(),
)
db.session.add(upload_file)
db.session.commit()
db.session.refresh(upload_file)
return upload_file
@staticmethod
def create_file_by_url(
user_id: str,
tenant_id: str,
file_url: str,
conversation_id: Optional[str] = None,
) -> ToolFile:
# try to download image
try:
response = ssrf_proxy.get(file_url)
response.raise_for_status()
blob = response.content
except httpx.TimeoutException:
raise ValueError(f"timeout when downloading file from {file_url}")
mimetype = (
guess_type(file_url)[0]
or response.headers.get("Content-Type", "").split(";")[0].strip()
or "application/octet-stream"
)
extension = guess_extension(mimetype) or ".bin"
unique_name = uuid4().hex
filename = f"{unique_name}{extension}"
filepath = f"tools/{tenant_id}/{filename}"
storage.save(filepath, blob)
tool_file = ToolFile(
tenant_id=tenant_id,
user_id=user_id,
conversation_id=conversation_id,
file_key=filepath,
mimetype=mimetype,
original_url=file_url,
name=filename,
size=len(blob),
key=filepath,
)
db.session.add(tool_file)
db.session.commit()
return tool_file
@staticmethod
def get_file_binary(id: str) -> Union[tuple[bytes, str], None]:
"""
get file binary
:param id: the id of the file
:return: the binary of the file, mime type
"""
upload_file: UploadFile | None = (
db.session.query(UploadFile)
.filter(
UploadFile.id == id,
)
.first()
)
if not upload_file:
return None
blob = storage.load_once(upload_file.key)
return blob, upload_file.mime_type
@staticmethod
def get_file_binary_by_message_file_id(id: str) -> Union[tuple[bytes, str], None]:
"""
get file binary
:param id: the id of the message file
:return: the binary of the file, mime type
"""
message_file: MessageFile | None = (
db.session.query(MessageFile)
.filter(
MessageFile.id == id,
)
.first()
)
# Check if message_file is not None
if message_file is not None:
# get tool file id
if message_file.url is not None:
tool_file_id = message_file.url.split("/")[-1]
# trim extension
tool_file_id = tool_file_id.split(".")[0]
else:
tool_file_id = None
else:
tool_file_id = None
tool_file: ToolFile | None = (
db.session.query(ToolFile)
.filter(
ToolFile.id == tool_file_id,
)
.first()
)
if not tool_file:
return None
blob = storage.load_once(tool_file.file_key)
return blob, tool_file.mimetype
@staticmethod
def get_file_generator_by_upload_file_id(upload_file_id: str):
"""
get file stream
:param upload_file_id: the id of the upload file
:return: the stream of the file, mime type
"""
upload_file: UploadFile | None = (
db.session.query(UploadFile)
.filter(
UploadFile.id == upload_file_id,
)
.first()
)
if not upload_file:
return None, None
stream = storage.load_stream(upload_file.key)
return stream, upload_file.mime_type
# init tool_file_parser
# from core.file.datasource_file_parser import datasource_file_manager
#
# datasource_file_manager["manager"] = DatasourceFileManager

View File

@ -0,0 +1,108 @@
import logging
from threading import Lock
from typing import Union
import contexts
from core.datasource.__base.datasource_plugin import DatasourcePlugin
from core.datasource.__base.datasource_provider import DatasourcePluginProviderController
from core.datasource.entities.common_entities import I18nObject
from core.datasource.entities.datasource_entities import DatasourceProviderType
from core.datasource.errors import DatasourceProviderNotFoundError
from core.datasource.local_file.local_file_provider import LocalFileDatasourcePluginProviderController
from core.datasource.online_document.online_document_provider import OnlineDocumentDatasourcePluginProviderController
from core.datasource.online_drive.online_drive_provider import OnlineDriveDatasourcePluginProviderController
from core.datasource.website_crawl.website_crawl_provider import WebsiteCrawlDatasourcePluginProviderController
from core.plugin.impl.datasource import PluginDatasourceManager
logger = logging.getLogger(__name__)
class DatasourceManager:
_builtin_provider_lock = Lock()
_hardcoded_providers: dict[str, DatasourcePluginProviderController] = {}
_builtin_providers_loaded = False
_builtin_tools_labels: dict[str, Union[I18nObject, None]] = {}
@classmethod
def get_datasource_plugin_provider(
cls, provider_id: str, tenant_id: str, datasource_type: DatasourceProviderType
) -> DatasourcePluginProviderController:
"""
get the datasource plugin provider
"""
# check if context is set
try:
contexts.datasource_plugin_providers.get()
except LookupError:
contexts.datasource_plugin_providers.set({})
contexts.datasource_plugin_providers_lock.set(Lock())
with contexts.datasource_plugin_providers_lock.get():
datasource_plugin_providers = contexts.datasource_plugin_providers.get()
if provider_id in datasource_plugin_providers:
return datasource_plugin_providers[provider_id]
manager = PluginDatasourceManager()
provider_entity = manager.fetch_datasource_provider(tenant_id, provider_id)
if not provider_entity:
raise DatasourceProviderNotFoundError(f"plugin provider {provider_id} not found")
match datasource_type:
case DatasourceProviderType.ONLINE_DOCUMENT:
controller = OnlineDocumentDatasourcePluginProviderController(
entity=provider_entity.declaration,
plugin_id=provider_entity.plugin_id,
plugin_unique_identifier=provider_entity.plugin_unique_identifier,
tenant_id=tenant_id,
)
case DatasourceProviderType.ONLINE_DRIVE:
controller = OnlineDriveDatasourcePluginProviderController(
entity=provider_entity.declaration,
plugin_id=provider_entity.plugin_id,
plugin_unique_identifier=provider_entity.plugin_unique_identifier,
tenant_id=tenant_id,
)
case DatasourceProviderType.WEBSITE_CRAWL:
controller = WebsiteCrawlDatasourcePluginProviderController(
entity=provider_entity.declaration,
plugin_id=provider_entity.plugin_id,
plugin_unique_identifier=provider_entity.plugin_unique_identifier,
tenant_id=tenant_id,
)
case DatasourceProviderType.LOCAL_FILE:
controller = LocalFileDatasourcePluginProviderController(
entity=provider_entity.declaration,
plugin_id=provider_entity.plugin_id,
plugin_unique_identifier=provider_entity.plugin_unique_identifier,
tenant_id=tenant_id,
)
case _:
raise ValueError(f"Unsupported datasource type: {datasource_type}")
datasource_plugin_providers[provider_id] = controller
return controller
@classmethod
def get_datasource_runtime(
cls,
provider_id: str,
datasource_name: str,
tenant_id: str,
datasource_type: DatasourceProviderType,
) -> DatasourcePlugin:
"""
get the datasource runtime
:param provider_id: the id of the provider
:param datasource_name: the name of the datasource
:param tenant_id: the tenant id
:param datasource_type: the type of the datasource provider
:return: the datasource plugin
"""
return cls.get_datasource_plugin_provider(
provider_id,
tenant_id,
datasource_type,
).get_datasource(datasource_name)
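Given the manager above, a hedged usage sketch for resolving a datasource runtime; the provider id, datasource name, and tenant id are hypothetical values for illustration only.
# Hypothetical identifiers; real values come from the installed plugin and the tenant.
datasource = DatasourceManager.get_datasource_runtime(
    provider_id="langgenius/notion/notion",
    datasource_name="notion_datasource",
    tenant_id="tenant-123",
    datasource_type=DatasourceProviderType.ONLINE_DOCUMENT,
)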

View File

@ -0,0 +1,71 @@
from typing import Literal, Optional
from pydantic import BaseModel, Field, field_validator
from core.datasource.entities.datasource_entities import DatasourceParameter
from core.model_runtime.utils.encoders import jsonable_encoder
from core.tools.entities.common_entities import I18nObject
class DatasourceApiEntity(BaseModel):
author: str
name: str # identifier
label: I18nObject # label
description: I18nObject
parameters: Optional[list[DatasourceParameter]] = None
labels: list[str] = Field(default_factory=list)
output_schema: Optional[dict] = None
ToolProviderTypeApiLiteral = Optional[Literal["builtin", "api", "workflow"]]
class DatasourceProviderApiEntity(BaseModel):
id: str
author: str
name: str # identifier
description: I18nObject
icon: str | dict
label: I18nObject # label
type: str
masked_credentials: Optional[dict] = None
original_credentials: Optional[dict] = None
is_team_authorization: bool = False
allow_delete: bool = True
plugin_id: Optional[str] = Field(default="", description="The plugin id of the datasource")
plugin_unique_identifier: Optional[str] = Field(default="", description="The unique identifier of the datasource")
datasources: list[DatasourceApiEntity] = Field(default_factory=list)
labels: list[str] = Field(default_factory=list)
@field_validator("datasources", mode="before")
@classmethod
def convert_none_to_empty_list(cls, v):
return v if v is not None else []
def to_dict(self) -> dict:
# -------------
# overwrite datasource parameter types for temp fix
datasources = jsonable_encoder(self.datasources)
for datasource in datasources:
if datasource.get("parameters"):
for parameter in datasource.get("parameters"):
if parameter.get("type") == DatasourceParameter.DatasourceParameterType.SYSTEM_FILES.value:
parameter["type"] = "files"
# -------------
return {
"id": self.id,
"author": self.author,
"name": self.name,
"plugin_id": self.plugin_id,
"plugin_unique_identifier": self.plugin_unique_identifier,
"description": self.description.to_dict(),
"icon": self.icon,
"label": self.label.to_dict(),
"type": self.type.value,
"team_credentials": self.masked_credentials,
"is_team_authorization": self.is_team_authorization,
"allow_delete": self.allow_delete,
"datasources": datasources,
"labels": self.labels,
}

View File

@ -0,0 +1,23 @@
from typing import Optional
from pydantic import BaseModel, Field
class I18nObject(BaseModel):
"""
Model class for i18n object.
"""
en_US: str
zh_Hans: Optional[str] = Field(default=None)
pt_BR: Optional[str] = Field(default=None)
ja_JP: Optional[str] = Field(default=None)
def __init__(self, **data):
super().__init__(**data)
self.zh_Hans = self.zh_Hans or self.en_US
self.pt_BR = self.pt_BR or self.en_US
self.ja_JP = self.ja_JP or self.en_US
def to_dict(self) -> dict:
return {"zh_Hans": self.zh_Hans, "en_US": self.en_US, "pt_BR": self.pt_BR, "ja_JP": self.ja_JP}

View File

@ -0,0 +1,380 @@
import enum
from enum import Enum
from typing import Any, Optional
from pydantic import BaseModel, Field, ValidationInfo, field_validator
from yarl import URL
from configs import dify_config
from core.entities.provider_entities import ProviderConfig
from core.plugin.entities.oauth import OAuthSchema
from core.plugin.entities.parameters import (
PluginParameter,
PluginParameterOption,
PluginParameterType,
as_normal_type,
cast_parameter_value,
init_frontend_parameter,
)
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import ToolInvokeMessage, ToolLabelEnum
class DatasourceProviderType(enum.StrEnum):
"""
Enum class for datasource provider
"""
ONLINE_DOCUMENT = "online_document"
LOCAL_FILE = "local_file"
WEBSITE_CRAWL = "website_crawl"
ONLINE_DRIVE = "online_drive"
@classmethod
def value_of(cls, value: str) -> "DatasourceProviderType":
"""
Get value of given mode.
:param value: mode value
:return: mode
"""
for mode in cls:
if mode.value == value:
return mode
raise ValueError(f"invalid mode value {value}")
class DatasourceParameter(PluginParameter):
"""
Overrides type
"""
class DatasourceParameterType(enum.StrEnum):
"""
removes TOOLS_SELECTOR from PluginParameterType
"""
STRING = PluginParameterType.STRING.value
NUMBER = PluginParameterType.NUMBER.value
BOOLEAN = PluginParameterType.BOOLEAN.value
SELECT = PluginParameterType.SELECT.value
SECRET_INPUT = PluginParameterType.SECRET_INPUT.value
FILE = PluginParameterType.FILE.value
FILES = PluginParameterType.FILES.value
# deprecated, should not use.
SYSTEM_FILES = PluginParameterType.SYSTEM_FILES.value
def as_normal_type(self):
return as_normal_type(self)
def cast_value(self, value: Any):
return cast_parameter_value(self, value)
type: DatasourceParameterType = Field(..., description="The type of the parameter")
description: I18nObject = Field(..., description="The description of the parameter")
@classmethod
def get_simple_instance(
cls,
name: str,
typ: DatasourceParameterType,
required: bool,
options: Optional[list[str]] = None,
) -> "DatasourceParameter":
"""
get a simple datasource parameter
:param name: the name of the parameter
:param llm_description: the description presented to the LLM
:param typ: the type of the parameter
:param required: if the parameter is required
:param options: the options of the parameter
"""
# convert options to ToolParameterOption
# FIXME fix the type error
if options:
option_objs = [
PluginParameterOption(value=option, label=I18nObject(en_US=option, zh_Hans=option))
for option in options
]
else:
option_objs = []
return cls(
name=name,
label=I18nObject(en_US="", zh_Hans=""),
placeholder=None,
type=typ,
required=required,
options=option_objs,
description=I18nObject(en_US="", zh_Hans=""),
)
def init_frontend_parameter(self, value: Any):
return init_frontend_parameter(self, self.type, value)
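A short illustration of get_simple_instance for a required select parameter; the parameter name and option values are made up.
mode_param = DatasourceParameter.get_simple_instance(
    name="crawl_mode",  # hypothetical parameter name
    typ=DatasourceParameter.DatasourceParameterType.SELECT,
    required=True,
    options=["single_page", "full_site"],
)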
class DatasourceIdentity(BaseModel):
author: str = Field(..., description="The author of the datasource")
name: str = Field(..., description="The name of the datasource")
label: I18nObject = Field(..., description="The label of the datasource")
provider: str = Field(..., description="The provider of the datasource")
icon: Optional[str] = None
class DatasourceEntity(BaseModel):
identity: DatasourceIdentity
parameters: list[DatasourceParameter] = Field(default_factory=list)
description: I18nObject = Field(..., description="The description of the datasource")
output_schema: Optional[dict] = None
@field_validator("parameters", mode="before")
@classmethod
def set_parameters(cls, v, validation_info: ValidationInfo) -> list[DatasourceParameter]:
return v or []
class DatasourceProviderIdentity(BaseModel):
author: str = Field(..., description="The author of the datasource provider")
name: str = Field(..., description="The name of the datasource provider")
description: I18nObject = Field(..., description="The description of the datasource provider")
icon: str = Field(..., description="The icon of the datasource provider")
label: I18nObject = Field(..., description="The label of the datasource provider")
tags: Optional[list[ToolLabelEnum]] = Field(
default=[],
description="The tags of the datasource provider",
)
def generate_datasource_icon_url(self, tenant_id: str) -> str:
HARD_CODED_DATASOURCE_ICONS = ["https://assets.dify.ai/images/File%20Upload.svg"]
if self.icon in HARD_CODED_DATASOURCE_ICONS:
return self.icon
return str(
URL(dify_config.CONSOLE_API_URL or "/")
/ "console"
/ "api"
/ "workspaces"
/ "current"
/ "plugin"
/ "icon"
% {"tenant_id": tenant_id, "filename": self.icon}
)
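For reference, a small sketch of the URL that the yarl composition above produces; the console URL, tenant id, and filename are placeholders.
from yarl import URL

icon_url = str(
    URL("https://console.example.com")
    / "console" / "api" / "workspaces" / "current" / "plugin" / "icon"
    % {"tenant_id": "tenant-123", "filename": "icon.svg"}
)
# -> https://console.example.com/console/api/workspaces/current/plugin/icon?tenant_id=tenant-123&filename=icon.svg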
class DatasourceProviderEntity(BaseModel):
"""
Datasource provider entity
"""
identity: DatasourceProviderIdentity
credentials_schema: list[ProviderConfig] = Field(default_factory=list)
oauth_schema: Optional[OAuthSchema] = None
provider_type: DatasourceProviderType
class DatasourceProviderEntityWithPlugin(DatasourceProviderEntity):
datasources: list[DatasourceEntity] = Field(default_factory=list)
class DatasourceInvokeMeta(BaseModel):
"""
Datasource invoke meta
"""
time_cost: float = Field(..., description="The time cost of the tool invoke")
error: Optional[str] = None
tool_config: Optional[dict] = None
@classmethod
def empty(cls) -> "DatasourceInvokeMeta":
"""
Get an empty instance of DatasourceInvokeMeta
"""
return cls(time_cost=0.0, error=None, tool_config={})
@classmethod
def error_instance(cls, error: str) -> "DatasourceInvokeMeta":
"""
Get an instance of DatasourceInvokeMeta with error
"""
return cls(time_cost=0.0, error=error, tool_config={})
def to_dict(self) -> dict:
return {
"time_cost": self.time_cost,
"error": self.error,
"tool_config": self.tool_config,
}
class DatasourceLabel(BaseModel):
"""
Datasource label
"""
name: str = Field(..., description="The name of the datasource")
label: I18nObject = Field(..., description="The label of the datasource")
icon: str = Field(..., description="The icon of the datasource")
class DatasourceInvokeFrom(Enum):
"""
Enum class for datasource invoke
"""
RAG_PIPELINE = "rag_pipeline"
class OnlineDocumentPage(BaseModel):
"""
Online document page
"""
page_id: str = Field(..., description="The page id")
page_name: str = Field(..., description="The page title")
page_icon: Optional[dict] = Field(None, description="The page icon")
type: str = Field(..., description="The type of the page")
last_edited_time: str = Field(..., description="The last edited time")
parent_id: Optional[str] = Field(None, description="The parent page id")
class OnlineDocumentInfo(BaseModel):
"""
Online document info
"""
workspace_id: Optional[str] = Field(None, description="The workspace id")
workspace_name: Optional[str] = Field(None, description="The workspace name")
workspace_icon: Optional[str] = Field(None, description="The workspace icon")
total: int = Field(..., description="The total number of documents")
pages: list[OnlineDocumentPage] = Field(..., description="The pages of the online document")
class OnlineDocumentPagesMessage(BaseModel):
"""
Get online document pages response
"""
result: list[OnlineDocumentInfo]
class GetOnlineDocumentPageContentRequest(BaseModel):
"""
Get online document page content request
"""
workspace_id: str = Field(..., description="The workspace id")
page_id: str = Field(..., description="The page id")
type: str = Field(..., description="The type of the page")
class OnlineDocumentPageContent(BaseModel):
"""
Online document page content
"""
workspace_id: str = Field(..., description="The workspace id")
page_id: str = Field(..., description="The page id")
content: str = Field(..., description="The content of the page")
class GetOnlineDocumentPageContentResponse(BaseModel):
"""
Get online document page content response
"""
result: OnlineDocumentPageContent
class GetWebsiteCrawlRequest(BaseModel):
"""
Get website crawl request
"""
crawl_parameters: dict = Field(..., description="The crawl parameters")
class WebSiteInfoDetail(BaseModel):
source_url: str = Field(..., description="The url of the website")
content: str = Field(..., description="The content of the website")
title: str = Field(..., description="The title of the website")
description: str = Field(..., description="The description of the website")
class WebSiteInfo(BaseModel):
"""
Website info
"""
status: Optional[str] = Field(..., description="crawl job status")
web_info_list: Optional[list[WebSiteInfoDetail]] = []
total: Optional[int] = Field(default=0, description="The total number of websites")
completed: Optional[int] = Field(default=0, description="The number of completed websites")
class WebsiteCrawlMessage(BaseModel):
"""
Get website crawl response
"""
result: WebSiteInfo = WebSiteInfo(status="", web_info_list=[], total=0, completed=0)
class DatasourceMessage(ToolInvokeMessage):
pass
#########################
# Online drive file
#########################
class OnlineDriveFile(BaseModel):
"""
Online drive file
"""
id: str = Field(..., description="The file ID")
name: str = Field(..., description="The file name")
size: int = Field(..., description="The file size")
type: str = Field(..., description="The file type: folder or file")
class OnlineDriveFileBucket(BaseModel):
"""
Online drive file bucket
"""
bucket: Optional[str] = Field(None, description="The file bucket")
files: list[OnlineDriveFile] = Field(..., description="The file list")
is_truncated: bool = Field(False, description="Whether the result is truncated")
next_page_parameters: Optional[dict] = Field(None, description="Parameters for fetching the next page")
class OnlineDriveBrowseFilesRequest(BaseModel):
"""
Get online drive file list request
"""
bucket: Optional[str] = Field(None, description="The file bucket")
prefix: str = Field(..., description="The parent folder ID")
max_keys: int = Field(20, description="Page size for pagination")
next_page_parameters: Optional[dict] = Field(None, description="Parameters for fetching the next page")
class OnlineDriveBrowseFilesResponse(BaseModel):
"""
Get online drive file list response
"""
result: list[OnlineDriveFileBucket] = Field(..., description="The list of file buckets")
class OnlineDriveDownloadFileRequest(BaseModel):
"""
Get online drive file
"""
id: str = Field(..., description="The id of the file")
bucket: Optional[str] = Field(None, description="The name of the bucket")
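A hedged sketch of paging through an online drive with these request/response models: keep requesting while a bucket reports is_truncated, feeding next_page_parameters back into the next request. The browse callable stands in for the plugin's online_drive_browse_files invocation, and the sketch assumes a single bucket per response.
def list_all_files(browse, bucket: Optional[str], prefix: str) -> list[OnlineDriveFile]:
    # browse: callable taking an OnlineDriveBrowseFilesRequest and returning an OnlineDriveBrowseFilesResponse
    files: list[OnlineDriveFile] = []
    next_page_parameters: Optional[dict] = None
    while True:
        response = browse(
            OnlineDriveBrowseFilesRequest(
                bucket=bucket,
                prefix=prefix,
                max_keys=20,
                next_page_parameters=next_page_parameters,
            )
        )
        file_bucket = response.result[0]
        files.extend(file_bucket.files)
        if not file_bucket.is_truncated:
            return files
        next_page_parameters = file_bucket.next_page_parameters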

View File

@ -0,0 +1,37 @@
from core.datasource.entities.datasource_entities import DatasourceInvokeMeta
class DatasourceProviderNotFoundError(ValueError):
pass
class DatasourceNotFoundError(ValueError):
pass
class DatasourceParameterValidationError(ValueError):
pass
class DatasourceProviderCredentialValidationError(ValueError):
pass
class DatasourceNotSupportedError(ValueError):
pass
class DatasourceInvokeError(ValueError):
pass
class DatasourceApiSchemaError(ValueError):
pass
class DatasourceEngineInvokeError(Exception):
meta: DatasourceInvokeMeta
def __init__(self, meta, **kwargs):
self.meta = meta
super().__init__(**kwargs)

View File

@ -0,0 +1,29 @@
from core.datasource.__base.datasource_plugin import DatasourcePlugin
from core.datasource.__base.datasource_runtime import DatasourceRuntime
from core.datasource.entities.datasource_entities import (
DatasourceEntity,
DatasourceProviderType,
)
class LocalFileDatasourcePlugin(DatasourcePlugin):
tenant_id: str
plugin_unique_identifier: str
def __init__(
self,
entity: DatasourceEntity,
runtime: DatasourceRuntime,
tenant_id: str,
icon: str,
plugin_unique_identifier: str,
) -> None:
super().__init__(entity, runtime, icon)
self.tenant_id = tenant_id
self.plugin_unique_identifier = plugin_unique_identifier
def datasource_provider_type(self) -> str:
return DatasourceProviderType.LOCAL_FILE
def get_icon_url(self, tenant_id: str) -> str:
return self.icon

View File

@ -0,0 +1,56 @@
from typing import Any
from core.datasource.__base.datasource_provider import DatasourcePluginProviderController
from core.datasource.__base.datasource_runtime import DatasourceRuntime
from core.datasource.entities.datasource_entities import DatasourceProviderEntityWithPlugin, DatasourceProviderType
from core.datasource.local_file.local_file_plugin import LocalFileDatasourcePlugin
class LocalFileDatasourcePluginProviderController(DatasourcePluginProviderController):
entity: DatasourceProviderEntityWithPlugin
plugin_id: str
plugin_unique_identifier: str
def __init__(
self, entity: DatasourceProviderEntityWithPlugin, plugin_id: str, plugin_unique_identifier: str, tenant_id: str
) -> None:
super().__init__(entity, tenant_id)
self.plugin_id = plugin_id
self.plugin_unique_identifier = plugin_unique_identifier
@property
def provider_type(self) -> DatasourceProviderType:
"""
returns the type of the provider
"""
return DatasourceProviderType.LOCAL_FILE
def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None:
"""
validate the credentials of the provider
"""
pass
def get_datasource(self, datasource_name: str) -> LocalFileDatasourcePlugin: # type: ignore
"""
return datasource with given name
"""
datasource_entity = next(
(
datasource_entity
for datasource_entity in self.entity.datasources
if datasource_entity.identity.name == datasource_name
),
None,
)
if not datasource_entity:
raise ValueError(f"Datasource with name {datasource_name} not found")
return LocalFileDatasourcePlugin(
entity=datasource_entity,
runtime=DatasourceRuntime(tenant_id=self.tenant_id),
tenant_id=self.tenant_id,
icon=self.entity.identity.icon,
plugin_unique_identifier=self.plugin_unique_identifier,
)

View File

@ -0,0 +1,71 @@
from collections.abc import Generator, Mapping
from typing import Any
from core.datasource.__base.datasource_plugin import DatasourcePlugin
from core.datasource.__base.datasource_runtime import DatasourceRuntime
from core.datasource.entities.datasource_entities import (
DatasourceEntity,
DatasourceMessage,
DatasourceProviderType,
GetOnlineDocumentPageContentRequest,
OnlineDocumentPagesMessage,
)
from core.plugin.impl.datasource import PluginDatasourceManager
class OnlineDocumentDatasourcePlugin(DatasourcePlugin):
tenant_id: str
plugin_unique_identifier: str
entity: DatasourceEntity
runtime: DatasourceRuntime
def __init__(
self,
entity: DatasourceEntity,
runtime: DatasourceRuntime,
tenant_id: str,
icon: str,
plugin_unique_identifier: str,
) -> None:
super().__init__(entity, runtime, icon)
self.tenant_id = tenant_id
self.plugin_unique_identifier = plugin_unique_identifier
def get_online_document_pages(
self,
user_id: str,
datasource_parameters: Mapping[str, Any],
provider_type: str,
) -> Generator[OnlineDocumentPagesMessage, None, None]:
manager = PluginDatasourceManager()
return manager.get_online_document_pages(
tenant_id=self.tenant_id,
user_id=user_id,
datasource_provider=self.entity.identity.provider,
datasource_name=self.entity.identity.name,
credentials=self.runtime.credentials,
datasource_parameters=datasource_parameters,
provider_type=provider_type,
)
def get_online_document_page_content(
self,
user_id: str,
datasource_parameters: GetOnlineDocumentPageContentRequest,
provider_type: str,
) -> Generator[DatasourceMessage, None, None]:
manager = PluginDatasourceManager()
return manager.get_online_document_page_content(
tenant_id=self.tenant_id,
user_id=user_id,
datasource_provider=self.entity.identity.provider,
datasource_name=self.entity.identity.name,
credentials=self.runtime.credentials,
datasource_parameters=datasource_parameters,
provider_type=provider_type,
)
def datasource_provider_type(self) -> str:
return DatasourceProviderType.ONLINE_DOCUMENT

View File

@ -0,0 +1,48 @@
from core.datasource.__base.datasource_provider import DatasourcePluginProviderController
from core.datasource.__base.datasource_runtime import DatasourceRuntime
from core.datasource.entities.datasource_entities import DatasourceProviderEntityWithPlugin, DatasourceProviderType
from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin
class OnlineDocumentDatasourcePluginProviderController(DatasourcePluginProviderController):
entity: DatasourceProviderEntityWithPlugin
plugin_id: str
plugin_unique_identifier: str
def __init__(
self, entity: DatasourceProviderEntityWithPlugin, plugin_id: str, plugin_unique_identifier: str, tenant_id: str
) -> None:
super().__init__(entity, tenant_id)
self.plugin_id = plugin_id
self.plugin_unique_identifier = plugin_unique_identifier
@property
def provider_type(self) -> DatasourceProviderType:
"""
returns the type of the provider
"""
return DatasourceProviderType.ONLINE_DOCUMENT
def get_datasource(self, datasource_name: str) -> OnlineDocumentDatasourcePlugin: # type: ignore
"""
return datasource with given name
"""
datasource_entity = next(
(
datasource_entity
for datasource_entity in self.entity.datasources
if datasource_entity.identity.name == datasource_name
),
None,
)
if not datasource_entity:
raise ValueError(f"Datasource with name {datasource_name} not found")
return OnlineDocumentDatasourcePlugin(
entity=datasource_entity,
runtime=DatasourceRuntime(tenant_id=self.tenant_id),
tenant_id=self.tenant_id,
icon=self.entity.identity.icon,
plugin_unique_identifier=self.plugin_unique_identifier,
)

View File

@ -0,0 +1,71 @@
from collections.abc import Generator
from core.datasource.__base.datasource_plugin import DatasourcePlugin
from core.datasource.__base.datasource_runtime import DatasourceRuntime
from core.datasource.entities.datasource_entities import (
DatasourceEntity,
DatasourceMessage,
DatasourceProviderType,
OnlineDriveBrowseFilesRequest,
OnlineDriveBrowseFilesResponse,
OnlineDriveDownloadFileRequest,
)
from core.plugin.impl.datasource import PluginDatasourceManager
class OnlineDriveDatasourcePlugin(DatasourcePlugin):
tenant_id: str
plugin_unique_identifier: str
entity: DatasourceEntity
runtime: DatasourceRuntime
def __init__(
self,
entity: DatasourceEntity,
runtime: DatasourceRuntime,
tenant_id: str,
icon: str,
plugin_unique_identifier: str,
) -> None:
super().__init__(entity, runtime, icon)
self.tenant_id = tenant_id
self.plugin_unique_identifier = plugin_unique_identifier
def online_drive_browse_files(
self,
user_id: str,
request: OnlineDriveBrowseFilesRequest,
provider_type: str,
) -> Generator[OnlineDriveBrowseFilesResponse, None, None]:
manager = PluginDatasourceManager()
return manager.online_drive_browse_files(
tenant_id=self.tenant_id,
user_id=user_id,
datasource_provider=self.entity.identity.provider,
datasource_name=self.entity.identity.name,
credentials=self.runtime.credentials,
request=request,
provider_type=provider_type,
)
def online_drive_download_file(
self,
user_id: str,
request: OnlineDriveDownloadFileRequest,
provider_type: str,
) -> Generator[DatasourceMessage, None, None]:
manager = PluginDatasourceManager()
return manager.online_drive_download_file(
tenant_id=self.tenant_id,
user_id=user_id,
datasource_provider=self.entity.identity.provider,
datasource_name=self.entity.identity.name,
credentials=self.runtime.credentials,
request=request,
provider_type=provider_type,
)
def datasource_provider_type(self) -> str:
return DatasourceProviderType.ONLINE_DRIVE

View File

@ -0,0 +1,48 @@
from core.datasource.__base.datasource_provider import DatasourcePluginProviderController
from core.datasource.__base.datasource_runtime import DatasourceRuntime
from core.datasource.entities.datasource_entities import DatasourceProviderEntityWithPlugin, DatasourceProviderType
from core.datasource.online_drive.online_drive_plugin import OnlineDriveDatasourcePlugin
class OnlineDriveDatasourcePluginProviderController(DatasourcePluginProviderController):
entity: DatasourceProviderEntityWithPlugin
plugin_id: str
plugin_unique_identifier: str
def __init__(
self, entity: DatasourceProviderEntityWithPlugin, plugin_id: str, plugin_unique_identifier: str, tenant_id: str
) -> None:
super().__init__(entity, tenant_id)
self.plugin_id = plugin_id
self.plugin_unique_identifier = plugin_unique_identifier
@property
def provider_type(self) -> DatasourceProviderType:
"""
returns the type of the provider
"""
return DatasourceProviderType.ONLINE_DRIVE
def get_datasource(self, datasource_name: str) -> OnlineDriveDatasourcePlugin: # type: ignore
"""
return datasource with given name
"""
datasource_entity = next(
(
datasource_entity
for datasource_entity in self.entity.datasources
if datasource_entity.identity.name == datasource_name
),
None,
)
if not datasource_entity:
raise ValueError(f"Datasource with name {datasource_name} not found")
return OnlineDriveDatasourcePlugin(
entity=datasource_entity,
runtime=DatasourceRuntime(tenant_id=self.tenant_id),
tenant_id=self.tenant_id,
icon=self.entity.identity.icon,
plugin_unique_identifier=self.plugin_unique_identifier,
)

View File

@ -0,0 +1,265 @@
from copy import deepcopy
from typing import Any
from pydantic import BaseModel
from core.entities.provider_entities import BasicProviderConfig
from core.helper import encrypter
from core.helper.tool_parameter_cache import ToolParameterCache, ToolParameterCacheType
from core.helper.tool_provider_cache import ToolProviderCredentialsCache, ToolProviderCredentialsCacheType
from core.tools.__base.tool import Tool
from core.tools.entities.tool_entities import (
ToolParameter,
ToolProviderType,
)
class ProviderConfigEncrypter(BaseModel):
tenant_id: str
config: list[BasicProviderConfig]
provider_type: str
provider_identity: str
def _deep_copy(self, data: dict[str, str]) -> dict[str, str]:
"""
deep copy data
"""
return deepcopy(data)
def encrypt(self, data: dict[str, str]) -> dict[str, str]:
"""
encrypt tool credentials with tenant id
return a deep copy of credentials with encrypted values
"""
data = self._deep_copy(data)
# get fields that need to be encrypted
fields = dict[str, BasicProviderConfig]()
for credential in self.config:
fields[credential.name] = credential
for field_name, field in fields.items():
if field.type == BasicProviderConfig.Type.SECRET_INPUT:
if field_name in data:
encrypted = encrypter.encrypt_token(self.tenant_id, data[field_name] or "")
data[field_name] = encrypted
return data
def mask_tool_credentials(self, data: dict[str, Any]) -> dict[str, Any]:
"""
mask tool credentials
return a deep copy of credentials with masked values
"""
data = self._deep_copy(data)
# get fields that need to be masked
fields = dict[str, BasicProviderConfig]()
for credential in self.config:
fields[credential.name] = credential
for field_name, field in fields.items():
if field.type == BasicProviderConfig.Type.SECRET_INPUT:
if field_name in data:
if len(data[field_name]) > 6:
data[field_name] = (
data[field_name][:2] + "*" * (len(data[field_name]) - 4) + data[field_name][-2:]
)
else:
data[field_name] = "*" * len(data[field_name])
return data
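A worked example of the masking rule applied to SECRET_INPUT fields above: secrets longer than six characters keep their first and last two characters, shorter ones are masked entirely. The helper below only isolates that string logic.
def mask_secret(value: str) -> str:
    if len(value) > 6:
        return value[:2] + "*" * (len(value) - 4) + value[-2:]
    return "*" * len(value)

assert mask_secret("sk-1234567890") == "sk*********90"
assert mask_secret("abc") == "***"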
def decrypt(self, data: dict[str, str]) -> dict[str, str]:
"""
decrypt tool credentials with tenant id
return a deep copy of credentials with decrypted values
"""
cache = ToolProviderCredentialsCache(
tenant_id=self.tenant_id,
identity_id=f"{self.provider_type}.{self.provider_identity}",
cache_type=ToolProviderCredentialsCacheType.PROVIDER,
)
cached_credentials = cache.get()
if cached_credentials:
return cached_credentials
data = self._deep_copy(data)
# get fields that need to be decrypted
fields = dict[str, BasicProviderConfig]()
for credential in self.config:
fields[credential.name] = credential
for field_name, field in fields.items():
if field.type == BasicProviderConfig.Type.SECRET_INPUT:
if field_name in data:
try:
# if the value is None or empty string, skip decrypt
if not data[field_name]:
continue
data[field_name] = encrypter.decrypt_token(self.tenant_id, data[field_name])
except Exception:
pass
cache.set(data)
return data
def delete_tool_credentials_cache(self):
cache = ToolProviderCredentialsCache(
tenant_id=self.tenant_id,
identity_id=f"{self.provider_type}.{self.provider_identity}",
cache_type=ToolProviderCredentialsCacheType.PROVIDER,
)
cache.delete()
class ToolParameterConfigurationManager:
"""
Tool parameter configuration manager
"""
tenant_id: str
tool_runtime: Tool
provider_name: str
provider_type: ToolProviderType
identity_id: str
def __init__(
self, tenant_id: str, tool_runtime: Tool, provider_name: str, provider_type: ToolProviderType, identity_id: str
) -> None:
self.tenant_id = tenant_id
self.tool_runtime = tool_runtime
self.provider_name = provider_name
self.provider_type = provider_type
self.identity_id = identity_id
def _deep_copy(self, parameters: dict[str, Any]) -> dict[str, Any]:
"""
deep copy parameters
"""
return deepcopy(parameters)
def _merge_parameters(self) -> list[ToolParameter]:
"""
merge parameters
"""
# get tool parameters
tool_parameters = self.tool_runtime.entity.parameters or []
# get tool runtime parameters
runtime_parameters = self.tool_runtime.get_runtime_parameters()
# override parameters
current_parameters = tool_parameters.copy()
for runtime_parameter in runtime_parameters:
found = False
for index, parameter in enumerate(current_parameters):
if parameter.name == runtime_parameter.name and parameter.form == runtime_parameter.form:
current_parameters[index] = runtime_parameter
found = True
break
if not found and runtime_parameter.form == ToolParameter.ToolParameterForm.FORM:
current_parameters.append(runtime_parameter)
return current_parameters
def mask_tool_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]:
"""
mask tool parameters
return a deep copy of parameters with masked values
"""
parameters = self._deep_copy(parameters)
# override parameters
current_parameters = self._merge_parameters()
for parameter in current_parameters:
if (
parameter.form == ToolParameter.ToolParameterForm.FORM
and parameter.type == ToolParameter.ToolParameterType.SECRET_INPUT
):
if parameter.name in parameters:
if len(parameters[parameter.name]) > 6:
parameters[parameter.name] = (
parameters[parameter.name][:2]
+ "*" * (len(parameters[parameter.name]) - 4)
+ parameters[parameter.name][-2:]
)
else:
parameters[parameter.name] = "*" * len(parameters[parameter.name])
return parameters
def encrypt_tool_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]:
"""
encrypt tool parameters with tenant id
return a deep copy of parameters with encrypted values
"""
# override parameters
current_parameters = self._merge_parameters()
parameters = self._deep_copy(parameters)
for parameter in current_parameters:
if (
parameter.form == ToolParameter.ToolParameterForm.FORM
and parameter.type == ToolParameter.ToolParameterType.SECRET_INPUT
):
if parameter.name in parameters:
encrypted = encrypter.encrypt_token(self.tenant_id, parameters[parameter.name])
parameters[parameter.name] = encrypted
return parameters
def decrypt_tool_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]:
"""
decrypt tool parameters with tenant id
return a deep copy of parameters with decrypted values
"""
cache = ToolParameterCache(
tenant_id=self.tenant_id,
provider=f"{self.provider_type.value}.{self.provider_name}",
tool_name=self.tool_runtime.entity.identity.name,
cache_type=ToolParameterCacheType.PARAMETER,
identity_id=self.identity_id,
)
cached_parameters = cache.get()
if cached_parameters:
return cached_parameters
# override parameters
current_parameters = self._merge_parameters()
has_secret_input = False
for parameter in current_parameters:
if (
parameter.form == ToolParameter.ToolParameterForm.FORM
and parameter.type == ToolParameter.ToolParameterType.SECRET_INPUT
):
if parameter.name in parameters:
try:
has_secret_input = True
parameters[parameter.name] = encrypter.decrypt_token(self.tenant_id, parameters[parameter.name])
except Exception:
pass
if has_secret_input:
cache.set(parameters)
return parameters
def delete_tool_parameters_cache(self):
cache = ToolParameterCache(
tenant_id=self.tenant_id,
provider=f"{self.provider_type.value}.{self.provider_name}",
tool_name=self.tool_runtime.entity.identity.name,
cache_type=ToolParameterCacheType.PARAMETER,
identity_id=self.identity_id,
)
cache.delete()
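The masking rule used by mask_tool_parameters keeps only the first and last two characters of longer secrets and stars out everything else; a minimal standalone sketch of that rule (the helper name and sample values below are illustrative, not part of this diff):

def mask_secret(value: str) -> str:
    # Keep the first and last two characters when the secret is longer than
    # six characters; otherwise mask every character.
    if len(value) > 6:
        return value[:2] + "*" * (len(value) - 4) + value[-2:]
    return "*" * len(value)


assert mask_secret("sk-1234567890") == "sk*********90"
assert mask_secret("abc") == "***"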

View File

@ -0,0 +1,125 @@
import logging
from collections.abc import Generator
from mimetypes import guess_extension, guess_type
from typing import Optional
from core.datasource.entities.datasource_entities import DatasourceMessage
from core.file import File, FileTransferMethod, FileType
from core.tools.tool_file_manager import ToolFileManager
logger = logging.getLogger(__name__)
class DatasourceFileMessageTransformer:
@classmethod
def transform_datasource_invoke_messages(
cls,
messages: Generator[DatasourceMessage, None, None],
user_id: str,
tenant_id: str,
conversation_id: Optional[str] = None,
) -> Generator[DatasourceMessage, None, None]:
"""
Transform datasource message and handle file download
"""
for message in messages:
if message.type in {DatasourceMessage.MessageType.TEXT, DatasourceMessage.MessageType.LINK}:
yield message
elif message.type == DatasourceMessage.MessageType.IMAGE and isinstance(
message.message, DatasourceMessage.TextMessage
):
# try to download image
try:
assert isinstance(message.message, DatasourceMessage.TextMessage)
tool_file_manager = ToolFileManager()
file = tool_file_manager.create_file_by_url(
user_id=user_id,
tenant_id=tenant_id,
file_url=message.message.text,
conversation_id=conversation_id,
)
url = f"/files/datasources/{file.id}{guess_extension(file.mimetype) or '.png'}"
yield DatasourceMessage(
type=DatasourceMessage.MessageType.IMAGE_LINK,
message=DatasourceMessage.TextMessage(text=url),
meta=message.meta.copy() if message.meta is not None else {},
)
except Exception as e:
yield DatasourceMessage(
type=DatasourceMessage.MessageType.TEXT,
message=DatasourceMessage.TextMessage(
text=f"Failed to download image: {message.message.text}: {e}"
),
meta=message.meta.copy() if message.meta is not None else {},
)
elif message.type == DatasourceMessage.MessageType.BLOB:
# get mime type and save blob to storage
meta = message.meta or {}
# get filename from meta
filename = meta.get("file_name", None)
mimetype = meta.get("mime_type")
if not mimetype:
mimetype = guess_type(filename or "")[0] or "application/octet-stream"
# the blob branch requires a BlobMessage payload
if not isinstance(message.message, DatasourceMessage.BlobMessage):
raise ValueError("unexpected message type")
# FIXME: should do a type check here.
assert isinstance(message.message.blob, bytes)
tool_file_manager = ToolFileManager()
file = tool_file_manager.create_file_by_raw(
user_id=user_id,
tenant_id=tenant_id,
conversation_id=conversation_id,
file_binary=message.message.blob,
mimetype=mimetype,
filename=filename,
)
url = cls.get_datasource_file_url(datasource_file_id=file.id, extension=guess_extension(file.mimetype))
# check if file is image
if "image" in mimetype:
yield DatasourceMessage(
type=DatasourceMessage.MessageType.IMAGE_LINK,
message=DatasourceMessage.TextMessage(text=url),
meta=meta.copy() if meta is not None else {},
)
else:
yield DatasourceMessage(
type=DatasourceMessage.MessageType.BINARY_LINK,
message=DatasourceMessage.TextMessage(text=url),
meta=meta.copy() if meta is not None else {},
)
elif message.type == DatasourceMessage.MessageType.FILE:
meta = message.meta or {}
file = meta.get("file", None)
if isinstance(file, File):
if file.transfer_method == FileTransferMethod.TOOL_FILE:
assert file.related_id is not None
url = cls.get_datasource_file_url(datasource_file_id=file.related_id, extension=file.extension)
if file.type == FileType.IMAGE:
yield DatasourceMessage(
type=DatasourceMessage.MessageType.IMAGE_LINK,
message=DatasourceMessage.TextMessage(text=url),
meta=meta.copy() if meta is not None else {},
)
else:
yield DatasourceMessage(
type=DatasourceMessage.MessageType.LINK,
message=DatasourceMessage.TextMessage(text=url),
meta=meta.copy() if meta is not None else {},
)
else:
yield message
else:
yield message
@classmethod
def get_datasource_file_url(cls, datasource_file_id: str, extension: Optional[str]) -> str:
return f"/files/datasources/{datasource_file_id}{extension or '.bin'}"

View File

@ -0,0 +1,389 @@
import re
import uuid
from json import dumps as json_dumps
from json import loads as json_loads
from json.decoder import JSONDecodeError
from typing import Optional
from flask import request
from requests import get
from yaml import YAMLError, safe_load # type: ignore
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_bundle import ApiToolBundle
from core.tools.entities.tool_entities import ApiProviderSchemaType, ToolParameter
from core.tools.errors import ToolApiSchemaError, ToolNotSupportedError, ToolProviderNotFoundError
class ApiBasedToolSchemaParser:
@staticmethod
def parse_openapi_to_tool_bundle(
openapi: dict, extra_info: dict | None = None, warning: dict | None = None
) -> list[ApiToolBundle]:
warning = warning if warning is not None else {}
extra_info = extra_info if extra_info is not None else {}
# set description to extra_info
extra_info["description"] = openapi["info"].get("description", "")
if len(openapi["servers"]) == 0:
raise ToolProviderNotFoundError("No server found in the openapi yaml.")
server_url = openapi["servers"][0]["url"]
request_env = request.headers.get("X-Request-Env")
if request_env:
matched_servers = [server["url"] for server in openapi["servers"] if server["env"] == request_env]
server_url = matched_servers[0] if matched_servers else server_url
# list all interfaces
interfaces = []
for path, path_item in openapi["paths"].items():
methods = ["get", "post", "put", "delete", "patch", "head", "options", "trace"]
for method in methods:
if method in path_item:
interfaces.append(
{
"path": path,
"method": method,
"operation": path_item[method],
}
)
# get all parameters
bundles = []
for interface in interfaces:
# convert parameters
parameters = []
if "parameters" in interface["operation"]:
for parameter in interface["operation"]["parameters"]:
tool_parameter = ToolParameter(
name=parameter["name"],
label=I18nObject(en_US=parameter["name"], zh_Hans=parameter["name"]),
human_description=I18nObject(
en_US=parameter.get("description", ""), zh_Hans=parameter.get("description", "")
),
type=ToolParameter.ToolParameterType.STRING,
required=parameter.get("required", False),
form=ToolParameter.ToolParameterForm.LLM,
llm_description=parameter.get("description"),
default=parameter["schema"]["default"]
if "schema" in parameter and "default" in parameter["schema"]
else None,
placeholder=I18nObject(
en_US=parameter.get("description", ""), zh_Hans=parameter.get("description", "")
),
)
# check if there is a type
typ = ApiBasedToolSchemaParser._get_tool_parameter_type(parameter)
if typ:
tool_parameter.type = typ
parameters.append(tool_parameter)
# create tool bundle
# check if there is a request body
if "requestBody" in interface["operation"]:
request_body = interface["operation"]["requestBody"]
if "content" in request_body:
for content_type, content in request_body["content"].items():
# if there is a reference, get the reference and overwrite the content
if "schema" not in content:
continue
if "$ref" in content["schema"]:
# get the reference
root = openapi
reference = content["schema"]["$ref"].split("/")[1:]
for ref in reference:
root = root[ref]
# overwrite the content
interface["operation"]["requestBody"]["content"][content_type]["schema"] = root
# parse body parameters
if "schema" in interface["operation"]["requestBody"]["content"][content_type]:
body_schema = interface["operation"]["requestBody"]["content"][content_type]["schema"]
required = body_schema.get("required", [])
properties = body_schema.get("properties", {})
for name, property in properties.items():
tool = ToolParameter(
name=name,
label=I18nObject(en_US=name, zh_Hans=name),
human_description=I18nObject(
en_US=property.get("description", ""), zh_Hans=property.get("description", "")
),
type=ToolParameter.ToolParameterType.STRING,
required=name in required,
form=ToolParameter.ToolParameterForm.LLM,
llm_description=property.get("description", ""),
default=property.get("default", None),
placeholder=I18nObject(
en_US=property.get("description", ""), zh_Hans=property.get("description", "")
),
)
# check if there is a type
typ = ApiBasedToolSchemaParser._get_tool_parameter_type(property)
if typ:
tool.type = typ
parameters.append(tool)
# check for duplicated parameters
parameters_count = {}
for parameter in parameters:
if parameter.name not in parameters_count:
parameters_count[parameter.name] = 0
parameters_count[parameter.name] += 1
for name, count in parameters_count.items():
if count > 1:
warning["duplicated_parameter"] = f"Parameter {name} is duplicated."
# check if there is an operation id, use $path_$method as operation id if not
if "operationId" not in interface["operation"]:
path = interface["path"]
if interface["path"].startswith("/"):
path = interface["path"][1:]
# remove special characters like / to ensure the operation id is valid ^[a-zA-Z0-9_-]{1,64}$
path = re.sub(r"[^a-zA-Z0-9_-]", "", path)
if not path:
path = str(uuid.uuid4())
interface["operation"]["operationId"] = f"{path}_{interface['method']}"
bundles.append(
ApiToolBundle(
server_url=server_url + interface["path"],
method=interface["method"],
summary=interface["operation"]["description"]
if "description" in interface["operation"]
else interface["operation"].get("summary", None),
operation_id=interface["operation"]["operationId"],
parameters=parameters,
author="",
icon=None,
openapi=interface["operation"],
)
)
return bundles
@staticmethod
def _get_tool_parameter_type(parameter: dict) -> Optional[ToolParameter.ToolParameterType]:
parameter = parameter or {}
typ: Optional[str] = None
if parameter.get("format") == "binary":
return ToolParameter.ToolParameterType.FILE
if "type" in parameter:
typ = parameter["type"]
elif "schema" in parameter and "type" in parameter["schema"]:
typ = parameter["schema"]["type"]
if typ in {"integer", "number"}:
return ToolParameter.ToolParameterType.NUMBER
elif typ == "boolean":
return ToolParameter.ToolParameterType.BOOLEAN
elif typ == "string":
return ToolParameter.ToolParameterType.STRING
elif typ == "array":
items = parameter.get("items") or parameter.get("schema", {}).get("items")
return ToolParameter.ToolParameterType.FILES if items and items.get("format") == "binary" else None
else:
return None
@staticmethod
def parse_openapi_yaml_to_tool_bundle(
yaml: str, extra_info: dict | None = None, warning: dict | None = None
) -> list[ApiToolBundle]:
"""
parse openapi yaml to tool bundle
:param yaml: the yaml string
:param extra_info: the extra info
:param warning: the warning message
:return: the tool bundle
"""
warning = warning if warning is not None else {}
extra_info = extra_info if extra_info is not None else {}
openapi: dict = safe_load(yaml)
if openapi is None:
raise ToolApiSchemaError("Invalid openapi yaml.")
return ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle(openapi, extra_info=extra_info, warning=warning)
@staticmethod
def parse_swagger_to_openapi(swagger: dict, extra_info: dict | None = None, warning: dict | None = None) -> dict:
"""
parse swagger to openapi
:param swagger: the swagger dict
:param extra_info: the extra info
:param warning: the warning message
:return: the openapi dict
"""
warning = warning or {}
# convert swagger to openapi
info = swagger.get("info", {"title": "Swagger", "description": "Swagger", "version": "1.0.0"})
servers = swagger.get("servers", [])
if len(servers) == 0:
raise ToolApiSchemaError("No server found in the swagger yaml.")
openapi = {
"openapi": "3.0.0",
"info": {
"title": info.get("title", "Swagger"),
"description": info.get("description", "Swagger"),
"version": info.get("version", "1.0.0"),
},
"servers": swagger["servers"],
"paths": {},
"components": {"schemas": {}},
}
# check paths
if "paths" not in swagger or len(swagger["paths"]) == 0:
raise ToolApiSchemaError("No paths found in the swagger yaml.")
# convert paths
for path, path_item in swagger["paths"].items():
openapi["paths"][path] = {}
for method, operation in path_item.items():
if "operationId" not in operation:
raise ToolApiSchemaError(f"No operationId found in operation {method} {path}.")
if ("summary" not in operation or len(operation["summary"]) == 0) and (
"description" not in operation or len(operation["description"]) == 0
):
if warning is not None:
warning["missing_summary"] = f"No summary or description found in operation {method} {path}."
openapi["paths"][path][method] = {
"operationId": operation["operationId"],
"summary": operation.get("summary", ""),
"description": operation.get("description", ""),
"parameters": operation.get("parameters", []),
"responses": operation.get("responses", {}),
}
if "requestBody" in operation:
openapi["paths"][path][method]["requestBody"] = operation["requestBody"]
# convert definitions
for name, definition in swagger.get("definitions", {}).items():
openapi["components"]["schemas"][name] = definition
return openapi
@staticmethod
def parse_openai_plugin_json_to_tool_bundle(
json: str, extra_info: dict | None = None, warning: dict | None = None
) -> list[ApiToolBundle]:
"""
parse openai plugin json to tool bundle
:param json: the json string
:param extra_info: the extra info
:param warning: the warning message
:return: the tool bundle
"""
warning = warning if warning is not None else {}
extra_info = extra_info if extra_info is not None else {}
try:
openai_plugin = json_loads(json)
api = openai_plugin["api"]
api_url = api["url"]
api_type = api["type"]
except JSONDecodeError:
raise ToolProviderNotFoundError("Invalid openai plugin json.")
if api_type != "openapi":
raise ToolNotSupportedError("Only openapi is supported now.")
# get openapi yaml
response = get(api_url, headers={"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "}, timeout=5)
if response.status_code != 200:
raise ToolProviderNotFoundError("cannot get openapi yaml from url.")
return ApiBasedToolSchemaParser.parse_openapi_yaml_to_tool_bundle(
response.text, extra_info=extra_info, warning=warning
)
@staticmethod
def auto_parse_to_tool_bundle(
content: str, extra_info: dict | None = None, warning: dict | None = None
) -> tuple[list[ApiToolBundle], str]:
"""
auto parse to tool bundle
:param content: the content
:param extra_info: the extra info
:param warning: the warning message
:return: tool bundles, schema_type
"""
warning = warning if warning is not None else {}
extra_info = extra_info if extra_info is not None else {}
content = content.strip()
loaded_content = None
json_error = None
yaml_error = None
try:
loaded_content = json_loads(content)
except JSONDecodeError as e:
json_error = e
if loaded_content is None:
try:
loaded_content = safe_load(content)
except YAMLError as e:
yaml_error = e
if loaded_content is None:
raise ToolApiSchemaError(
f"Invalid api schema, schema is neither json nor yaml. json error: {str(json_error)},"
f" yaml error: {str(yaml_error)}"
)
swagger_error = None
openapi_error = None
openapi_plugin_error = None
schema_type = None
try:
openapi = ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle(
loaded_content, extra_info=extra_info, warning=warning
)
schema_type = ApiProviderSchemaType.OPENAPI.value
return openapi, schema_type
except ToolApiSchemaError as e:
openapi_error = e
# openapi parse error, fallback to swagger
try:
converted_swagger = ApiBasedToolSchemaParser.parse_swagger_to_openapi(
loaded_content, extra_info=extra_info, warning=warning
)
schema_type = ApiProviderSchemaType.SWAGGER.value
return ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle(
converted_swagger, extra_info=extra_info, warning=warning
), schema_type
except ToolApiSchemaError as e:
swagger_error = e
# swagger parse error, fallback to openai plugin
try:
openapi_plugin = ApiBasedToolSchemaParser.parse_openai_plugin_json_to_tool_bundle(
json_dumps(loaded_content), extra_info=extra_info, warning=warning
)
return openapi_plugin, ApiProviderSchemaType.OPENAI_PLUGIN.value
except ToolNotSupportedError as e:
# maybe it's not a plugin at all
openapi_plugin_error = e
raise ToolApiSchemaError(
f"Invalid api schema, openapi error: {str(openapi_error)}, swagger error: {str(swagger_error)},"
f" openapi plugin error: {str(openapi_plugin_error)}"
)
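auto_parse_to_tool_bundle tries JSON first and falls back to YAML before attempting the OpenAPI, Swagger, and OpenAI-plugin parsers in turn. A minimal sketch of just the loading step, assuming only the standard json module and PyYAML (the function name is illustrative):

from json import loads as json_loads
from json.decoder import JSONDecodeError

from yaml import YAMLError, safe_load


def load_schema(content: str) -> dict:
    # Try JSON first, then fall back to YAML, mirroring the loading order above.
    content = content.strip()
    try:
        return json_loads(content)
    except JSONDecodeError:
        pass
    try:
        loaded = safe_load(content)
    except YAMLError as e:
        raise ValueError(f"schema is neither json nor yaml: {e}") from e
    if loaded is None:
        raise ValueError("schema is neither json nor yaml")
    return loaded


print(load_schema('{"openapi": "3.0.0"}'))  # parsed as JSON
print(load_schema("openapi: 3.0.0"))        # parsed as YAML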

View File

@ -0,0 +1,17 @@
import re
def remove_leading_symbols(text: str) -> str:
"""
Remove leading punctuation or symbols from the given text.
Args:
text (str): The input text to process.
Returns:
str: The text with leading punctuation or symbols removed.
"""
# Match Unicode ranges for punctuation and symbols
# FIXME: this pattern is a confusing quick fix for #11868; consider refactoring it later
pattern = r"^[\u2000-\u206F\u2E00-\u2E7F\u3000-\u303F!\"#$%&'()*+,./:;<=>?@^_`~]+"
return re.sub(pattern, "", text)
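Example behaviour of the helper above (these calls assume the function defined above is in scope):

print(remove_leading_symbols("...Hello, world"))  # Hello, world
print(remove_leading_symbols("!!important"))      # important
print(remove_leading_symbols("plain text"))       # plain text (unchanged)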

View File

@ -0,0 +1,9 @@
import uuid
def is_valid_uuid(uuid_str: str) -> bool:
try:
uuid.UUID(uuid_str)
return True
except Exception:
return False
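Usage of the helper above (assuming the function is in scope); note that any parse failure, not just ValueError, yields False:

import uuid

print(is_valid_uuid(str(uuid.uuid4())))                       # True
print(is_valid_uuid("123e4567-e89b-12d3-a456-426614174000"))  # True
print(is_valid_uuid("not-a-uuid"))                            # False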

View File

@ -0,0 +1,43 @@
from collections.abc import Mapping, Sequence
from typing import Any
from core.app.app_config.entities import VariableEntity
from core.tools.entities.tool_entities import WorkflowToolParameterConfiguration
class WorkflowToolConfigurationUtils:
@classmethod
def check_parameter_configurations(cls, configurations: list[Mapping[str, Any]]):
for configuration in configurations:
WorkflowToolParameterConfiguration.model_validate(configuration)
@classmethod
def get_workflow_graph_variables(cls, graph: Mapping[str, Any]) -> Sequence[VariableEntity]:
"""
get workflow graph variables
"""
nodes = graph.get("nodes", [])
start_node = next(filter(lambda x: x.get("data", {}).get("type") == "start", nodes), None)
if not start_node:
return []
return [VariableEntity.model_validate(variable) for variable in start_node.get("data", {}).get("variables", [])]
@classmethod
def check_is_synced(
cls, variables: list[VariableEntity], tool_configurations: list[WorkflowToolParameterConfiguration]
):
"""
check whether workflow variables and tool parameter configurations are in sync
raise ValueError if not synced
"""
variable_names = [variable.variable for variable in variables]
if len(tool_configurations) != len(variables):
raise ValueError("parameter configuration mismatch, please republish the tool to update")
for parameter in tool_configurations:
if parameter.name not in variable_names:
raise ValueError("parameter configuration mismatch, please republish the tool to update")

View File

@ -0,0 +1,35 @@
import logging
from pathlib import Path
from typing import Any
import yaml # type: ignore
from yaml import YAMLError
logger = logging.getLogger(__name__)
def load_yaml_file(file_path: str, ignore_error: bool = True, default_value: Any = {}) -> Any:
"""
Safely load a YAML file
:param file_path: the path of the YAML file
:param ignore_error:
if True, return default_value if an error occurs
if False, raise error if error occurs
:param default_value: the value returned when errors ignored
:return: an object of the YAML content
"""
if not file_path or not Path(file_path).exists():
if ignore_error:
return default_value
else:
raise FileNotFoundError(f"File not found: {file_path}")
with open(file_path, encoding="utf-8") as yaml_file:
try:
yaml_content = yaml.safe_load(yaml_file)
return yaml_content or default_value
except Exception as e:
if ignore_error:
return default_value
else:
raise YAMLError(f"Failed to load YAML file {file_path}: {e}") from e
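Typical usage of the loader above; the path is hypothetical, and the lenient default returns {} when the file is missing or malformed:

# Lenient: returns {} if the file is absent or cannot be parsed.
manifest = load_yaml_file("/path/to/provider.yaml")
print(manifest.get("identity", {}))

# Strict: raises FileNotFoundError or YAMLError instead of returning the default.
manifest = load_yaml_file("/path/to/provider.yaml", ignore_error=False)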

View File

@ -0,0 +1,51 @@
from collections.abc import Generator, Mapping
from typing import Any
from core.datasource.__base.datasource_plugin import DatasourcePlugin
from core.datasource.__base.datasource_runtime import DatasourceRuntime
from core.datasource.entities.datasource_entities import (
DatasourceEntity,
DatasourceProviderType,
WebsiteCrawlMessage,
)
from core.plugin.impl.datasource import PluginDatasourceManager
class WebsiteCrawlDatasourcePlugin(DatasourcePlugin):
tenant_id: str
plugin_unique_identifier: str
entity: DatasourceEntity
runtime: DatasourceRuntime
def __init__(
self,
entity: DatasourceEntity,
runtime: DatasourceRuntime,
tenant_id: str,
icon: str,
plugin_unique_identifier: str,
) -> None:
super().__init__(entity, runtime, icon)
self.tenant_id = tenant_id
self.plugin_unique_identifier = plugin_unique_identifier
def get_website_crawl(
self,
user_id: str,
datasource_parameters: Mapping[str, Any],
provider_type: str,
) -> Generator[WebsiteCrawlMessage, None, None]:
manager = PluginDatasourceManager()
return manager.get_website_crawl(
tenant_id=self.tenant_id,
user_id=user_id,
datasource_provider=self.entity.identity.provider,
datasource_name=self.entity.identity.name,
credentials=self.runtime.credentials,
datasource_parameters=datasource_parameters,
provider_type=provider_type,
)
def datasource_provider_type(self) -> str:
return DatasourceProviderType.WEBSITE_CRAWL

View File

@ -0,0 +1,52 @@
from core.datasource.__base.datasource_provider import DatasourcePluginProviderController
from core.datasource.__base.datasource_runtime import DatasourceRuntime
from core.datasource.entities.datasource_entities import DatasourceProviderEntityWithPlugin, DatasourceProviderType
from core.datasource.website_crawl.website_crawl_plugin import WebsiteCrawlDatasourcePlugin
class WebsiteCrawlDatasourcePluginProviderController(DatasourcePluginProviderController):
entity: DatasourceProviderEntityWithPlugin
plugin_id: str
plugin_unique_identifier: str
def __init__(
self,
entity: DatasourceProviderEntityWithPlugin,
plugin_id: str,
plugin_unique_identifier: str,
tenant_id: str,
) -> None:
super().__init__(entity, tenant_id)
self.plugin_id = plugin_id
self.plugin_unique_identifier = plugin_unique_identifier
@property
def provider_type(self) -> DatasourceProviderType:
"""
returns the type of the provider
"""
return DatasourceProviderType.WEBSITE_CRAWL
def get_datasource(self, datasource_name: str) -> WebsiteCrawlDatasourcePlugin: # type: ignore
"""
return datasource with given name
"""
datasource_entity = next(
(
datasource_entity
for datasource_entity in self.entity.datasources
if datasource_entity.identity.name == datasource_name
),
None,
)
if not datasource_entity:
raise ValueError(f"Datasource with name {datasource_name} not found")
return WebsiteCrawlDatasourcePlugin(
entity=datasource_entity,
runtime=DatasourceRuntime(tenant_id=self.tenant_id),
tenant_id=self.tenant_id,
icon=self.entity.identity.icon,
plugin_unique_identifier=self.plugin_unique_identifier,
)

View File

@ -17,3 +17,27 @@ class IndexingEstimate(BaseModel):
total_segments: int
preview: list[PreviewDetail]
qa_preview: Optional[list[QAPreviewDetail]] = None
class PipelineDataset(BaseModel):
id: str
name: str
description: str
chunk_structure: str
class PipelineDocument(BaseModel):
id: str
position: int
data_source_type: str
data_source_info: Optional[dict] = None
name: str
indexing_status: str
error: Optional[str] = None
enabled: bool
class PipelineGenerateResponse(BaseModel):
batch: str
dataset: PipelineDataset
documents: list[PipelineDocument]
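Constructing the response models above with illustrative values (all field values are hypothetical; pydantic v2 is assumed, matching the BaseModel shown in the hunk context):

dataset = PipelineDataset(
    id="ds-1",
    name="docs",
    description="example dataset",
    chunk_structure="text_model",
)
document = PipelineDocument(
    id="doc-1",
    position=1,
    data_source_type="upload_file",
    name="readme.md",
    indexing_status="waiting",
    enabled=True,
)
response = PipelineGenerateResponse(batch="batch-1", dataset=dataset, documents=[document])
print(response.model_dump())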

View File

@ -29,7 +29,6 @@ from core.model_runtime.entities.provider_entities import (
)
from core.model_runtime.model_providers.__base.ai_model import AIModel
from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory
from core.plugin.entities.plugin import ModelProviderID
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.provider import (
@ -42,6 +41,7 @@ from models.provider import (
ProviderType,
TenantPreferredModelProvider,
)
from models.provider_ids import ModelProviderID
logger = logging.getLogger(__name__)
@ -683,6 +683,7 @@ class ProviderConfiguration(BaseModel):
Get custom model credentials.
"""
# get provider model
model_provider_id = ModelProviderID(self.provider.provider)
provider_names = [self.provider.provider]
if model_provider_id.is_langgenius():
@ -1182,6 +1183,7 @@ class ProviderConfiguration(BaseModel):
"""
Get provider model setting.
"""
model_provider_id = ModelProviderID(self.provider.provider)
provider_names = [self.provider.provider]
if model_provider_id.is_langgenius():
@ -1265,6 +1267,7 @@ class ProviderConfiguration(BaseModel):
:param model: model name
:return:
"""
model_provider_id = ModelProviderID(self.provider.provider)
provider_names = [self.provider.provider]
if model_provider_id.is_langgenius():
@ -1398,7 +1401,7 @@ class ProviderConfiguration(BaseModel):
"""
secret_input_form_variables = []
for credential_form_schema in credential_form_schemas:
if credential_form_schema.type == FormType.SECRET_INPUT:
if credential_form_schema.type.value == FormType.SECRET_INPUT.value:
secret_input_form_variables.append(credential_form_schema.variable)
return secret_input_form_variables

View File

@ -0,0 +1,15 @@
from typing import TYPE_CHECKING, Any, cast
if TYPE_CHECKING:
from core.datasource.datasource_file_manager import DatasourceFileManager
datasource_file_manager: dict[str, Any] = {"manager": None}
class DatasourceFileParser:
@staticmethod
def get_datasource_file_manager() -> "DatasourceFileManager":
return cast("DatasourceFileManager", datasource_file_manager["manager"])

View File

@ -20,6 +20,7 @@ class FileTransferMethod(StrEnum):
REMOTE_URL = "remote_url"
LOCAL_FILE = "local_file"
TOOL_FILE = "tool_file"
DATASOURCE_FILE = "datasource_file"
@staticmethod
def value_of(value):

View File

@ -97,7 +97,11 @@ def to_prompt_message_content(
def download(f: File, /):
if f.transfer_method in (FileTransferMethod.TOOL_FILE, FileTransferMethod.LOCAL_FILE):
if f.transfer_method in (
FileTransferMethod.TOOL_FILE,
FileTransferMethod.LOCAL_FILE,
FileTransferMethod.DATASOURCE_FILE,
):
return _download_file_content(f._storage_key)
elif f.transfer_method == FileTransferMethod.REMOTE_URL:
response = ssrf_proxy.get(f.remote_url, follow_redirects=True)

Some files were not shown because too many files have changed in this diff.