Mirror of https://github.com/langgenius/dify.git

Commit 0a05435b37: merge hitl
@@ -245,6 +245,7 @@ class Jieba(BaseKeyword):
             segment = pre_segment_data["segment"]
             if pre_segment_data["keywords"]:
                 segment.keywords = pre_segment_data["keywords"]
+                assert segment.index_node_id
                 keyword_table = self._add_text_to_keyword_table(
                     keyword_table or {}, segment.index_node_id, pre_segment_data["keywords"]
                 )
@@ -253,6 +254,7 @@ class Jieba(BaseKeyword):

                 keywords = keyword_table_handler.extract_keywords(segment.content, keyword_number)
                 segment.keywords = list(keywords)
+                assert segment.index_node_id
                 keyword_table = self._add_text_to_keyword_table(
                     keyword_table or {}, segment.index_node_id, list(keywords)
                 )
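Editor's note: the `assert segment.index_node_id` lines added above exist to narrow the field's type for the checker — `index_node_id` is declared `str | None` on the model (see the DocumentSegment hunk below), while `_add_text_to_keyword_table` needs a plain `str`. A minimal sketch of the pattern, with illustrative names that are not from this diff:

    def add_to_keyword_table(node_id: str) -> None:
        # Stand-in for an API that requires a non-None node id.
        print(f"indexing {node_id}")

    def index_segment(index_node_id: str | None) -> None:
        # Without the assert, a type checker flags the call below,
        # because index_node_id may be None.
        assert index_node_id
        # After the assert, index_node_id is narrowed to str.
        add_to_keyword_table(index_node_id)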
@@ -1,5 +1,6 @@
 import concurrent.futures
 import logging
+from collections.abc import Sequence
 from concurrent.futures import ThreadPoolExecutor
 from typing import Any, NotRequired, TypedDict

@@ -502,7 +503,7 @@ class RetrievalService:
         index_node_ids = [i for i in index_node_ids if i]

         segment_ids: list[str] = []
-        index_node_segments: list[DocumentSegment] = []
+        index_node_segments: Sequence[DocumentSegment] = []
         segments: list[DocumentSegment] = []
         attachment_map: dict[str, list[AttachmentInfoDict]] = {}
         child_chunk_map: dict[str, list[ChildChunk]] = {}
@@ -544,8 +545,9 @@ class RetrievalService:
                     DocumentSegment.status == "completed",
                     DocumentSegment.index_node_id.in_(index_node_ids),
                 )
-                index_node_segments = session.execute(document_segment_stmt).scalars().all()  # type: ignore
+                index_node_segments = session.execute(document_segment_stmt).scalars().all()
                 for index_node_segment in index_node_segments:
+                    assert index_node_segment.index_node_id
                     doc_segment_map[index_node_segment.id] = [index_node_segment.index_node_id]

             if segment_ids:
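Editor's note: the annotation changes from `list[DocumentSegment]` to `Sequence[DocumentSegment]`, which is presumably why the `# type: ignore` on the `session.execute(...).scalars().all()` line can be dropped — in SQLAlchemy 2.x, `ScalarResult.all()` is annotated as returning a `Sequence`, not a `list`. A sketch under that assumption:

    from collections.abc import Sequence

    from sqlalchemy import Select
    from sqlalchemy.orm import Session

    def load_all(session: Session, stmt: Select) -> Sequence:
        # ScalarResult.all() is typed as Sequence[T] in SQLAlchemy 2.x, so a
        # Sequence annotation matches the return type exactly and avoids a
        # cast or "# type: ignore".
        return session.execute(stmt).scalars().all()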
@@ -50,6 +50,7 @@ class DatasetDocumentStore:

        output = {}
        for document_segment in document_segments:
+            assert document_segment.index_node_id
            doc_id = document_segment.index_node_id
            output[doc_id] = Document(
                page_content=document_segment.content,
@@ -103,7 +104,7 @@ class DatasetDocumentStore:

            if not segment_document:
                max_position += 1
-
+                assert self._document_id
                segment_document = DocumentSegment(
                    tenant_id=self._dataset.tenant_id,
                    dataset_id=self._dataset.id,
@@ -84,7 +84,7 @@ class IndexProcessor:
            select(DocumentSegment).where(DocumentSegment.document_id == original_document_id)
        ).all()
        if segments:
-            index_node_ids = [segment.index_node_id for segment in segments]
+            index_node_ids = [segment.index_node_id for segment in segments if segment.index_node_id]

            indexing_start_at = time.perf_counter()
            # delete from vector index
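Editor's note: this hunk is one of many in the commit that change `[segment.index_node_id for segment in segments]` to a comprehension with a trailing `if`. Since `index_node_id` is now `str | None`, the filter both narrows the element type to `str` and keeps `None` out of the ids handed to `index_processor.clean()`. In isolation:

    class Seg:
        def __init__(self, index_node_id: str | None):
            self.index_node_id = index_node_id

    segments = [Seg("node-1"), Seg(None), Seg("node-2")]

    # Old form: element type is str | None, and a None can leak through.
    old_ids = [s.index_node_id for s in segments]

    # New form: the trailing "if" drops falsy ids, so the list is all str.
    new_ids = [s.index_node_id for s in segments if s.index_node_id]

    assert old_ids == ["node-1", None, "node-2"]
    assert new_ids == ["node-1", "node-2"]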
@@ -8,7 +8,6 @@ import os
 import pickle
 import re
 import time
-from collections.abc import Sequence
 from datetime import datetime
 from json import JSONDecodeError
 from typing import Any, ClassVar, TypedDict, cast
@@ -831,7 +830,7 @@ class Document(Base):
    )


-class DocumentSegment(Base):
+class DocumentSegment(TypeBase):
    __tablename__ = "document_segments"
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="document_segment_pkey"),
@@ -844,35 +843,40 @@ class DocumentSegment(Base):
    )

-    # initial fields
-    id = mapped_column(StringUUID, nullable=False, default=lambda: str(uuid4()))
-    tenant_id = mapped_column(StringUUID, nullable=False)
-    dataset_id = mapped_column(StringUUID, nullable=False)
-    document_id = mapped_column(StringUUID, nullable=False)
+    id: Mapped[str] = mapped_column(StringUUID, nullable=False, default_factory=lambda: str(uuid4()), init=False)
+    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
+    dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
+    document_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    position: Mapped[int]
-    content = mapped_column(LongText, nullable=False)
-    answer = mapped_column(LongText, nullable=True)
+    content: Mapped[str] = mapped_column(LongText, nullable=False)
    word_count: Mapped[int]
    tokens: Mapped[int]

-    # indexing fields
-    keywords = mapped_column(sa.JSON, nullable=True)
-    index_node_id = mapped_column(String(255), nullable=True)
-    index_node_hash = mapped_column(String(255), nullable=True)
-
+    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
+    # basic fields
+    # indexing fields
+    index_node_id: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
+    index_node_hash: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
+    enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"), default=True)
+    answer: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
+    keywords: Mapped[Any] = mapped_column(sa.JSON, nullable=True, default=None)
+    disabled_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None)
+    disabled_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
+    status: Mapped[SegmentStatus] = mapped_column(
+        EnumText(SegmentStatus, length=255), server_default=sa.text("'waiting'"), default=SegmentStatus.WAITING
+    )
+    created_at: Mapped[datetime] = mapped_column(
+        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
+    )
+    updated_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
+    updated_at: Mapped[datetime] = mapped_column(
+        DateTime, nullable=False, server_default=func.current_timestamp(), init=False
+    )
+    indexing_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None)
+    completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None)
+    error: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
+    stopped_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None)
+    hit_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0)
-    enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"))
-    disabled_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
-    disabled_by = mapped_column(StringUUID, nullable=True)
-    status: Mapped[str] = mapped_column(EnumText(SegmentStatus, length=255), server_default=sa.text("'waiting'"))
-    created_by = mapped_column(StringUUID, nullable=False)
-    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
-    updated_by = mapped_column(StringUUID, nullable=True)
-    updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
-    indexing_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
-    completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
-    error = mapped_column(LongText, nullable=True)
-    stopped_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)

    @property
    def dataset(self):
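Editor's note: the base class changes from `Base` to `TypeBase`. Judging by the `init=False`, `default_factory=...`, and `default=None` keywords introduced in the same hunk, `TypeBase` is a `MappedAsDataclass`-style declarative base, which generates a dataclass `__init__` for the model: `init=False` fields are excluded from it, and defaulted fields become optional keyword arguments. That would explain why the test hunks later in this commit delete explicit `id=...` arguments from `DocumentSegment(...)` calls. A self-contained sketch of the mechanism — this `TypeBase` is a local stand-in, not the project's actual class:

    from uuid import uuid4

    from sqlalchemy import String
    from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column

    class TypeBase(MappedAsDataclass, DeclarativeBase):
        """Stand-in declarative base with a generated dataclass __init__."""

    class Segment(TypeBase):
        __tablename__ = "segments"

        # init=False: generated automatically, not accepted by __init__.
        id: Mapped[str] = mapped_column(
            String(36), primary_key=True, default_factory=lambda: str(uuid4()), init=False
        )
        content: Mapped[str] = mapped_column(String(255))
        # default=None makes the argument optional in __init__.
        index_node_id: Mapped[str | None] = mapped_column(String(255), default=None)

    seg = Segment(content="hello")  # passing id= here would be a TypeError
    assert seg.id  # populated by the default_factory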
@@ -899,7 +903,7 @@ class DocumentSegment(Base):
    )

    @property
-    def child_chunks(self) -> Sequence[Any]:
+    def child_chunks(self):
        if not self.document:
            return []
        process_rule = self.document.dataset_process_rule
@@ -914,7 +918,7 @@ class DocumentSegment(Base):
            return child_chunks or []
        return []

-    def get_child_chunks(self) -> Sequence[Any]:
+    def get_child_chunks(self):
        if not self.document:
            return []
        process_rule = self.document.dataset_process_rule
@@ -1,6 +1,6 @@
 [project]
 name = "dify-api"
-version = "1.14.0"
+version = "1.14.1"
 requires-python = "~=3.12.0"

 dependencies = [
@@ -111,6 +111,7 @@ class VectorService:
                    "dataset_id": segment.dataset_id,
                },
            )
+            assert segment.index_node_id
            if dataset.indexing_technique == IndexTechniqueType.HIGH_QUALITY:
                # update vector index
                vector = Vector(dataset=dataset)
@@ -138,6 +139,7 @@ class VectorService:
        regenerate: bool = False,
    ):
        index_processor = IndexProcessorFactory(dataset.doc_form).init_index_processor()
+        assert segment.index_node_id
        if regenerate:
            # delete child chunks
            index_processor.clean(dataset, [segment.index_node_id], with_keywords=True, delete_child_chunks=True)
@@ -50,7 +50,7 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
        ).all()

        if segments:
-            index_node_ids = [segment.index_node_id for segment in segments]
+            index_node_ids = [segment.index_node_id for segment in segments if segment.index_node_id]
            segment_ids = [segment.id for segment in segments]

            # Collect image file IDs from segment content
@@ -19,6 +19,7 @@ from graphon.model_runtime.entities.model_entities import ModelType
 from libs import helper
 from libs.datetime_utils import naive_utc_now
 from models.dataset import Dataset, Document, DocumentSegment
+from models.enums import SegmentStatus
 from models.model import UploadFile
 from services.vector_service import VectorService

@@ -156,7 +157,7 @@ def batch_create_segment_to_index_task(
                tokens=tokens,
                created_by=user_id,
                indexing_at=naive_utc_now(),
-                status="completed",
+                status=SegmentStatus.COMPLETED,
                completed_at=naive_utc_now(),
            )
            if document_config["doc_form"] == IndexStructureType.QA_INDEX:
@@ -53,7 +53,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
            binding_ids = [binding.id for binding, _ in attachments_with_bindings]
            total_attachment_files.extend([attachment_file.key for _, attachment_file in attachments_with_bindings])

-            index_node_ids = [segment.index_node_id for segment in segments]
+            index_node_ids = [segment.index_node_id for segment in segments if segment.index_node_id]
            segment_contents = [segment.content for segment in segments]
        except Exception:
            logger.exception("Cleaned document when document deleted failed")
@@ -38,7 +38,7 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str):

        for document_id in document_ids:
            segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
-            total_index_node_ids.extend([segment.index_node_id for segment in segments])
+            total_index_node_ids.extend([segment.index_node_id for segment in segments if segment.index_node_id])

            # Wrap vector / keyword index cleanup in try/except so that a transient
            # failure here (e.g. billing API hiccup propagated via FeatureService when
@@ -9,6 +9,7 @@ from core.db.session_factory import session_factory
 from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
 from extensions.ext_redis import redis_client
 from models.dataset import DocumentSegment
+from models.enums import SegmentStatus

 logger = logging.getLogger(__name__)

@@ -30,7 +31,7 @@ def disable_segment_from_index_task(segment_id: str):
            logger.info(click.style(f"Segment not found: {segment_id}", fg="red"))
            return

-        if segment.status != "completed":
+        if segment.status != SegmentStatus.COMPLETED:
            logger.info(click.style(f"Segment is not completed, disable is not allowed: {segment_id}", fg="red"))
            return

@@ -59,6 +60,7 @@ def disable_segment_from_index_task(segment_id: str):

        index_type = dataset_document.doc_form
        index_processor = IndexProcessorFactory(index_type).init_index_processor()
+        assert segment.index_node_id
        index_processor.clean(dataset, [segment.index_node_id])

        # Disable summary index for this segment
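Editor's note: replacing the literal `"completed"` with `SegmentStatus.COMPLETED` is only behavior-preserving if `SegmentStatus` is a string-valued enum; the `EnumText(SegmentStatus, length=255)` column type in the model hunk above suggests it is. A sketch of why a `StrEnum` member compares equal to its literal — this enum definition is illustrative, since the project's actual `SegmentStatus` is not shown in the diff:

    from enum import StrEnum

    class SegmentStatus(StrEnum):
        WAITING = "waiting"
        INDEXING = "indexing"
        COMPLETED = "completed"
        ERROR = "error"

    status = SegmentStatus.COMPLETED
    # A StrEnum member is a str, so both comparisons hold, and code that
    # previously compared against "completed" keeps working.
    assert status == "completed"
    assert status == SegmentStatus.COMPLETED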
@@ -55,7 +55,7 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen
            return

        try:
-            index_node_ids = [segment.index_node_id for segment in segments]
+            index_node_ids = [segment.index_node_id for segment in segments if segment.index_node_id]
            if dataset.is_multimodal:
                segment_ids = [segment.id for segment in segments]
                segment_attachment_bindings = session.scalars(
@@ -69,7 +69,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
        index_type = document.doc_form

        segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
-        index_node_ids = [segment.index_node_id for segment in segments]
+        index_node_ids = [segment.index_node_id for segment in segments if segment.index_node_id]

        # Get credentials from datasource provider
        datasource_provider_service = DatasourceProviderService()
@@ -45,7 +45,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):

        index_type = document.doc_form
        segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
-        index_node_ids = [segment.index_node_id for segment in segments]
+        index_node_ids = [segment.index_node_id for segment in segments if segment.index_node_id]

        clean_success = False
        try:
@@ -137,7 +137,7 @@ def _duplicate_document_indexing_task(dataset_id: str, document_ids: Sequence[st
                    select(DocumentSegment).where(DocumentSegment.document_id == document.id)
                ).all()
                if segments:
-                    index_node_ids = [segment.index_node_id for segment in segments]
+                    index_node_ids = [segment.index_node_id for segment in segments if segment.index_node_id]

                    # delete from vector index
                    index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
@@ -61,7 +61,7 @@ def remove_document_from_index_task(document_id: str):
        except Exception as e:
            logger.warning("Failed to disable summaries for document %s: %s", document.id, str(e))

-        index_node_ids = [segment.index_node_id for segment in segments]
+        index_node_ids = [segment.index_node_id for segment in segments if segment.index_node_id]
        if index_node_ids:
            try:
                index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False)
@@ -85,7 +85,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str], user_
                select(DocumentSegment).where(DocumentSegment.document_id == document_id)
            ).all()
            if segments:
-                index_node_ids = [segment.index_node_id for segment in segments]
+                index_node_ids = [segment.index_node_id for segment in segments if segment.index_node_id]
                # delete from vector index
                index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
@@ -70,7 +70,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):

        segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
        if segments:
-            index_node_ids = [segment.index_node_id for segment in segments]
+            index_node_ids = [segment.index_node_id for segment in segments if segment.index_node_id]
            # delete from vector index
            index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
@@ -13,9 +13,9 @@ from uuid import uuid4
 from sqlalchemy.orm import Session

 from core.rag.index_processor.constant.index_type import IndexTechniqueType
-from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole, TenantStatus
 from models.dataset import Dataset, DatasetPermissionEnum, Document, DocumentSegment
-from models.enums import DataSourceType, DocumentCreatedFrom
+from models.enums import DataSourceType, DocumentCreatedFrom, SegmentStatus
 from services.dataset_service import SegmentService

@@ -35,13 +35,13 @@ class SegmentServiceTestDataFactory:
                email=f"{uuid4()}@example.com",
                name=f"user-{uuid4()}",
                interface_language="en-US",
-                status="active",
+                status=AccountStatus.ACTIVE,
            )
            db_session_with_containers.add(account)
            db_session_with_containers.commit()

        if tenant is None:
-            tenant = Tenant(name=f"tenant-{uuid4()}", status="normal")
+            tenant = Tenant(name=f"tenant-{uuid4()}", status=TenantStatus.NORMAL)
            db_session_with_containers.add(tenant)
            db_session_with_containers.commit()

@@ -103,7 +103,7 @@ class SegmentServiceTestDataFactory:
        created_by: str,
        position: int = 1,
        content: str = "Test content",
-        status: str = "completed",
+        status: SegmentStatus = SegmentStatus.COMPLETED,
        word_count: int = 10,
        tokens: int = 15,
    ) -> DocumentSegment:
@@ -203,7 +203,7 @@ class TestSegmentServiceGetSegments:
            document_id=document.id,
            created_by=owner.id,
            position=1,
-            status="completed",
+            status=SegmentStatus.COMPLETED,
        )
        SegmentServiceTestDataFactory.create_segment(
            db_session_with_containers,
@@ -212,7 +212,7 @@ class TestSegmentServiceGetSegments:
            document_id=document.id,
            created_by=owner.id,
            position=2,
-            status="indexing",
+            status=SegmentStatus.INDEXING,
        )
        SegmentServiceTestDataFactory.create_segment(
            db_session_with_containers,
@@ -221,7 +221,7 @@ class TestSegmentServiceGetSegments:
            document_id=document.id,
            created_by=owner.id,
            position=3,
-            status="waiting",
+            status=SegmentStatus.WAITING,
        )

        # Act
@@ -257,7 +257,7 @@ class TestSegmentServiceGetSegments:
            document_id=document.id,
            created_by=owner.id,
            position=1,
-            status="completed",
+            status=SegmentStatus.COMPLETED,
        )
        SegmentServiceTestDataFactory.create_segment(
            db_session_with_containers,
@@ -266,7 +266,7 @@ class TestSegmentServiceGetSegments:
            document_id=document.id,
            created_by=owner.id,
            position=2,
-            status="indexing",
+            status=SegmentStatus.INDEXING,
        )

        # Act
@@ -415,7 +415,7 @@ class TestSegmentServiceGetSegments:
            document_id=document.id,
            created_by=owner.id,
            position=1,
-            status="completed",
+            status=SegmentStatus.COMPLETED,
            content="This is important information",
        )
        SegmentServiceTestDataFactory.create_segment(
@@ -425,7 +425,7 @@ class TestSegmentServiceGetSegments:
            document_id=document.id,
            created_by=owner.id,
            position=2,
-            status="indexing",
+            status=SegmentStatus.INDEXING,
            content="This is also important",
        )
        SegmentServiceTestDataFactory.create_segment(
@@ -435,7 +435,7 @@ class TestSegmentServiceGetSegments:
            document_id=document.id,
            created_by=owner.id,
            position=3,
-            status="completed",
+            status=SegmentStatus.COMPLETED,
            content="This is irrelevant",
        )

@@ -477,7 +477,7 @@ class TestSegmentServiceGetSegments:
            document_id=document.id,
            created_by=owner.id,
            position=1,
-            status="completed",
+            status=SegmentStatus.COMPLETED,
        )
        SegmentServiceTestDataFactory.create_segment(
            db_session_with_containers,
@@ -486,7 +486,7 @@ class TestSegmentServiceGetSegments:
            document_id=document.id,
            created_by=owner.id,
            position=2,
-            status="waiting",
+            status=SegmentStatus.WAITING,
        )

        # Act
@@ -128,7 +128,6 @@ class TestAddDocumentToIndexTask:

        for i in range(3):
            segment = DocumentSegment(
-                id=fake.uuid4(),
                tenant_id=document.tenant_id,
                dataset_id=dataset.id,
                document_id=document.id,
@@ -451,7 +450,6 @@ class TestAddDocumentToIndexTask:
        segments = []
        for i in range(3):
            segment = DocumentSegment(
-                id=fake.uuid4(),
                tenant_id=document.tenant_id,
                dataset_id=dataset.id,
                document_id=document.id,
@@ -630,7 +628,6 @@ class TestAddDocumentToIndexTask:

        # Segment 1: Should be processed (enabled=False, status=SegmentStatus.COMPLETED)
        segment1 = DocumentSegment(
-            id=fake.uuid4(),
            tenant_id=document.tenant_id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -650,7 +647,6 @@ class TestAddDocumentToIndexTask:
        # Segment 2: Should be processed (enabled=True, status=SegmentStatus.COMPLETED)
        # Note: Implementation doesn't filter by enabled status, only by status=SegmentStatus.COMPLETED
        segment2 = DocumentSegment(
-            id=fake.uuid4(),
            tenant_id=document.tenant_id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -669,7 +665,6 @@ class TestAddDocumentToIndexTask:

        # Segment 3: Should NOT be processed (enabled=False, status="processing")
        segment3 = DocumentSegment(
-            id=fake.uuid4(),
            tenant_id=document.tenant_id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -688,7 +683,6 @@ class TestAddDocumentToIndexTask:

        # Segment 4: Should be processed (enabled=False, status=SegmentStatus.COMPLETED)
        segment4 = DocumentSegment(
-            id=fake.uuid4(),
            tenant_id=document.tenant_id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -177,7 +177,6 @@ class TestBatchCleanDocumentTask:
        fake = Faker()

        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=account.current_tenant.id,
            dataset_id=document.dataset_id,
            document_id=document.id,
@@ -290,10 +289,9 @@ class TestBatchCleanDocumentTask:
        account = self._create_test_account(db_session_with_containers)
        dataset = self._create_test_dataset(db_session_with_containers, account)
        document = self._create_test_document(db_session_with_containers, dataset, account)
-
+        assert account.current_tenant
        # Create segment with simple content (no image references)
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=account.current_tenant.id,
            dataset_id=document.dataset_id,
            document_id=document.id,
@@ -692,9 +690,9 @@ class TestBatchCleanDocumentTask:

        # Create multiple segments for the document
        segments = []
+        assert account.current_tenant
        for i in range(3):
            segment = DocumentSegment(
-                id=str(uuid.uuid4()),
                tenant_id=account.current_tenant.id,
                dataset_id=document.dataset_id,
                document_id=document.id,
@@ -220,7 +220,6 @@ class TestCleanDatasetTask:
            DocumentSegment: Created document segment instance
        """
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -232,8 +231,6 @@ class TestCleanDatasetTask:
            status=SegmentStatus.COMPLETED,
            index_node_id=str(uuid.uuid4()),
            index_node_hash="test_hash",
-            created_at=datetime.now(),
-            updated_at=datetime.now(),
        )

        db_session_with_containers.add(segment)
@@ -614,7 +611,6 @@ class TestCleanDatasetTask:
        """

        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -626,8 +622,6 @@ class TestCleanDatasetTask:
            status=SegmentStatus.COMPLETED,
            index_node_id=str(uuid.uuid4()),
            index_node_hash="test_hash",
-            created_at=datetime.now(),
-            updated_at=datetime.now(),
        )

        db_session_with_containers.add(segment)
@@ -729,8 +723,6 @@ class TestCleanDatasetTask:
                type=DatasetMetadataType.STRING,
                created_by=account.id,
            )
-            metadata.id = str(uuid.uuid4())
-            metadata.created_at = datetime.now()
            metadata_items.append(metadata)

            # Create binding for each metadata item
@@ -741,8 +733,6 @@ class TestCleanDatasetTask:
                document_id=documents[i % len(documents)].id,
                created_by=account.id,
            )
-            binding.id = str(uuid.uuid4())
-            binding.created_at = datetime.now()
            bindings.append(binding)

        db_session_with_containers.add_all(metadata_items)
@@ -946,7 +936,6 @@ class TestCleanDatasetTask:
        long_content = "Very long content " * 100  # Long content within reasonable limits
        segment_content = f"Segment with special chars: {special_content}\n{long_content}"
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -958,8 +947,6 @@ class TestCleanDatasetTask:
            status=SegmentStatus.COMPLETED,
            index_node_id=str(uuid.uuid4()),
            index_node_hash="test_hash_" + "x" * 50,  # Long hash within limits
-            created_at=datetime.now(),
-            updated_at=datetime.now(),
        )
        db_session_with_containers.add(segment)
        db_session_with_containers.commit()
@@ -123,11 +123,10 @@ class TestCleanNotionDocumentTask:
            db_session_with_containers.add(document)
            db_session_with_containers.flush()
            document_ids.append(document.id)
-
+            assert tenant
            # Create segments for each document
            for j in range(2):
                segment = DocumentSegment(
-                    id=str(uuid.uuid4()),
                    tenant_id=tenant.id,
                    dataset_id=dataset.id,
                    document_id=document.id,
@@ -303,10 +302,9 @@ class TestCleanNotionDocumentTask:
        )
        db_session_with_containers.add(document)
        db_session_with_containers.flush()
-
+        assert tenant
        # Create test segment
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -390,12 +388,11 @@ class TestCleanNotionDocumentTask:
        )
        db_session_with_containers.add(document)
        db_session_with_containers.flush()
-
+        assert tenant
        # Create segments without index_node_ids
        segments = []
        for i in range(3):
            segment = DocumentSegment(
-                id=str(uuid.uuid4()),
                tenant_id=tenant.id,
                dataset_id=dataset.id,
                document_id=document.id,
@@ -484,11 +481,10 @@ class TestCleanNotionDocumentTask:
            db_session_with_containers.add(document)
            db_session_with_containers.flush()
            documents.append(document)
-
+            assert tenant
            # Create segments for each document
            for j in range(2):
                segment = DocumentSegment(
-                    id=str(uuid.uuid4()),
                    tenant_id=tenant.id,
                    dataset_id=dataset.id,
                    document_id=document.id,
@@ -612,10 +608,9 @@ class TestCleanNotionDocumentTask:
        segment_statuses = [SegmentStatus.WAITING, SegmentStatus.INDEXING, SegmentStatus.COMPLETED, SegmentStatus.ERROR]
        segments = []
        index_node_ids = []
-
+        assert tenant
        for i, status in enumerate(segment_statuses):
            segment = DocumentSegment(
-                id=str(uuid.uuid4()),
                tenant_id=tenant.id,
                dataset_id=dataset.id,
                document_id=document.id,
@@ -718,10 +713,9 @@ class TestCleanNotionDocumentTask:
        )
        db_session_with_containers.add(document)
        db_session_with_containers.flush()
-
+        assert tenant
        # Create segment
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -818,12 +812,11 @@ class TestCleanNotionDocumentTask:
            db_session_with_containers.add(document)
            db_session_with_containers.flush()
            documents.append(document)
-
+            assert tenant
            # Create multiple segments for each document
            num_segments_per_doc = 5
            for j in range(num_segments_per_doc):
                segment = DocumentSegment(
-                    id=str(uuid.uuid4()),
                    tenant_id=tenant.id,
                    dataset_id=dataset.id,
                    document_id=document.id,
@@ -940,7 +933,6 @@ class TestCleanNotionDocumentTask:
            # Create segments for each document
            for j in range(3):
                segment = DocumentSegment(
-                    id=str(uuid.uuid4()),
                    tenant_id=account.current_tenant.id,
                    dataset_id=dataset.id,
                    document_id=document.id,
@@ -1066,11 +1058,10 @@ class TestCleanNotionDocumentTask:
            db_session_with_containers.add(document)
            db_session_with_containers.flush()
            documents.append(document)
-
+            assert tenant
            # Create segments for each document
            for j in range(2):
                segment = DocumentSegment(
-                    id=str(uuid.uuid4()),
                    tenant_id=tenant.id,
                    dataset_id=dataset.id,
                    document_id=document.id,
@@ -1184,10 +1175,9 @@ class TestCleanNotionDocumentTask:
        # Create segments with metadata
        segments = []
        index_node_ids = []
-
+        assert tenant
        for i in range(3):
            segment = DocumentSegment(
-                id=str(uuid.uuid4()),
                tenant_id=tenant.id,
                dataset_id=dataset.id,
                document_id=document.id,
@@ -90,7 +90,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -150,7 +149,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -202,7 +200,6 @@ class TestDealDatasetVectorIndexTask:

        # Create segments
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -253,7 +250,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset with parent-child index
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -305,7 +301,6 @@ class TestDealDatasetVectorIndexTask:

        # Create segments
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -371,7 +366,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset without documents
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -403,7 +397,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -461,7 +454,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset without documents
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -494,7 +486,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -546,7 +537,6 @@ class TestDealDatasetVectorIndexTask:

        # Create segments
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -592,7 +582,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset with custom index type
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -624,7 +613,6 @@ class TestDealDatasetVectorIndexTask:

        # Create segments
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -670,7 +658,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset without doc_form (should use default)
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -702,7 +689,6 @@ class TestDealDatasetVectorIndexTask:

        # Create segments
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -748,7 +734,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -806,7 +791,6 @@ class TestDealDatasetVectorIndexTask:
        for i, document in enumerate(documents):
            for j in range(2):
                segment = DocumentSegment(
-                    id=str(uuid.uuid4()),
                    tenant_id=tenant.id,
                    dataset_id=dataset.id,
                    document_id=document.id,
@@ -832,6 +816,7 @@ class TestDealDatasetVectorIndexTask:
        updated_document = db_session_with_containers.scalar(
            select(Document).where(Document.id == document.id).limit(1)
        )
+        assert updated_document
        assert updated_document.indexing_status == IndexingStatus.COMPLETED

        # Verify index processor load was called multiple times
@@ -853,7 +838,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -905,7 +889,6 @@ class TestDealDatasetVectorIndexTask:

        # Create segments
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=document.id,
@@ -952,7 +935,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -1024,7 +1006,6 @@ class TestDealDatasetVectorIndexTask:

        # Create segments for enabled document only
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=enabled_document.id,
@@ -1075,7 +1056,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -1147,7 +1127,6 @@ class TestDealDatasetVectorIndexTask:

        # Create segments for active document only
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=active_document.id,
@@ -1198,7 +1177,6 @@ class TestDealDatasetVectorIndexTask:

        # Create dataset
        dataset = Dataset(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=100),
@@ -1270,7 +1248,6 @@ class TestDealDatasetVectorIndexTask:

        # Create segments for completed document only
        segment = DocumentSegment(
-            id=str(uuid.uuid4()),
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            document_id=completed_document.id,
@@ -209,26 +209,25 @@ class TestDeleteSegmentFromIndexTask:
        segments = []

        for i in range(count):
-            segment = DocumentSegment()
-            segment.id = fake.uuid4()
-            segment.tenant_id = document.tenant_id
-            segment.dataset_id = document.dataset_id
-            segment.document_id = document.id
-            segment.position = i + 1
-            segment.content = f"Test segment content {i + 1}: {fake.text(max_nb_chars=200)}"
-            segment.answer = f"Test segment answer {i + 1}: {fake.text(max_nb_chars=100)}"
-            segment.word_count = fake.random_int(min=10, max=100)
-            segment.tokens = fake.random_int(min=5, max=50)
-            segment.keywords = [fake.word() for _ in range(3)]
-            segment.index_node_id = f"node_{fake.uuid4()}"
-            segment.index_node_hash = fake.sha256()
-            segment.hit_count = 0
-            segment.enabled = True
-            segment.status = SegmentStatus.COMPLETED
-            segment.created_by = account.id
-            segment.created_at = fake.date_time_this_year()
-            segment.updated_by = account.id
-            segment.updated_at = segment.created_at
+            created_at = fake.date_time_this_year()
+            segment = DocumentSegment(
+                tenant_id=document.tenant_id,
+                dataset_id=document.dataset_id,
+                document_id=document.id,
+                position=i + 1,
+                content=f"Test segment content {i + 1}: {fake.text(max_nb_chars=200)}",
+                answer=f"Test segment answer {i + 1}: {fake.text(max_nb_chars=100)}",
+                word_count=fake.random_int(min=10, max=100),
+                tokens=fake.random_int(min=5, max=50),
+                keywords=[fake.word() for _ in range(3)],
+                index_node_id=f"node_{fake.uuid4()}",
+                index_node_hash=fake.sha256(),
+                hit_count=0,
+                enabled=True,
+                status=SegmentStatus.COMPLETED,
+                created_by=account.id,
+                updated_by=account.id,
+            )

            db_session_with_containers.add(segment)
            segments.append(segment)
@@ -159,7 +159,7 @@ class TestDisableSegmentFromIndexTask:
        dataset: Dataset,
        tenant: Tenant,
        account: Account,
-        status: str = "completed",
+        status: SegmentStatus = SegmentStatus.COMPLETED,
        enabled: bool = True,
    ) -> DocumentSegment:
        """
@@ -185,30 +185,31 @@ class TestDisableSegmentsFromIndexTask:
        segments = []

        for i in range(count):
-            segment = DocumentSegment()
-            segment.id = fake.uuid4()
-            segment.tenant_id = dataset.tenant_id
-            segment.dataset_id = dataset.id
-            segment.document_id = document.id
-            segment.position = i + 1
-            segment.content = f"Test segment content {i + 1}: {fake.text(max_nb_chars=200)}"
-            segment.answer = f"Test answer {i + 1}" if i % 2 == 0 else None
-            segment.word_count = fake.random_int(min=10, max=100)
-            segment.tokens = fake.random_int(min=5, max=50)
-            segment.keywords = [fake.word() for _ in range(3)]
-            segment.index_node_id = f"node_{segment.id}"
-            segment.index_node_hash = fake.sha256()
-            segment.hit_count = 0
-            segment.enabled = True
-            segment.disabled_at = None
-            segment.disabled_by = None
-            segment.status = SegmentStatus.COMPLETED
-            segment.created_by = account.id
-            segment.updated_by = account.id
-            segment.indexing_at = fake.date_time_this_year()
-            segment.completed_at = fake.date_time_this_year()
-            segment.error = None
-            segment.stopped_at = None
+            id = fake.uuid4()
+            segment = DocumentSegment(
+                tenant_id=dataset.tenant_id,
+                dataset_id=dataset.id,
+                document_id=document.id,
+                position=i + 1,
+                content=f"Test segment content {i + 1}: {fake.text(max_nb_chars=200)}",
+                answer=f"Test answer {i + 1}" if i % 2 == 0 else None,
+                word_count=fake.random_int(min=10, max=100),
+                tokens=fake.random_int(min=5, max=50),
+                keywords=[fake.word() for _ in range(3)],
+                index_node_id=f"node_{id}",
+                index_node_hash=fake.sha256(),
+                hit_count=0,
+                enabled=True,
+                disabled_at=None,
+                disabled_by=None,
+                status=SegmentStatus.COMPLETED,
+                created_by=account.id,
+                updated_by=account.id,
+                indexing_at=fake.date_time_this_year(),
+                completed_at=fake.date_time_this_year(),
+                error=None,
+                stopped_at=None,
+            )

            segments.append(segment)

@@ -175,7 +175,6 @@ class TestDuplicateDocumentIndexingTasks:
        for document in documents:
            for i in range(segments_per_doc):
                segment = DocumentSegment(
-                    id=fake.uuid4(),
                    tenant_id=dataset.tenant_id,
                    dataset_id=dataset.id,
                    document_id=document.id,
@@ -139,7 +139,6 @@ class TestEnableSegmentsToIndexTask:
        for i in range(count):
            text = fake.text(max_nb_chars=200)
            segment = DocumentSegment(
-                id=fake.uuid4(),
                tenant_id=document.tenant_id,
                dataset_id=dataset.id,
                document_id=document.id,
@@ -282,7 +282,6 @@ class TestSegmentServiceQueries:

    def test_get_segment_by_id_returns_only_document_segment_instances(self):
        segment = DocumentSegment(
-            id="segment-1",
            tenant_id="tenant-1",
            dataset_id="dataset-1",
            document_id="doc-1",
@@ -292,7 +291,7 @@ class TestSegmentServiceQueries:
            tokens=2,
            created_by="user-1",
        )

+        segment.id = "segment-1"
        with patch("services.dataset_service.db") as mock_db:
            mock_db.session.scalar.return_value = segment
            result = SegmentService.get_segment_by_id("segment-1", "tenant-1")
@@ -307,7 +306,6 @@ class TestSegmentServiceQueries:

    def test_get_segments_by_document_and_dataset_returns_scalars_result(self):
        segment = DocumentSegment(
-            id="segment-1",
            tenant_id="tenant-1",
            dataset_id="dataset-1",
            document_id="doc-1",
@@ -318,6 +316,7 @@ class TestSegmentServiceQueries:
            created_by="user-1",
        )

+        segment.id = "segment-1"
        with patch("services.dataset_service.db") as mock_db:
            mock_db.session.scalars.return_value.all.return_value = [segment]

@@ -461,6 +460,7 @@ class TestSegmentServiceMutations:
        vector_service.create_segments_vector.side_effect = RuntimeError("vector failed")

        result = SegmentService.multi_create_segment(segments, document, dataset)
+        assert result

        assert len(result) == 2
        assert [segment.position for segment in result] == [2, 3]
api/uv.lock (generated)
@@ -1379,7 +1379,7 @@ wheels = [

[[package]]
name = "dify-api"
-version = "1.14.0"
+version = "1.14.1"
source = { virtual = "." }
dependencies = [
    { name = "aliyun-log-python-sdk" },
@@ -220,7 +220,7 @@ services:
   # API service
   api:
     <<: *shared-api-worker-config
-    image: langgenius/dify-api:1.14.0
+    image: langgenius/dify-api:1.14.1
     environment:
       MODE: api
       SENTRY_DSN: ${API_SENTRY_DSN:-}
@@ -264,7 +264,7 @@ services:
   # WebSocket service for workflow collaboration.
   api_websocket:
     <<: *shared-api-worker-config
-    image: langgenius/dify-api:1.14.0
+    image: langgenius/dify-api:1.14.1
     profiles:
       - collaboration
     environment:
@@ -290,7 +290,7 @@ services:
   # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
   worker:
     <<: *shared-worker-config
-    image: langgenius/dify-api:1.14.0
+    image: langgenius/dify-api:1.14.1
     environment:
       MODE: worker
       SENTRY_DSN: ${API_SENTRY_DSN:-}
@@ -333,7 +333,7 @@ services:
   # Celery beat for scheduling periodic tasks.
   worker_beat:
     <<: *shared-worker-beat-config
-    image: langgenius/dify-api:1.14.0
+    image: langgenius/dify-api:1.14.1
     environment:
       MODE: beat
     depends_on:
@@ -366,7 +366,7 @@ services:

   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.14.0
+    image: langgenius/dify-web:1.14.1
     restart: always
     env_file:
       - path: ./envs/core-services/web.env
@@ -226,7 +226,7 @@ services:
   # API service
   api:
     <<: *shared-api-worker-config
-    image: langgenius/dify-api:1.14.0
+    image: langgenius/dify-api:1.14.1
     environment:
       MODE: api
       SENTRY_DSN: ${API_SENTRY_DSN:-}
@@ -270,7 +270,7 @@ services:
   # WebSocket service for workflow collaboration.
   api_websocket:
     <<: *shared-api-worker-config
-    image: langgenius/dify-api:1.14.0
+    image: langgenius/dify-api:1.14.1
     profiles:
       - collaboration
     environment:
@@ -296,7 +296,7 @@ services:
   # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
   worker:
     <<: *shared-worker-config
-    image: langgenius/dify-api:1.14.0
+    image: langgenius/dify-api:1.14.1
     environment:
       MODE: worker
       SENTRY_DSN: ${API_SENTRY_DSN:-}
@@ -339,7 +339,7 @@ services:
   # Celery beat for scheduling periodic tasks.
   worker_beat:
     <<: *shared-worker-beat-config
-    image: langgenius/dify-api:1.14.0
+    image: langgenius/dify-api:1.14.1
     environment:
       MODE: beat
     depends_on:
@@ -372,7 +372,7 @@ services:

   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.14.0
+    image: langgenius/dify-web:1.14.1
     restart: always
     env_file:
       - path: ./envs/core-services/web.env
@@ -86,6 +86,9 @@ vi.mock('@tanstack/react-query', async (importOriginal) => {
  const actual = await importOriginal<typeof import('@tanstack/react-query')>()
  return {
    ...actual,
+    useQuery: () => ({
+      data: [],
+    }),
    useInfiniteQuery: () => ({
      data: { pages: mockPages },
      isLoading: mockIsLoading,
@@ -1,20 +1,19 @@
-import type { ReactNode } from 'react'
-import type { DataSet } from '@/models/datasets'
-import { render, screen } from '@testing-library/react'
-import { IndexingType } from '@/app/components/datasets/create/step-two'
-import { ChunkingMode, DatasetPermission, DataSourceType } from '@/models/datasets'
-import { RETRIEVE_METHOD } from '@/types/app'
+import { render, screen, waitFor } from '@testing-library/react'
+import { usePathname, useRouter } from '@/next/navigation'
+import { useDatasetDetail, useDatasetRelatedApps } from '@/service/knowledge/use-dataset'
 import DatasetDetailLayout from '../layout-main'

-let mockPathname = '/datasets/test-dataset-id/documents'
-let mockDataset: DataSet | undefined
-let mockCanAccessSnippetsAndEvaluation = true
-
 const mockReplace = vi.fn()
 const mockSetAppSidebarExpand = vi.fn()
 const mockMutateDatasetRes = vi.fn()

 vi.mock('@/next/navigation', () => ({
-  usePathname: () => mockPathname,
+  usePathname: vi.fn(),
+  useRouter: vi.fn(),
 }))

+vi.mock('@/service/knowledge/use-dataset', () => ({
+  useDatasetDetail: vi.fn(),
+  useDatasetRelatedApps: vi.fn(),
+}))
+
 vi.mock('@/app/components/app/store', () => ({
@@ -23,208 +22,130 @@ vi.mock('@/app/components/app/store', () => ({
   }),
 }))

-vi.mock('@/hooks/use-breakpoints', () => ({
-  default: () => 'desktop',
-  MediaType: {
-    mobile: 'mobile',
-    desktop: 'desktop',
-  },
-}))
-
-vi.mock('@/context/event-emitter', () => ({
-  useEventEmitterContextContext: () => ({
-    eventEmitter: {
-      useSubscription: vi.fn(),
-    },
-  }),
-}))
-
 vi.mock('@/context/app-context', () => ({
   useAppContext: () => ({
     isCurrentWorkspaceDatasetOperator: false,
   }),
 }))

-vi.mock('@/hooks/use-snippet-and-evaluation-plan-access', () => ({
-  useSnippetAndEvaluationPlanAccess: () => ({
-    canAccess: mockCanAccessSnippetsAndEvaluation,
-    isReady: true,
+vi.mock('@/context/event-emitter', () => ({
+  useEventEmitterContextContext: () => ({
+    eventEmitter: undefined,
   }),
 }))

+vi.mock('@/hooks/use-breakpoints', () => ({
+  default: () => 'desktop',
+  MediaType: {
+    mobile: 'mobile',
+  },
+}))
+
 vi.mock('@/hooks/use-document-title', () => ({
   default: vi.fn(),
 }))

-vi.mock('@/service/knowledge/use-dataset', () => ({
-  useDatasetDetail: () => ({
-    data: mockDataset,
-    error: null,
-    refetch: mockMutateDatasetRes,
-  }),
-  useDatasetRelatedApps: () => ({
-    data: [],
-  }),
-}))
-
 vi.mock('@/app/components/app-sidebar', () => ({
-  default: ({
-    navigation,
-    children,
-  }: {
-    navigation: Array<{ name: string, href: string, disabled?: boolean }>
-    children?: ReactNode
-  }) => (
-    <div data-testid="app-sidebar">
-      {navigation.map(item => (
-        <button
-          key={item.href}
-          type="button"
-          disabled={item.disabled}
-        >
-          {item.name}
-        </button>
-      ))}
-      {children}
-    </div>
-  ),
+  default: () => <aside aria-label="dataset navigation" />,
 }))

 vi.mock('@/app/components/datasets/extra-info', () => ({
-  default: () => <div data-testid="dataset-extra-info" />,
+  default: () => <div />,
 }))

 vi.mock('@/app/components/base/loading', () => ({
   default: () => <div role="status">loading</div>,
 }))

-const createDataset = (overrides: Partial<DataSet> = {}): DataSet => ({
-  id: 'test-dataset-id',
-  name: 'Test Dataset',
-  indexing_status: 'completed',
-  icon_info: {
-    icon: 'book',
-    icon_background: '#fff',
-    icon_type: 'emoji',
-    icon_url: '',
-  },
-  description: '',
-  permission: DatasetPermission.onlyMe,
-  data_source_type: DataSourceType.FILE,
-  indexing_technique: IndexingType.QUALIFIED,
-  created_by: 'user-1',
-  updated_by: 'user-1',
-  updated_at: 0,
-  app_count: 0,
-  doc_form: ChunkingMode.text,
-  document_count: 0,
-  total_document_count: 0,
-  word_count: 0,
-  provider: 'vendor',
-  embedding_model: 'text-embedding',
-  embedding_model_provider: 'openai',
-  embedding_available: true,
-  retrieval_model_dict: {
-    search_method: RETRIEVE_METHOD.semantic,
-    reranking_enable: false,
-    reranking_model: {
-      reranking_provider_name: '',
-      reranking_model_name: '',
-    },
-    top_k: 3,
-    score_threshold_enabled: false,
-    score_threshold: 0.5,
-  },
-  retrieval_model: {
-    search_method: RETRIEVE_METHOD.semantic,
-    reranking_enable: false,
-    reranking_model: {
-      reranking_provider_name: '',
-      reranking_model_name: '',
-    },
-    top_k: 3,
-    score_threshold_enabled: false,
-    score_threshold: 0.5,
-  },
-  tags: [],
-  external_knowledge_info: {
-    external_knowledge_id: '',
-    external_knowledge_api_id: '',
-    external_knowledge_api_name: '',
-    external_knowledge_api_endpoint: '',
-  },
-  external_retrieval_model: {
-    top_k: 3,
-    score_threshold: 0.5,
-    score_threshold_enabled: false,
-  },
-  built_in_field_enabled: false,
-  pipeline_id: 'pipeline-1',
-  is_published: true,
-  runtime_mode: 'rag_pipeline',
-  enable_api: false,
-  is_multimodal: false,
-  ...overrides,
-})
+const mockUsePathname = vi.mocked(usePathname)
+const mockUseRouter = vi.mocked(useRouter)
+const mockUseDatasetDetail = vi.mocked(useDatasetDetail)
+const mockUseDatasetRelatedApps = vi.mocked(useDatasetRelatedApps)

 describe('DatasetDetailLayout', () => {
   beforeEach(() => {
     vi.clearAllMocks()
-    mockPathname = '/datasets/test-dataset-id/documents'
-    mockDataset = createDataset()
-    mockCanAccessSnippetsAndEvaluation = true
+    mockUsePathname.mockReturnValue('/datasets/dataset-1/pipeline')
+    mockUseRouter.mockReturnValue({
+      back: vi.fn(),
+      forward: vi.fn(),
+      refresh: vi.fn(),
+      push: vi.fn(),
+      replace: mockReplace,
+      prefetch: vi.fn(),
+    })
+    mockUseDatasetRelatedApps.mockReturnValue({ data: undefined } as ReturnType<typeof useDatasetRelatedApps>)
   })

-  describe('Evaluation navigation', () => {
-    it('should hide the evaluation menu when the dataset is not a rag pipeline', () => {
-      mockDataset = createDataset({
-        runtime_mode: 'general',
-        is_published: false,
+  describe('Access Errors', () => {
+    it.each([403, 404])('should redirect to datasets page when dataset detail returns %s', async (status) => {
+      // Arrange
+      mockUseDatasetDetail.mockReturnValue({
+        data: undefined,
+        error: new Response(null, { status }),
+        refetch: vi.fn(),
+      } as unknown as ReturnType<typeof useDatasetDetail>)
+
+      // Act
+      render(
+        <DatasetDetailLayout datasetId="dataset-1">
+          <div>Pipeline content</div>
+        </DatasetDetailLayout>,
+      )
+
+      // Assert
+      await waitFor(() => {
+        expect(mockReplace).toHaveBeenCalledWith('/datasets')
       })
+      expect(mockUseDatasetRelatedApps).toHaveBeenCalledWith('dataset-1', { enabled: false })
+      expect(screen.queryByText('Pipeline content')).not.toBeInTheDocument()
+    })

+    it('should redirect when the dataset detail error exposes status without being a Response', async () => {
+      // Arrange
+      mockUseDatasetDetail.mockReturnValue({
+        data: undefined,
+        error: { status: 403 },
+        refetch: vi.fn(),
+      } as unknown as ReturnType<typeof useDatasetDetail>)
+
+      // Act
       render(
-        <DatasetDetailLayout datasetId="test-dataset-id">
-          <div data-testid="dataset-detail-content">content</div>
+        <DatasetDetailLayout datasetId="dataset-1">
+          <div>Pipeline content</div>
         </DatasetDetailLayout>,
       )

-      expect(screen.queryByRole('button', { name: 'common.datasetMenus.evaluation' })).not.toBeInTheDocument()
-    })
-
-    it('should disable the evaluation menu when the rag pipeline is unpublished', () => {
-      mockDataset = createDataset({
-        is_published: false,
+      // Assert
+      await waitFor(() => {
+        expect(mockReplace).toHaveBeenCalledWith('/datasets')
       })

-      render(
-        <DatasetDetailLayout datasetId="test-dataset-id">
-          <div data-testid="dataset-detail-content">content</div>
-        </DatasetDetailLayout>,
-      )
-
-      expect(screen.getByRole('button', { name: 'common.datasetMenus.evaluation' })).toBeDisabled()
+      expect(screen.queryByText('Pipeline content')).not.toBeInTheDocument()
     })
+  })

-    it('should enable the evaluation menu when the rag pipeline is published', () => {
+  describe('Rendering', () => {
+    it('should render children when dataset detail is available', () => {
+      // Arrange
+      mockUseDatasetDetail.mockReturnValue({
+        data: {
+          id: 'dataset-1',
+          name: 'Dataset 1',
+          provider: 'vendor',
+          runtime_mode: 'rag_pipeline',
+          is_published: true,
+        },
+        error: null,
+        refetch: vi.fn(),
+      } as unknown as ReturnType<typeof useDatasetDetail>)
+
+      // Act
       render(
-        <DatasetDetailLayout datasetId="test-dataset-id">
-          <div data-testid="dataset-detail-content">content</div>
+        <DatasetDetailLayout datasetId="dataset-1">
+          <div>Pipeline content</div>
         </DatasetDetailLayout>,
       )

-      expect(screen.getByRole('button', { name: 'common.datasetMenus.evaluation' })).toBeEnabled()
-    })
-
-    it('should hide the evaluation menu when snippet and evaluation access is unavailable', () => {
-      mockCanAccessSnippetsAndEvaluation = false
-
-      render(
-        <DatasetDetailLayout datasetId="test-dataset-id">
-          <div data-testid="dataset-detail-content">content</div>
-        </DatasetDetailLayout>,
-      )
-
-      expect(screen.queryByRole('button', { name: 'common.datasetMenus.evaluation' })).not.toBeInTheDocument()
+      // Assert
+      expect(screen.getByText('Pipeline content')).toBeInTheDocument()
+      expect(mockUseDatasetRelatedApps).toHaveBeenCalledWith('dataset-1', { enabled: true })
+      expect(mockReplace).not.toHaveBeenCalled()
     })
   })
 })
@ -9,8 +9,6 @@ import {
|
||||
RiFileTextLine,
|
||||
RiFocus2Fill,
|
||||
RiFocus2Line,
|
||||
RiUserSettingsFill,
|
||||
RiUserSettingsLine,
|
||||
} from '@remixicon/react'
|
||||
import * as React from 'react'
|
||||
import { useEffect, useMemo, useState } from 'react'
|
||||
@ -25,8 +23,7 @@ import DatasetDetailContext from '@/context/dataset-detail'
|
||||
import { useEventEmitterContextContext } from '@/context/event-emitter'
|
||||
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
|
||||
import useDocumentTitle from '@/hooks/use-document-title'
|
||||
import { useSnippetAndEvaluationPlanAccess } from '@/hooks/use-snippet-and-evaluation-plan-access'
|
||||
import { usePathname } from '@/next/navigation'
|
||||
import { usePathname, useRouter } from '@/next/navigation'
|
||||
import { useDatasetDetail, useDatasetRelatedApps } from '@/service/knowledge/use-dataset'
|
||||
|
||||
type IAppDetailLayoutProps = {
|
||||
@ -34,8 +31,17 @@ type IAppDetailLayoutProps = {
|
||||
datasetId: string
|
||||
}
|
||||
|
||||
const EvaluationIcon = ({ className }: { className?: string }) => {
|
||||
return <span aria-hidden className={cn('i-custom-vender-line-others-evaluation', className)} />
|
||||
const getResponseStatus = (error: unknown) => {
|
||||
if (error instanceof Response)
|
||||
return error.status
|
||||
|
||||
if (typeof error === 'object' && error && 'status' in error && typeof error.status === 'number')
|
||||
return error.status
|
||||
}
|
||||
|
||||
const shouldRedirectToDatasetList = (error: unknown) => {
|
||||
const status = getResponseStatus(error)
|
||||
return status === 403 || status === 404
|
||||
}
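
Note: the two helpers above centralize the 403/404 redirect decision; they accept either a raw `Response` or any error object carrying a numeric `status`. An illustrative check of the expected behavior (a sketch, not part of the diff):

// Sketch only — exercising shouldRedirectToDatasetList as defined above.
shouldRedirectToDatasetList(new Response(null, { status: 404 })) // true
shouldRedirectToDatasetList({ status: 403 })                     // true: plain object with numeric status
shouldRedirectToDatasetList(new Error('network down'))           // false: transient errors keep the normal flow
shouldRedirectToDatasetList(undefined)                           // false: no error yet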

const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
@ -44,6 +50,7 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
datasetId,
} = props
const { t } = useTranslation()
const router = useRouter()
const pathname = usePathname()
const hideSideBar = pathname.endsWith('documents/create') || pathname.endsWith('documents/create-from-pipeline')
const isPipelineCanvas = pathname.endsWith('/pipeline')
@ -56,15 +63,14 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
setHideHeader(v.payload)
})
const { isCurrentWorkspaceDatasetOperator } = useAppContext()
const { canAccess: canAccessSnippetsAndEvaluation } = useSnippetAndEvaluationPlanAccess()

const media = useBreakpoints()
const isMobile = media === MediaType.mobile

const { data: datasetRes, error, refetch: mutateDatasetRes } = useDatasetDetail(datasetId)
const shouldRedirect = shouldRedirectToDatasetList(error)

const { data: relatedApps } = useDatasetRelatedApps(datasetId)
const isRagPipelineDataset = datasetRes?.runtime_mode === 'rag_pipeline'
const { data: relatedApps } = useDatasetRelatedApps(datasetId, { enabled: !!datasetRes && !shouldRedirect })

const isButtonDisabledWithPipeline = useMemo(() => {
if (!datasetRes)
@ -92,46 +98,27 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
selectedIcon: RiEqualizer2Fill,
disabled: false,
},
{
name: 'Access Config',
href: `/datasets/${datasetId}/access-config`,
icon: RiUserSettingsLine,
selectedIcon: RiUserSettingsFill,
disabled: false,
},
]

if (datasetRes?.provider !== 'external') {
return [
{
name: t('datasetMenus.documents', { ns: 'common' }),
href: `/datasets/${datasetId}/documents`,
icon: RiFileTextLine,
selectedIcon: RiFileTextFill,
disabled: isButtonDisabledWithPipeline,
},
{
name: t('datasetMenus.pipeline', { ns: 'common' }),
href: `/datasets/${datasetId}/pipeline`,
icon: PipelineLine as RemixiconComponentType,
selectedIcon: PipelineFill as RemixiconComponentType,
disabled: false,
},
...baseNavigation,
...(isRagPipelineDataset && canAccessSnippetsAndEvaluation
? [{
name: t('datasetMenus.evaluation', { ns: 'common' }),
href: `/datasets/${datasetId}/evaluation`,
icon: EvaluationIcon,
selectedIcon: EvaluationIcon,
disabled: isButtonDisabledWithPipeline,
}]
: []),
]
baseNavigation.unshift({
name: t('datasetMenus.pipeline', { ns: 'common' }),
href: `/datasets/${datasetId}/pipeline`,
icon: PipelineLine as RemixiconComponentType,
selectedIcon: PipelineFill as RemixiconComponentType,
disabled: false,
})
baseNavigation.unshift({
name: t('datasetMenus.documents', { ns: 'common' }),
href: `/datasets/${datasetId}/documents`,
icon: RiFileTextLine,
selectedIcon: RiFileTextFill,
disabled: isButtonDisabledWithPipeline,
})
}

return baseNavigation
}, [canAccessSnippetsAndEvaluation, t, datasetId, isButtonDisabledWithPipeline, isRagPipelineDataset, datasetRes?.provider])
}, [t, datasetId, isButtonDisabledWithPipeline, datasetRes?.provider])

useDocumentTitle(datasetRes?.name || t('menus.datasets', { ns: 'common' }))

@ -143,9 +130,17 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
setAppSidebarExpand(isMobile ? mode : localeMode)
}, [isMobile, setAppSidebarExpand])

useEffect(() => {
if (shouldRedirect)
router.replace('/datasets')
}, [router, shouldRedirect])

if (!datasetRes && !error)
return <Loading type="app" />

if (shouldRedirect)
return <Loading type="app" />

return (
<div
className={cn(
@ -175,4 +170,4 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
</div>
)
}
export default React.memo(DatasetDetailLayout)
@ -36,6 +36,7 @@ const mocks = vi.hoisted(() => {
})),
parseEditorState: vi.fn(() => ({ state: 'parsed' })),
setEditorState: vi.fn(),
setEditable: vi.fn(),
focus: vi.fn(),
update: vi.fn((fn: () => void) => fn()),
},
@ -71,6 +72,7 @@ vi.mock('lexical', async (importOriginal) => {
})),
getAllTextNodes: () => [],
}),
$nodesOfType: () => [],
TextNode: class TextNode {
__text: string
constructor(text = '') {
@ -92,9 +94,8 @@ vi.mock('@lexical/react/LexicalComposer', () => ({
try {
initialConfig.onError(new Error('test error'))
}
catch (e) {
// ignore error
console.error(e)
catch {
// Ignore the intentional throw from the mocked error boundary path.
}
}
if (initialConfig?.nodes) {
@ -328,6 +329,20 @@ describe('PromptEditor', () => {
expect(screen.getByTestId('lexical-composer')).toBeInTheDocument()
})

it('should sync editable changes to the lexical editor instance', async () => {
const { rerender } = render(<PromptEditor editable={true} />)

await waitFor(() => {
expect(mocks.editor.setEditable).toHaveBeenCalledWith(true)
})

rerender(<PromptEditor editable={false} />)

await waitFor(() => {
expect(mocks.editor.setEditable).toHaveBeenLastCalledWith(false)
})
})

it('should render with isSupportFileVar=true', () => {
render(<PromptEditor isSupportFileVar={true} />)
expect(screen.getByTestId('lexical-composer')).toBeInTheDocument()

@ -3,10 +3,9 @@
import type { InitialConfigType } from '@lexical/react/LexicalComposer'
import type {
EditorState,
LexicalCommand,
} from 'lexical'
import type { FC } from 'react'
import type { Hotkey } from './plugins/shortcuts-popup-plugin'
import type { Hotkey, ShortcutPopupInsertHandler } from './plugins/shortcuts-popup-plugin'
import type {
ContextBlockType,
CurrentBlockType,
@ -97,6 +96,16 @@ const ValueSyncPlugin: FC<{ value?: string }> = ({ value }) => {
return null
}

const EditableSyncPlugin: FC<{ editable: boolean }> = ({ editable }) => {
const [editor] = useLexicalComposerContext()

useEffect(() => {
editor.setEditable(editable)
}, [editor, editable])

return null
}
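
Note: Lexical stores the editable flag on the editor instance rather than in React state, so changing the `editable` prop after mount would otherwise be silently ignored. `EditableSyncPlugin` bridges the two worlds by re-applying the prop on every change. A minimal usage sketch (assuming the composer setup used elsewhere in this file):

<LexicalComposer initialConfig={initialConfig}>
  {/* keeps editor.isEditable() in lockstep with the React prop */}
  <EditableSyncPlugin editable={!readonly} />
</LexicalComposer>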

export type PromptEditorProps = {
instanceId?: string
compact?: boolean
@ -122,7 +131,7 @@ export type PromptEditorProps = {
errorMessageBlock?: ErrorMessageBlockType
lastRunBlock?: LastRunBlockType
isSupportFileVar?: boolean
shortcutPopups?: Array<{ hotkey: Hotkey, Popup: React.ComponentType<{ onClose: () => void, onInsert: (command: LexicalCommand<unknown>, params: any[]) => void }> }>
shortcutPopups?: Array<{ hotkey: Hotkey, Popup: React.ComponentType<{ onClose: () => void, onInsert: ShortcutPopupInsertHandler }> }>
}

const PromptEditor: FC<PromptEditorProps> = ({
@ -194,13 +203,13 @@ const PromptEditor: FC<PromptEditorProps> = ({
eventEmitter?.emit({
type: UPDATE_DATASETS_EVENT_EMITTER,
payload: contextBlock?.datasets,
} as any)
})
}, [eventEmitter, contextBlock?.datasets])
useEffect(() => {
eventEmitter?.emit({
type: UPDATE_HISTORY_EVENT_EMITTER,
payload: historyBlock?.history,
} as any)
})
}, [eventEmitter, historyBlock?.history])

const [floatingAnchorElem, setFloatingAnchorElem] = useState<HTMLDivElement | null>(null)
@ -243,6 +252,7 @@ const PromptEditor: FC<PromptEditorProps> = ({
onEditorChange={handleEditorChange}
/>
<ValueSyncPlugin value={value} />
<EditableSyncPlugin editable={editable} />
</div>
</LexicalComposer>
)

@ -4,8 +4,10 @@ import type { FormInputItem, ParagraphFormInput } from '@/app/components/workflo
import type { ValueSelector } from '@/app/components/workflow/types'

import { LexicalComposer } from '@lexical/react/LexicalComposer'
import { cleanup, fireEvent, render, screen } from '@testing-library/react'
import { BlockEnum, InputVarType } from '@/app/components/workflow/types'
import { act, cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { useEffect, useState } from 'react'
import { BlockEnum, InputVarType, SupportUploadFileTypes } from '@/app/components/workflow/types'
import { TransferMethod } from '@/types/app'
import HITLInputComponentUI from '../component-ui'
import { HITLInputNode } from '../node'

@ -113,6 +115,57 @@ describe('HITLInputComponentUI', () => {
expect(screen.queryByRole('button', { name: 'common.operation.remove' })).not.toBeInTheDocument()
})

it('should close the edit modal when readonly becomes true', async () => {
let setReadonlyValue: ((readonly: boolean) => void) | undefined
const Harness = () => {
const [readonly, setReadonly] = useState(false)
const [namespace] = useState(() => `hitl-input-test-${crypto.randomUUID()}`)

useEffect(() => {
setReadonlyValue = setReadonly
return () => {
setReadonlyValue = undefined
}
}, [])

return (
<LexicalComposer
initialConfig={{
namespace,
onError: (error: Error) => {
throw error
},
nodes: [HITLInputNode],
}}
>
<HITLInputComponentUI
nodeId="node-1"
varName="customer_name"
workflowNodesMap={createWorkflowNodesMap()}
onChange={vi.fn()}
onRename={vi.fn()}
onRemove={vi.fn()}
readonly={readonly}
/>
</LexicalComposer>
)
}

render(<Harness />)

fireEvent.click(await screen.findByRole('button', { name: 'common.operation.edit' }))

expect(await screen.findByRole('textbox')).toBeInTheDocument()

act(() => {
setReadonlyValue?.(true)
})

await waitFor(() => {
expect(screen.queryByRole('textbox')).not.toBeInTheDocument()
})
})

it('should render select option summary for constant options', () => {
const { getByText } = renderComponent({
formInput: {
@ -212,10 +265,33 @@ describe('HITLInputComponentUI', () => {

expect(queryByRole('textbox')).not.toBeInTheDocument()
})

it('should prevent renaming to an existing variable name', async () => {
const {
findByRole,
onChange,
onRename,
} = renderComponent({
unavailableVariableNames: ['existing_name'],
})

fireEvent.click(await screen.findByRole('button', { name: 'common.operation.edit' }))

const textbox = await findByRole('textbox')
fireEvent.change(textbox, { target: { value: 'existing_name' } })

expect(screen.getByText('workflow.nodes.humanInput.insertInputField.variableNameDuplicated')).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'common.operation.save' })).toBeDisabled()

fireEvent.click(screen.getByRole('button', { name: 'common.operation.save' }))

expect(onChange).not.toHaveBeenCalled()
expect(onRename).not.toHaveBeenCalled()
})
})

describe('Default formInput', () => {
it('should pass default payload to InputField when formInput is undefined', async () => {
it('should open an empty default editor when formInput is undefined', async () => {
const { findByRole } = renderComponent({
formInput: undefined,
})
@ -223,10 +299,10 @@ describe('HITLInputComponentUI', () => {
fireEvent.click(await screen.findByRole('button', { name: 'common.operation.edit' }))

const textbox = await findByRole('textbox')
const saveButton = await screen.findByRole('button', { name: 'common.operation.save' })

fireEvent.click(await screen.findByRole('button', { name: 'common.operation.save' }))

expect(textbox).toHaveValue('customer_name')
expect(textbox).toHaveValue('')
expect(saveButton).toBeDisabled()
})

it('should render variable selector when workflowNodesMap fallback is used', () => {

@ -129,6 +129,31 @@ describe('HITLInputComponent', () => {
expect(onChange.mock.calls[0][0][0].output_variable_name).toBe('renamed_name')
})

it('should ignore rename when the target variable name already exists', async () => {
const user = userEvent.setup()
const onChange = vi.fn()

render(
<HITLInputComponent
nodeKey="node-key-duplicate"
nodeId="node-duplicate"
varName="user_name"
formInputs={[
createInput(),
createInput({ output_variable_name: 'renamed_name' }),
]}
onChange={onChange}
onRename={vi.fn()}
onRemove={vi.fn()}
workflowNodesMap={{}}
/>,
)

await user.click(screen.getByRole('button', { name: 'emit-rename' }))

expect(onChange).not.toHaveBeenCalled()
})

it('should update existing payload when variable name stays the same', async () => {
const user = userEvent.setup()
const onChange = vi.fn()

@ -1,11 +1,13 @@
import type { LexicalEditor } from 'lexical'
import type { FormInputItem } from '@/app/components/workflow/nodes/human-input/types'
import { LexicalComposer } from '@lexical/react/LexicalComposer'
import { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext'
import { act, render, waitFor } from '@testing-library/react'
import {
$nodesOfType,
COMMAND_PRIORITY_EDITOR,
} from 'lexical'
import { useEffect } from 'react'
import { useEffect, useState } from 'react'
import {
BlockEnum,
InputVarType,
@ -13,6 +15,7 @@ import {
import { CustomTextNode } from '../../custom-text/node'
import {
getNodeCount,
readEditorStateValue,
readRootTextContent,
renderLexicalEditor,
selectRootEnd,
@ -76,6 +79,12 @@ const createInsertPayload = () => ({
onFormInputItemRemove: vi.fn(),
})

const readHITLReadonlyValues = (editor: LexicalEditor): boolean[] => {
return readEditorStateValue(editor, () => {
return $nodesOfType(HITLInputNode).map(node => node.getReadonly())
})
}

const renderHITLInputBlock = (props?: {
onInsert?: () => void
onDelete?: () => void

@ -169,6 +178,65 @@ describe('HITLInputBlock', () => {
expect(getNodeCount(editor, HITLInputNode)).toBe(1)
})

it('should update existing and newly inserted nodes when readonly changes', async () => {
let setReadonlyValue: ((readonly: boolean) => void) | undefined
const ReadonlyHarness = () => {
const [readonly, setReadonly] = useState(false)

useEffect(() => {
setReadonlyValue = setReadonly
return () => {
setReadonlyValue = undefined
}
}, [])

return (
<HITLInputBlock
nodeId="node-1"
formInputs={[createFormInput()]}
onFormInputItemRename={vi.fn()}
onFormInputItemRemove={vi.fn()}
workflowNodesMap={createWorkflowNodesMap('First Node')}
readonly={readonly}
/>
)
}

const { getEditor } = renderLexicalEditor({
namespace: 'hitl-input-block-readonly-update-test',
nodes: [CustomTextNode, HITLInputNode],
children: <ReadonlyHarness />,
})

const editor = await waitForEditorReady(getEditor)

selectRootEnd(editor)
act(() => {
editor.dispatchCommand(INSERT_HITL_INPUT_BLOCK_COMMAND, createInsertPayload())
})

await waitFor(() => {
expect(readHITLReadonlyValues(editor)).toEqual([false])
})

act(() => {
setReadonlyValue?.(true)
})

await waitFor(() => {
expect(readHITLReadonlyValues(editor)).toEqual([true])
})

selectRootEnd(editor)
act(() => {
editor.dispatchCommand(INSERT_HITL_INPUT_BLOCK_COMMAND, createInsertPayload())
})

await waitFor(() => {
expect(readHITLReadonlyValues(editor)).toEqual([true, true])
})
})

it('should call onDelete when delete command is dispatched', async () => {
const onDelete = vi.fn()
const { getEditor } = renderHITLInputBlock({ onDelete })

@ -116,6 +116,31 @@ describe('InputField', () => {
expect(onChange).not.toHaveBeenCalled()
})

it('should disable save and show validation error when variable name already exists', async () => {
const user = userEvent.setup()
const onChange = vi.fn()

render(
<InputField
nodeId="node-duplicate-name"
isEdit={false}
payload={createPayload()}
unavailableVariableNames={['existing_name']}
onChange={onChange}
onCancel={vi.fn()}
/>,
)

const inputs = screen.getAllByRole('textbox')
await user.clear(inputs[0]!)
await user.type(inputs[0]!, 'existing_name')

expect(screen.getByText('workflow.nodes.humanInput.insertInputField.variableNameDuplicated')).toBeInTheDocument()
expect(screen.getByRole('button', { name: /workflow\.nodes\.humanInput\.insertInputField\.insert/i })).toBeDisabled()
await user.keyboard('{Control>}{Enter}{/Control}')
expect(onChange).not.toHaveBeenCalled()
})

it('should call onChange when saving a valid payload in edit mode', async () => {
const user = userEvent.setup()
const onChange = vi.fn()

@ -98,6 +98,8 @@ describe('HITLInputNode', () => {
expect(node.getConversationVariables()).toEqual(props.conversationVariables)
expect(node.getRagVariables()).toEqual(props.ragVariables)
expect(node.getReadonly()).toBe(true)
node.setReadonly(false)
expect(node.getReadonly()).toBe(false)
expect(node.getTextContent()).toBe('{{#$output.user_name#}}')
})
})

@ -26,6 +26,7 @@ type HITLInputComponentUIProps = {
nodeId: string
varName: string
formInput?: FormInputItem
unavailableVariableNames?: string[]
onChange: (input: FormInputItem) => void
onRename: (payload: FormInputItem, oldName: string) => void
onRemove: (varName: string) => void
@ -44,6 +45,7 @@ const HITLInputComponentUI: FC<HITLInputComponentUIProps> = ({
nodeId,
varName,
formInput,
unavailableVariableNames = [],
onChange,
onRename,
onRemove,
@ -64,6 +66,11 @@ const HITLInputComponentUI: FC<HITLInputComponentUIProps> = ({
setFalse: hideEditModal,
}] = useBoolean(false)

useEffect(() => {
if (readonly)
hideEditModal()
}, [hideEditModal, readonly])

// Lexical delegates click handling, so the listener cannot be attached through React props
const editBtnRef = useRef<HTMLDivElement>(null)
useEffect(() => {
@ -91,12 +98,15 @@ const HITLInputComponentUI: FC<HITLInputComponentUIProps> = ({
}, [onRemove, varName])

const handleChange = useCallback((newPayload: FormInputItem) => {
if (newPayload.output_variable_name !== varName && unavailableVariableNames.includes(newPayload.output_variable_name))
return

if (varName === newPayload.output_variable_name)
onChange(newPayload)
else
onRename(newPayload, varName)
hideEditModal()
}, [hideEditModal, onChange, onRename, varName])
}, [hideEditModal, onChange, onRename, unavailableVariableNames, varName])

const isDefaultValueVariable = useMemo(() => {
return paragraphDefault?.type === 'variable'
@ -203,6 +213,7 @@ const HITLInputComponentUI: FC<HITLInputComponentUIProps> = ({
nodeId={nodeId}
isEdit
payload={formInput}
unavailableVariableNames={unavailableVariableNames}
onChange={handleChange}
onCancel={hideEditModal}
/>

@ -45,8 +45,14 @@ const HITLInputComponent: FC<HITLInputComponentProps> = ({
}) => {
const [ref] = useSelectOrDelete(nodeKey, DELETE_HITL_INPUT_BLOCK_COMMAND)
const payload = formInputs.find(item => item.output_variable_name === varName)
const unavailableVariableNames = formInputs
.map(item => item.output_variable_name)
.filter(name => name !== varName)

const handleChange = useCallback((newPayload: FormInputItem) => {
if (newPayload.output_variable_name !== varName && unavailableVariableNames.includes(newPayload.output_variable_name))
return

if (!payload) {
onChange([...formInputs, newPayload])
return
@ -58,7 +64,7 @@ const HITLInputComponent: FC<HITLInputComponentProps> = ({
return
}
onChange(formInputs.map(item => item.output_variable_name === varName ? newPayload : item))
}, [formInputs, onChange, payload, varName])
}, [formInputs, onChange, payload, unavailableVariableNames, varName])

return (
<div
@ -69,6 +75,7 @@ const HITLInputComponent: FC<HITLInputComponentProps> = ({
nodeId={nodeId}
varName={varName}
formInput={payload}
unavailableVariableNames={unavailableVariableNames}
onChange={handleChange}
onRename={onRename}
onRemove={onRemove}

@ -1,5 +1,6 @@
import type { TextNode } from 'lexical'
import type { HITLInputBlockType } from '../../types'
import type { Var } from '@/app/components/workflow/types'
import { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext'
import { mergeRegister } from '@lexical/utils'
import { $applyNodeReplacement } from 'lexical'
@ -31,7 +32,7 @@ const HITLInputReplacementBlock = ({

const environmentVariables = useMemo(() => variables?.find(o => o.nodeId === 'env')?.vars || [], [variables])
const conversationVariables = useMemo(() => variables?.find(o => o.nodeId === 'conversation')?.vars || [], [variables])
const ragVariables = useMemo(() => variables?.reduce<any[]>((acc, curr) => {
const ragVariables = useMemo(() => variables?.reduce<Var[]>((acc, curr) => {
if (curr.nodeId === 'rag')
acc.push(...curr.vars)
else
@ -81,7 +82,7 @@ const HITLInputReplacementBlock = ({
return mergeRegister(
editor.registerNodeTransform(CustomTextNode, textNode => decoratorTransform(textNode, getMatch, createHITLInputBlockNode)),
)
}, [])
}, [editor, getMatch, createHITLInputBlockNode])

return null
}

@ -6,6 +6,7 @@ import { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext
import { mergeRegister } from '@lexical/utils'
import {
$insertNodes,
$nodesOfType,
COMMAND_PRIORITY_EDITOR,
createCommand,
} from 'lexical'
@ -43,6 +44,14 @@ const HITLInputBlock = memo(({
})
}, [editor, workflowNodesMap, workflowAvailableVariables])

useEffect(() => {
editor.update(() => {
$nodesOfType(HITLInputNode).forEach((node) => {
node.setReadonly(readonly)
})
})
}, [editor, readonly])
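
Note: decorator nodes live in Lexical's editor state, not in React, so the `readonly` prop has to be written into existing nodes imperatively — that is what the effect above does via `editor.update()`. Reading the flag back goes through the editor state as well, which is the pattern the new tests use (a sketch, assuming the accessors added on `HITLInputNode` in this commit):

// Read side of the same sync, e.g. for assertions:
const readonlyFlags = editor.getEditorState().read(
  () => $nodesOfType(HITLInputNode).map(node => node.getReadonly()),
)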

useEffect(() => {
if (!editor.hasNodes([HITLInputNode]))
throw new Error('HITLInputBlockPlugin: HITLInputBlock not registered on editor')
@ -95,7 +104,7 @@ const HITLInputBlock = memo(({
COMMAND_PRIORITY_EDITOR,
),
)
}, [editor, onInsert, onDelete])
}, [editor, onInsert, onDelete, workflowNodesMap, getVarType, readonly])

return null
})

@ -31,6 +31,7 @@ type InputFieldProps = {
nodeId: string
isEdit: boolean
payload?: FormInputItem
unavailableVariableNames?: string[]
onChange: (newPayload: FormInputItem) => void
onCancel: () => void
}
@ -38,6 +39,7 @@ const InputField: React.FC<InputFieldProps> = ({
nodeId,
isEdit,
payload,
unavailableVariableNames = [],
onChange,
onCancel,
}) => {
@ -73,14 +75,24 @@ const InputField: React.FC<InputFieldProps> = ({

return createDefaultParagraphFormInput(tempPayload.output_variable_name)
}, [tempPayload])
const nameValid = useMemo(() => {
const unavailableVariableNameSet = useMemo(() => {
return new Set(unavailableVariableNames.map(name => name.trim()).filter(Boolean))
}, [unavailableVariableNames])
const variableNameError = useMemo(() => {
const name = tempPayload.output_variable_name.trim()
if (!name)
return false
return null
if (name.includes(' '))
return false
return /^[a-z_]\w{0,29}$/.test(name)
}, [tempPayload.output_variable_name])
return 'variableNameInvalid'
if (!/^[a-z_]\w{0,29}$/.test(name))
return 'variableNameInvalid'
if (unavailableVariableNameSet.has(name))
return 'variableNameDuplicated'
return null
}, [tempPayload.output_variable_name, unavailableVariableNameSet])
const nameValid = useMemo(() => {
return !!tempPayload.output_variable_name.trim() && !variableNameError
}, [tempPayload.output_variable_name, variableNameError])
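
Note: the memo now reports which rule failed instead of a bare boolean, which is what lets the error copy below pick the matching i18n key (`variableNameInvalid` vs `variableNameDuplicated`). The same logic as a standalone function, for reference (illustrative only; names mirror the diff):

type VariableNameError = 'variableNameInvalid' | 'variableNameDuplicated' | null

const validateVariableName = (raw: string, taken: Set<string>): VariableNameError => {
  const name = raw.trim()
  if (!name)
    return null // empty input shows no error; saving stays blocked through nameValid
  if (name.includes(' ') || !/^[a-z_]\w{0,29}$/.test(name))
    return 'variableNameInvalid'
  if (taken.has(name))
    return 'variableNameDuplicated'
  return null
}

// validateVariableName('existing_name', new Set(['existing_name'])) -> 'variableNameDuplicated'
// validateVariableName('9lives', new Set())                         -> 'variableNameInvalid'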
const handleSave = useCallback(() => {
if (!nameValid)
return
@ -223,9 +235,9 @@ const InputField: React.FC<InputFieldProps> = ({
}}
autoFocus
/>
{tempPayload.output_variable_name && !nameValid && (
{tempPayload.output_variable_name && variableNameError && (
<div className="mt-1 px-1 system-xs-regular text-text-destructive-secondary">
{t(`${i18nPrefix}.variableNameInvalid`, { ns: 'workflow' })}
{t(`${i18nPrefix}.${variableNameError}`, { ns: 'workflow' })}
</div>
)}
</div>

@ -109,6 +109,11 @@ export class HITLInputNode extends DecoratorNode<React.JSX.Element> {
return self.__readonly || false
}

setReadonly(readonly?: boolean): void {
const self = this.getWritable()
self.__readonly = readonly
}
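
Note: `getWritable()` is Lexical's copy-on-write handle — it clones the node into the pending editor state and marks it dirty, so the setter never mutates a frozen snapshot. A self-contained sketch of the convention on a hypothetical node class (not the actual `HITLInputNode` implementation):

import { TextNode } from 'lexical'

class FlagTextNode extends TextNode {
  __flag = false

  static getType(): string {
    return 'flag-text'
  }

  static clone(node: FlagTextNode): FlagTextNode {
    const cloned = new FlagTextNode(node.__text, node.__key)
    cloned.__flag = node.__flag
    return cloned
  }

  setFlag(value: boolean): void {
    const self = this.getWritable() // clone into the pending state
    self.__flag = value
  }

  getFlag(): boolean {
    return this.getLatest().__flag // always read the newest version of the node
  }
}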

static override clone(node: HITLInputNode): HITLInputNode {
return new HITLInputNode(
node.__variableName,

@ -64,6 +64,7 @@ vi.mock('@/app/components/base/prompt-editor', () => ({
vi.mock('../add-input-field', () => ({
__esModule: true,
default: (props: {
unavailableVariableNames?: string[]
onSave: (payload: {
type: string
output_variable_name: string
@ -231,6 +232,41 @@ describe('FormContent', () => {
expect(container.firstChild).toHaveClass('pointer-events-none')
})

it('should not insert a new input when the variable name already exists', () => {
render(
<FormContent
nodeId="node-2"
value="Initial content"
onChange={onChange}
formInputs={[{
type: 'paragraph',
output_variable_name: 'approval',
default: {
type: 'constant',
selector: [],
value: '',
},
} as never]}
onFormInputsChange={onFormInputsChange}
onFormInputItemRename={onFormInputItemRename}
onFormInputItemRemove={onFormInputItemRemove}
editorKey={1}
isExpand={false}
availableVars={[]}
availableNodes={[]}
/>,
)

expect(mockAddInputField).toHaveBeenCalledWith(expect.objectContaining({
unavailableVariableNames: ['approval'],
}))

fireEvent.click(screen.getByText('save-input'))

expect(mockOnInsert).not.toHaveBeenCalled()
expect(onFormInputsChange).not.toHaveBeenCalled()
})

it('should render the mac hotkey hint when focused on macOS', () => {
mockIsMac.mockReturnValue(true)


@ -1,4 +1,5 @@
import type { ReactNode } from 'react'
import type { HumanInputFieldValue } from '@/app/components/base/chat/chat/answer/human-input-content/field-renderer'
import type { FormInputItem } from '@/app/components/workflow/nodes/human-input/types'
import type { HumanInputFormData } from '@/types/workflow'
import { render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
@ -12,25 +13,54 @@ vi.mock('react-i18next', () => ({
}),
}))

vi.mock('@langgenius/dify-ui/button', () => ({
Button: ({
children,
disabled,
onClick,
}: {
children?: ReactNode
disabled?: boolean
onClick?: () => void
}) => (
<button type="button" disabled={disabled} onClick={onClick}>
{children}
</button>
),
}))

vi.mock('@/app/components/base/chat/chat/answer/human-input-content/content-item', () => ({
__esModule: true,
default: ({ content }: { content: string }) => <div>{content}</div>,
default: ({
content,
formInputFields,
inputs,
onInputChange,
}: {
content: string
formInputFields: FormInputItem[]
inputs: Record<string, HumanInputFieldValue>
onInputChange: (name: string, value: HumanInputFieldValue) => void
}) => {
const fieldName = /\{\{#\$output\.([^#]+)#\}\}/.exec(content)?.[1]
if (!fieldName)
return <div>{content}</div>

const field = formInputFields.find(field => field.output_variable_name === fieldName)
if (!field)
return null

if (field.type === 'select') {
return (
<select
aria-label={fieldName}
value={typeof inputs[fieldName] === 'string' ? inputs[fieldName] : ''}
onChange={event => onInputChange(fieldName, event.target.value)}
>
<option value="">Select</option>
{field.option_source.value.map(option => (
<option key={option} value={option}>{option}</option>
))}
</select>
)
}

if (field.type === 'paragraph') {
return (
<textarea
aria-label={fieldName}
value={typeof inputs[fieldName] === 'string' ? inputs[fieldName] : ''}
onChange={event => onInputChange(fieldName, event.target.value)}
/>
)
}

return <div>{fieldName}</div>
},
}))
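
Note: the richer mock above only needs to understand the placeholder syntax human-input form content uses, `{{#$output.<variable>#}}`. A quick illustrative check of the extraction regex:

const OUTPUT_TOKEN = /\{\{#\$output\.([^#]+)#\}\}/

OUTPUT_TOKEN.exec('Choose {{#$output.choice#}}')?.[1] // 'choice'
OUTPUT_TOKEN.exec('no placeholder here')              // null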

const createFormData = (overrides: Partial<HumanInputFormData> = {}): HumanInputFormData => ({
@ -60,6 +90,10 @@ const createFormData = (overrides: Partial<HumanInputFormData> = {}): HumanInput
})

describe('SingleRunForm', () => {
beforeEach(() => {
vi.clearAllMocks()
})

it('renders the back action as a named button and forwards clicks', async () => {
const user = userEvent.setup()
const handleBack = vi.fn()
@ -99,4 +133,99 @@ describe('SingleRunForm', () => {
})
})
})

it('submits updated paragraph input values', async () => {
const user = userEvent.setup()
const onSubmit = vi.fn().mockResolvedValue(undefined)

render(
<SingleRunForm
nodeName="Review"
data={createFormData()}
onSubmit={onSubmit}
/>,
)

await user.clear(screen.getByRole('textbox', { name: 'review' }))
await user.type(screen.getByRole('textbox', { name: 'review' }), 'updated review')
await user.click(screen.getByRole('button', { name: 'Approve' }))

await waitFor(() => {
expect(onSubmit).toHaveBeenCalledWith({
inputs: { review: 'updated review' },
action: 'approve',
})
})
})

it('uses resolved default values for variable paragraph inputs', async () => {
const user = userEvent.setup()
const onSubmit = vi.fn().mockResolvedValue(undefined)

render(
<SingleRunForm
nodeName="Review"
data={createFormData({
inputs: [{
type: InputVarType.paragraph,
output_variable_name: 'review',
default: {
selector: ['source', 'answer'],
type: 'variable',
value: 'fallback review',
},
}],
resolved_default_values: {
review: 'resolved review',
},
})}
onSubmit={onSubmit}
/>,
)

await user.click(screen.getByRole('button', { name: 'Approve' }))

await waitFor(() => {
expect(onSubmit).toHaveBeenCalledWith({
inputs: { review: 'resolved review' },
action: 'approve',
})
})
})

it('disables submit actions until a select input has a value', async () => {
const user = userEvent.setup()
const onSubmit = vi.fn().mockResolvedValue(undefined)

render(
<SingleRunForm
nodeName="Review"
data={createFormData({
form_content: 'Choose {{#$output.choice#}}',
inputs: [{
type: InputVarType.select,
output_variable_name: 'choice',
option_source: {
selector: [],
type: 'constant',
value: ['approve', 'reject'],
},
}],
})}
onSubmit={onSubmit}
/>,
)

expect(screen.getByRole('button', { name: 'Approve' })).toBeDisabled()

await user.selectOptions(screen.getByRole('combobox', { name: 'choice' }), 'approve')
await user.click(screen.getByRole('button', { name: 'Approve' }))

await waitFor(() => {
expect(onSubmit).toHaveBeenCalledWith({
inputs: { choice: 'approve' },
action: 'approve',
})
})
})
})

@ -6,12 +6,14 @@ import InputField from '@/app/components/base/prompt-editor/plugins/hitl-input-b

type Props = {
nodeId: string
unavailableVariableNames?: string[]
onSave: (newPayload: FormInputItem) => void
onCancel: () => void
}

const AddInputField: FC<Props> = ({
nodeId,
unavailableVariableNames,
onSave,
onCancel,
}) => {
@ -19,6 +21,7 @@ const AddInputField: FC<Props> = ({
<InputField
nodeId={nodeId}
isEdit={false}
unavailableVariableNames={unavailableVariableNames}
onChange={onSave}
onCancel={onCancel}
/>

@ -1,12 +1,13 @@
'use client'
import type { LexicalCommand } from 'lexical'
import type { FC } from 'react'
import type { FormInputItem } from '../types'
import type { ShortcutPopupInsertHandler } from '@/app/components/base/prompt-editor/plugins/shortcuts-popup-plugin'
import type { WorkflowNodesMap } from '@/app/components/base/prompt-editor/types'
import type { Node, NodeOutPutVar } from '@/app/components/workflow/types'
import { cn } from '@langgenius/dify-ui/cn'
import { useBoolean } from 'ahooks'
import * as React from 'react'
import { useEffect, useState } from 'react'
import { useEffect, useRef } from 'react'
import { Trans, useTranslation } from 'react-i18next'
import PromptEditor from '@/app/components/base/prompt-editor'
import { INSERT_HITL_INPUT_BLOCK_COMMAND } from '@/app/components/base/prompt-editor/plugins/hitl-input-block'
@ -56,11 +57,20 @@ const FormContent: FC<FormContentProps> = ({

const getVarType = useWorkflowVariableType()

const [needToAddFormInput, setNeedToAddFormInput] = useState(false)
const [newFormInputs, setNewFormInputs] = useState<FormInputItem[]>([])
const handleInsertHITLNode = (onInsert: (command: LexicalCommand<unknown>, params: any) => void) => {
const pendingFormInputsRef = useRef<{
value: string
formInputs: FormInputItem[]
} | null>(null)
const handleInsertHITLNode = (onInsert: ShortcutPopupInsertHandler) => {
return (payload: FormInputItem) => {
if (formInputs.some(input => input.output_variable_name === payload.output_variable_name))
return

const newFormInputs = [...(formInputs || []), payload]
pendingFormInputsRef.current = {
value,
formInputs: newFormInputs,
}
onInsert(INSERT_HITL_INPUT_BLOCK_COMMAND, {
variableName: payload.output_variable_name,
nodeId,
@ -69,25 +79,25 @@ const FormContent: FC<FormContentProps> = ({
onFormInputItemRename,
onFormInputItemRemove,
})
setNewFormInputs(newFormInputs)
setNeedToAddFormInput(true)
}
}

// Defer the formInputs update so it cannot overwrite the value that was just inserted
useEffect(() => {
if (needToAddFormInput) {
onFormInputsChange(newFormInputs)
setNeedToAddFormInput(false)
}
}, [value])
const pendingFormInputs = pendingFormInputsRef.current
if (!pendingFormInputs || pendingFormInputs.value === value)
return

onFormInputsChange(pendingFormInputs.formInputs)
pendingFormInputsRef.current = null
}, [onFormInputsChange, value])
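
Note: the rewrite replaces the `needToAddFormInput` state pair with a single ref that records both the editor value at insert time and the inputs to commit; the effect then fires only once `value` has actually moved past that snapshot, so a `formInputs` update can no longer clobber the node that was just inserted. The handshake, condensed from the code above:

// 1) At insert time: snapshot the current editor value plus the inputs to commit.
pendingFormInputsRef.current = { value, formInputs: newFormInputs }

// 2) On the next editor change: commit only if `value` differs from the snapshot.
//    (This is the body of the useEffect above, shown in isolation.)
const pending = pendingFormInputsRef.current
if (pending && pending.value !== value) {
  onFormInputsChange(pending.formInputs)
  pendingFormInputsRef.current = null
}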

const [isFocus, {
setTrue: setFocus,
setFalse: setBlur,
}] = useBoolean(false)

const workflowNodesMap = availableNodes.reduce((acc: any, node) => {
const workflowNodesMap = availableNodes.reduce<WorkflowNodesMap>((acc, node) => {
acc[node.id] = {
title: node.data.title,
type: node.data.type,
@ -137,7 +147,7 @@ const FormContent: FC<FormContentProps> = ({
workflowVariableBlock={{
show: true,
variables: availableVars || [],
getVarType: getVarType as any,
getVarType,
workflowNodesMap,
}}
editable={!readonly}
@ -145,10 +155,12 @@ const FormContent: FC<FormContentProps> = ({
? []
: [{
hotkey: ['mod', '/'],
// eslint-disable-next-line react/component-hook-factories, react/no-nested-component-definitions
Popup: ({ onClose, onInsert }) => (
<AddInputField
nodeId={nodeId}
onSave={handleInsertHITLNode(onInsert!)}
unavailableVariableNames={formInputs.map(input => input.output_variable_name)}
onSave={handleInsertHITLNode(onInsert)}
onCancel={onClose}
/>
),

@ -96,6 +96,26 @@ describe('human-input/use-form-content', () => {
expect(result.current.editorKey).toBe(1)
})

it('should not rename an input to an existing variable name', () => {
currentInputs = createPayload({
inputs: [
createFormInput(),
createFormInput({ output_variable_name: 'existing_name' }),
],
})
const { result } = renderHook(() => useFormContent('human-input-node', currentInputs))

act(() => {
result.current.handleFormInputItemRename(createFormInput({
output_variable_name: 'existing_name',
}), 'old_name')
})

expect(mockSetInputs).not.toHaveBeenCalled()
expect(mockHandleOutVarRenameChange).not.toHaveBeenCalled()
expect(result.current.editorKey).toBe(0)
})

it('should remove an input placeholder and its form input metadata', () => {
const { result } = renderHook(() => useFormContent('human-input-node', currentInputs))


@ -29,6 +29,13 @@ const useFormContent = (id: string, payload: HumanInputNodeType) => {

const handleFormInputItemRename = useCallback((payload: FormInputItem, oldName: string) => {
const inputs = inputsRef.current
if (
oldName !== payload.output_variable_name
&& inputs.inputs.some(item => item.output_variable_name === payload.output_variable_name)
) {
return
}

const newInputs = produce(inputs, (draft) => {
draft.form_content = draft.form_content.replaceAll(`{{#$output.${oldName}#}}`, `{{#$output.${payload.output_variable_name}#}}`)
draft.inputs = draft.inputs.map(item => item.output_variable_name === oldName ? payload : item)
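
Note: the guard added at the top of `handleFormInputItemRename` makes the hook the last line of defense — even if a caller bypasses the UI-level checks added elsewhere in this commit, a rename that collides with another input's `output_variable_name` is dropped before `produce` rewrites `form_content`. The invariant, as a one-line predicate (illustrative):

// Output variable names must stay unique within a node's inputs:
const isRenameAllowed = (inputs: { output_variable_name: string }[], oldName: string, newName: string) =>
  oldName === newName || !inputs.some(item => item.output_variable_name === newName)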

@ -654,6 +654,7 @@
"nodes.humanInput.insertInputField.useConstantInstead": "Use Constant Instead",
"nodes.humanInput.insertInputField.useVarInstead": "Use Variable Instead",
"nodes.humanInput.insertInputField.variable": "variable",
"nodes.humanInput.insertInputField.variableNameDuplicated": "Variable name already exists",
"nodes.humanInput.insertInputField.variableNameInvalid": "Variable name can only contain letters, numbers, and underscores, and cannot start with a number",
"nodes.humanInput.log.backstageInputURL": "Backstage input URL:",
"nodes.humanInput.log.reason": "Reason:",

@ -654,6 +654,7 @@
"nodes.humanInput.insertInputField.useConstantInstead": "使用常量代替",
"nodes.humanInput.insertInputField.useVarInstead": "使用变量代替",
"nodes.humanInput.insertInputField.variable": "变量",
"nodes.humanInput.insertInputField.variableNameDuplicated": "变量名已存在",
"nodes.humanInput.insertInputField.variableNameInvalid": "只能包含字母、数字和下划线,且不能以数字开头",
"nodes.humanInput.log.backstageInputURL": "表单输入 URL:",
"nodes.humanInput.log.reason": "原因:",

@ -1,7 +1,7 @@
{
"name": "dify-web",
"type": "module",
"version": "1.14.0",
"version": "1.14.1",
"private": true,
"imports": {
"#i18n": {

web/service/knowledge/use-dataset.spec.ts (new file, 94 lines)
@ -0,0 +1,94 @@
import { useQuery } from '@tanstack/react-query'
import { get } from '../base'
import { useDatasetDetail, useDatasetRelatedApps } from './use-dataset'

vi.mock('@tanstack/react-query', () => ({
keepPreviousData: Symbol('keepPreviousData'),
useInfiniteQuery: vi.fn(),
useMutation: vi.fn(),
useQuery: vi.fn(),
useQueryClient: vi.fn(),
}))

vi.mock('../base', () => ({
get: vi.fn(),
post: vi.fn(),
}))

vi.mock('../use-base', () => ({
useInvalid: vi.fn(),
}))

const mockUseQuery = vi.mocked(useQuery)
const mockGet = vi.mocked(get)

type QueryOptions = Parameters<typeof useQuery>[0]
type RetryFn = (failureCount: number, error: unknown) => boolean

const getLastQueryOptions = () => {
return mockUseQuery.mock.calls.at(-1)?.[0] as QueryOptions
}

const getRetryFn = () => {
return getLastQueryOptions().retry as RetryFn
}

describe('knowledge dataset hooks', () => {
beforeEach(() => {
vi.clearAllMocks()
mockUseQuery.mockReturnValue({} as ReturnType<typeof useQuery>)
})

describe('useDatasetDetail', () => {
it('should not retry forbidden or missing dataset detail errors', () => {
// Arrange & Act
useDatasetDetail('dataset-1')
const retry = getRetryFn()

// Assert
expect(retry(0, new Response(null, { status: 403 }))).toBe(false)
expect(retry(0, new Response(null, { status: 404 }))).toBe(false)
})

it('should retry other dataset detail errors fewer than three times', () => {
// Arrange & Act
useDatasetDetail('dataset-1')
const retry = getRetryFn()

// Assert
expect(retry(2, new Error('temporary failure'))).toBe(true)
expect(retry(3, new Error('temporary failure'))).toBe(false)
})

it('should fetch dataset detail without silent mode', () => {
// Arrange
mockGet.mockResolvedValue({ id: 'dataset-1' })

// Act
useDatasetDetail('dataset-1')
const queryFn = getLastQueryOptions().queryFn as () => unknown
queryFn()

// Assert
expect(mockGet).toHaveBeenCalledWith('/datasets/dataset-1')
})
})

describe('useDatasetRelatedApps', () => {
it('should use explicit enabled option when provided', () => {
// Arrange & Act
useDatasetRelatedApps('dataset-1', { enabled: false })

// Assert
expect(getLastQueryOptions().enabled).toBe(false)
})

it('should enable related apps query when dataset id exists and no option is provided', () => {
// Arrange & Act
useDatasetRelatedApps('dataset-1')

// Assert
expect(getLastQueryOptions().enabled).toBe(true)
})
})
})
@ -110,13 +110,20 @@ export const useDatasetDetail = (datasetId: string) => {
queryKey: [...datasetDetailQueryKeyPrefix, datasetId],
queryFn: () => get<DataSet>(`/datasets/${datasetId}`),
enabled: !!datasetId,
retry: (failureCount, error) => {
if (error instanceof Response && [403, 404].includes(error.status))
return false

return failureCount < 3
},
})
}
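
Note: 403/404 are terminal here — retrying cannot make a forbidden or deleted dataset reappear, and the layout now redirects on exactly those statuses — so the retry callback short-circuits them and keeps three attempts for everything else. The same rule as a standalone predicate (illustrative):

const shouldRetryDatasetDetail = (failureCount: number, error: unknown): boolean => {
  if (error instanceof Response && [403, 404].includes(error.status))
    return false // terminal: the layout redirects to /datasets instead
  return failureCount < 3
}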

export const useDatasetRelatedApps = (datasetId: string) => {
export const useDatasetRelatedApps = (datasetId: string, options?: { enabled?: boolean }) => {
return useQuery({
queryKey: [NAME_SPACE, 'related-apps', datasetId],
queryFn: () => get<RelatedAppResponse>(`/datasets/${datasetId}/related-apps`),
enabled: options?.enabled ?? !!datasetId,
})
}
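
Note: the new `options` parameter lets callers gate the related-apps request, while the `??` fallback preserves the old `!!datasetId` behavior for every existing call site. Typical usage (the first line is the layout change from earlier in this commit):

// Fetch only once the dataset detail has loaded and no redirect is pending:
useDatasetRelatedApps(datasetId, { enabled: !!datasetRes && !shouldRedirect })

// Existing callers are unchanged:
useDatasetRelatedApps(datasetId)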