mirror of https://github.com/langgenius/dify.git

commit cf20e9fd38
Merge remote-tracking branch 'origin/main' into feat/e2e-testing
@@ -0,0 +1,21 @@
+name: Semantic Pull Request
+
+on:
+  pull_request:
+    types:
+      - opened
+      - edited
+      - reopened
+      - synchronize
+
+jobs:
+  lint:
+    name: Validate PR title
+    permissions:
+      pull-requests: read
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check title
+        uses: amannn/action-semantic-pull-request@v6.1.1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -422,7 +422,6 @@ class DatasetApi(Resource):
             raise NotFound("Dataset not found.")

         payload = DatasetUpdatePayload.model_validate(console_ns.payload or {})
-        payload_data = payload.model_dump(exclude_unset=True)
         current_user, current_tenant_id = current_account_with_tenant()
         # check embedding model setting
         if (
@@ -434,6 +433,7 @@ class DatasetApi(Resource):
                 dataset.tenant_id, payload.embedding_model_provider, payload.embedding_model
             )
             payload.is_multimodal = is_multimodal
+        payload_data = payload.model_dump(exclude_unset=True)
         # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
         DatasetPermissionService.check_permission(
             current_user, dataset, payload.permission, payload.partial_member_list
@@ -230,7 +230,7 @@ class ModelProviderModelApi(Resource):

        return {"result": "success"}, 200

-    @console_ns.expect(console_ns.models[ParserDeleteModels.__name__], validate=True)
+    @console_ns.expect(console_ns.models[ParserDeleteModels.__name__])
    @setup_required
    @login_required
    @is_admin_or_owner_required
@@ -4,7 +4,7 @@ from uuid import UUID

 from flask import request
 from flask_restx import Resource
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, field_validator
 from werkzeug.exceptions import BadRequest, InternalServerError, NotFound

 import services
@@ -52,11 +52,23 @@ class ChatRequestPayload(BaseModel):
     query: str
     files: list[dict[str, Any]] | None = None
     response_mode: Literal["blocking", "streaming"] | None = None
-    conversation_id: UUID | None = None
+    conversation_id: str | None = Field(default=None, description="Conversation UUID")
     retriever_from: str = Field(default="dev")
     auto_generate_name: bool = Field(default=True, description="Auto generate conversation name")
     workflow_id: str | None = Field(default=None, description="Workflow ID for advanced chat")

+    @field_validator("conversation_id", mode="before")
+    @classmethod
+    def normalize_conversation_id(cls, value: str | UUID | None) -> str | None:
+        """Allow missing or blank conversation IDs; enforce UUID format when provided."""
+        if not value:
+            return None
+
+        try:
+            return helper.uuid_value(value)
+        except ValueError as exc:
+            raise ValueError("conversation_id must be a valid UUID") from exc
+

 register_schema_models(service_api_ns, CompletionRequestPayload, ChatRequestPayload)
@@ -62,8 +62,7 @@ from core.app.task_pipeline.message_cycle_manager import MessageCycleManager
 from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
 from core.model_runtime.entities.llm_entities import LLMUsage
 from core.model_runtime.utils.encoders import jsonable_encoder
-from core.ops.entities.trace_entity import TraceTaskName
-from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
+from core.ops.ops_trace_manager import TraceQueueManager
 from core.workflow.enums import WorkflowExecutionStatus
 from core.workflow.nodes import NodeType
 from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
@@ -73,7 +72,7 @@ from extensions.ext_database import db
 from libs.datetime_utils import naive_utc_now
 from models import Account, Conversation, EndUser, Message, MessageFile
 from models.enums import CreatorUserRole
-from models.workflow import Workflow, WorkflowNodeExecutionModel
+from models.workflow import Workflow

 logger = logging.getLogger(__name__)

@@ -581,7 +580,7 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport):

             with self._database_session() as session:
                 # Save message
-                self._save_message(session=session, graph_runtime_state=resolved_state, trace_manager=trace_manager)
+                self._save_message(session=session, graph_runtime_state=resolved_state)

             yield workflow_finish_resp
         elif event.stopped_by in (
@@ -591,7 +590,7 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport):
             # When hitting input-moderation or annotation-reply, the workflow will not start
             with self._database_session() as session:
                 # Save message
-                self._save_message(session=session, trace_manager=trace_manager)
+                self._save_message(session=session)

             yield self._message_end_to_stream_response()

@@ -600,7 +599,6 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport):
         event: QueueAdvancedChatMessageEndEvent,
         *,
         graph_runtime_state: GraphRuntimeState | None = None,
-        trace_manager: TraceQueueManager | None = None,
         **kwargs,
     ) -> Generator[StreamResponse, None, None]:
         """Handle advanced chat message end events."""
@@ -618,7 +616,7 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport):

         # Save message
         with self._database_session() as session:
-            self._save_message(session=session, graph_runtime_state=resolved_state, trace_manager=trace_manager)
+            self._save_message(session=session, graph_runtime_state=resolved_state)

         yield self._message_end_to_stream_response()

@@ -772,13 +770,7 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport):
         if self._conversation_name_generate_thread:
             logger.debug("Conversation name generation running as daemon thread")

-    def _save_message(
-        self,
-        *,
-        session: Session,
-        graph_runtime_state: GraphRuntimeState | None = None,
-        trace_manager: TraceQueueManager | None = None,
-    ):
+    def _save_message(self, *, session: Session, graph_runtime_state: GraphRuntimeState | None = None):
         message = self._get_message(session=session)

         # If there are assistant files, remove markdown image links from answer
@@ -817,14 +809,6 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport):

         metadata = self._task_state.metadata.model_dump()
         message.message_metadata = json.dumps(jsonable_encoder(metadata))
-
-        # Extract model provider and model_id from workflow node executions for tracing
-        if message.workflow_run_id:
-            model_info = self._extract_model_info_from_workflow(session, message.workflow_run_id)
-            if model_info:
-                message.model_provider = model_info.get("provider")
-                message.model_id = model_info.get("model")
-
         message_files = [
             MessageFile(
                 message_id=message.id,
@@ -842,68 +826,6 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport):
         ]
         session.add_all(message_files)

-        # Trigger MESSAGE_TRACE for tracing integrations
-        if trace_manager:
-            trace_manager.add_trace_task(
-                TraceTask(
-                    TraceTaskName.MESSAGE_TRACE, conversation_id=self._conversation_id, message_id=self._message_id
-                )
-            )
-
-    def _extract_model_info_from_workflow(self, session: Session, workflow_run_id: str) -> dict[str, str] | None:
-        """
-        Extract model provider and model_id from workflow node executions.
-        Returns dict with 'provider' and 'model' keys, or None if not found.
-        """
-        try:
-            # Query workflow node executions for LLM or Agent nodes
-            stmt = (
-                select(WorkflowNodeExecutionModel)
-                .where(WorkflowNodeExecutionModel.workflow_run_id == workflow_run_id)
-                .where(WorkflowNodeExecutionModel.node_type.in_(["llm", "agent"]))
-                .order_by(WorkflowNodeExecutionModel.created_at.desc())
-                .limit(1)
-            )
-            node_execution = session.scalar(stmt)
-
-            if not node_execution:
-                return None
-
-            # Try to extract from execution_metadata for agent nodes
-            if node_execution.execution_metadata:
-                try:
-                    metadata = json.loads(node_execution.execution_metadata)
-                    agent_log = metadata.get("agent_log", [])
-                    # Look for the first agent thought with provider info
-                    for log_entry in agent_log:
-                        entry_metadata = log_entry.get("metadata", {})
-                        provider_str = entry_metadata.get("provider")
-                        if provider_str:
-                            # Parse format like "langgenius/deepseek/deepseek"
-                            parts = provider_str.split("/")
-                            if len(parts) >= 3:
-                                return {"provider": parts[1], "model": parts[2]}
-                            elif len(parts) == 2:
-                                return {"provider": parts[0], "model": parts[1]}
-                except (json.JSONDecodeError, KeyError, AttributeError) as e:
-                    logger.debug("Failed to parse execution_metadata: %s", e)
-
-            # Try to extract from process_data for llm nodes
-            if node_execution.process_data:
-                try:
-                    process_data = json.loads(node_execution.process_data)
-                    provider = process_data.get("model_provider")
-                    model = process_data.get("model_name")
-                    if provider and model:
-                        return {"provider": provider, "model": model}
-                except (json.JSONDecodeError, KeyError) as e:
-                    logger.debug("Failed to parse process_data: %s", e)
-
-            return None
-        except Exception as e:
-            logger.warning("Failed to extract model info from workflow: %s", e)
-            return None
-
     def _seed_graph_runtime_state_from_queue_manager(self) -> None:
         """Bootstrap the cached runtime state from the queue manager when present."""
         candidate = self._base_task_pipeline.queue_manager.graph_runtime_state
@@ -40,9 +40,6 @@ class EasyUITaskState(TaskState):
     """

     llm_result: LLMResult
-    first_token_time: float | None = None
-    last_token_time: float | None = None
-    is_streaming_response: bool = False


 class WorkflowTaskState(TaskState):
@@ -332,12 +332,6 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
                 if not self._task_state.llm_result.prompt_messages:
                     self._task_state.llm_result.prompt_messages = chunk.prompt_messages

-                # Track streaming response times
-                if self._task_state.first_token_time is None:
-                    self._task_state.first_token_time = time.perf_counter()
-                    self._task_state.is_streaming_response = True
-                self._task_state.last_token_time = time.perf_counter()
-
                 # handle output moderation chunk
                 should_direct_answer = self._handle_output_moderation_chunk(cast(str, delta_text))
                 if should_direct_answer:
@@ -404,18 +398,6 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
             message.total_price = usage.total_price
             message.currency = usage.currency
             self._task_state.llm_result.usage.latency = message.provider_response_latency
-
-            # Add streaming metrics to usage if available
-            if self._task_state.is_streaming_response and self._task_state.first_token_time:
-                start_time = self.start_at
-                first_token_time = self._task_state.first_token_time
-                last_token_time = self._task_state.last_token_time or first_token_time
-                usage.time_to_first_token = round(first_token_time - start_time, 3)
-                usage.time_to_generate = round(last_token_time - first_token_time, 3)
-
-            # Update metadata with the complete usage info
-            self._task_state.metadata.usage = usage
-
             message.message_metadata = self._task_state.metadata.model_dump_json()

             if trace_manager:
@@ -1,4 +1,4 @@
-from pydantic import BaseModel
+from pydantic import BaseModel, Field


 class PreviewDetail(BaseModel):
@@ -20,7 +20,7 @@ class IndexingEstimate(BaseModel):
 class PipelineDataset(BaseModel):
     id: str
     name: str
-    description: str
+    description: str | None = Field(default="", description="knowledge dataset description")
     chunk_structure: str

@@ -222,59 +222,6 @@ class TencentSpanBuilder:
             links=links,
         )

-    @staticmethod
-    def build_message_llm_span(
-        trace_info: MessageTraceInfo, trace_id: int, parent_span_id: int, user_id: str
-    ) -> SpanData:
-        """Build LLM span for message traces with detailed LLM attributes."""
-        status = Status(StatusCode.OK)
-        if trace_info.error:
-            status = Status(StatusCode.ERROR, trace_info.error)
-
-        # Extract model information from `metadata` or `message_data`
-        trace_metadata = trace_info.metadata or {}
-        message_data = trace_info.message_data or {}
-
-        model_provider = trace_metadata.get("ls_provider") or (
-            message_data.get("model_provider", "") if isinstance(message_data, dict) else ""
-        )
-        model_name = trace_metadata.get("ls_model_name") or (
-            message_data.get("model_id", "") if isinstance(message_data, dict) else ""
-        )
-
-        inputs_str = str(trace_info.inputs or "")
-        outputs_str = str(trace_info.outputs or "")
-
-        attributes = {
-            GEN_AI_SESSION_ID: trace_metadata.get("conversation_id", ""),
-            GEN_AI_USER_ID: str(user_id),
-            GEN_AI_SPAN_KIND: GenAISpanKind.GENERATION.value,
-            GEN_AI_FRAMEWORK: "dify",
-            GEN_AI_MODEL_NAME: str(model_name),
-            GEN_AI_PROVIDER: str(model_provider),
-            GEN_AI_USAGE_INPUT_TOKENS: str(trace_info.message_tokens or 0),
-            GEN_AI_USAGE_OUTPUT_TOKENS: str(trace_info.answer_tokens or 0),
-            GEN_AI_USAGE_TOTAL_TOKENS: str(trace_info.total_tokens or 0),
-            GEN_AI_PROMPT: inputs_str,
-            GEN_AI_COMPLETION: outputs_str,
-            INPUT_VALUE: inputs_str,
-            OUTPUT_VALUE: outputs_str,
-        }
-
-        if trace_info.is_streaming_request:
-            attributes[GEN_AI_IS_STREAMING_REQUEST] = "true"
-
-        return SpanData(
-            trace_id=trace_id,
-            parent_span_id=parent_span_id,
-            span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "llm"),
-            name="GENERATION",
-            start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time),
-            end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time),
-            attributes=attributes,
-            status=status,
-        )
-
     @staticmethod
     def build_tool_span(trace_info: ToolTraceInfo, trace_id: int, parent_span_id: int) -> SpanData:
         """Build tool span."""
@@ -107,12 +107,8 @@ class TencentDataTrace(BaseTraceInstance):
             links.append(TencentTraceUtils.create_link(trace_info.trace_id))

         message_span = TencentSpanBuilder.build_message_span(trace_info, trace_id, str(user_id), links)
+        self.trace_client.add_span(message_span)

-        # Add LLM child span with detailed attributes
-        parent_span_id = TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message")
-        llm_span = TencentSpanBuilder.build_message_llm_span(trace_info, trace_id, parent_span_id, str(user_id))
-        self.trace_client.add_span(llm_span)
-        self.trace_client.add_span(message_span)

         self._record_message_llm_metrics(trace_info)

@@ -451,12 +451,21 @@ class RetrievalService:
                             "position": child_chunk.position,
                             "score": document.metadata.get("score", 0.0),
                         }
-                        segment_child_map[segment.id]["child_chunks"].append(child_chunk_detail)
-                        segment_child_map[segment.id]["max_score"] = max(
-                            segment_child_map[segment.id]["max_score"], document.metadata.get("score", 0.0)
-                        )
+                        if segment.id in segment_child_map:
+                            segment_child_map[segment.id]["child_chunks"].append(child_chunk_detail)
+                            segment_child_map[segment.id]["max_score"] = max(
+                                segment_child_map[segment.id]["max_score"], document.metadata.get("score", 0.0)
+                            )
+                        else:
+                            segment_child_map[segment.id] = {
+                                "max_score": document.metadata.get("score", 0.0),
+                                "child_chunks": [child_chunk_detail],
+                            }
                         if attachment_info:
-                            segment_file_map[segment.id].append(attachment_info)
+                            if segment.id in segment_file_map:
+                                segment_file_map[segment.id].append(attachment_info)
+                            else:
+                                segment_file_map[segment.id] = [attachment_info]
                     else:
                         # Handle normal documents
                         segment = None
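
The membership guards above can also be written with dict.setdefault; a minimal equivalent sketch (equivalent as long as retrieval scores are non-negative, and assuming child_chunk_detail and attachment_info are plain dicts as in the surrounding code):

    entry = segment_child_map.setdefault(segment.id, {"max_score": 0.0, "child_chunks": []})
    entry["child_chunks"].append(child_chunk_detail)
    entry["max_score"] = max(entry["max_score"], document.metadata.get("score", 0.0))

    if attachment_info:
        segment_file_map.setdefault(segment.id, []).append(attachment_info)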
@@ -209,7 +209,7 @@ class ParagraphIndexProcessor(BaseIndexProcessor):
         if dataset.indexing_technique == "high_quality":
             vector = Vector(dataset)
             vector.create(documents)
-            if all_multimodal_documents:
+            if all_multimodal_documents and dataset.is_multimodal:
                 vector.create_multimodal(all_multimodal_documents)
         elif dataset.indexing_technique == "economy":
             keyword = Keyword(dataset)
@@ -312,7 +312,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor):
         vector = Vector(dataset)
         if all_child_documents:
             vector.create(all_child_documents)
-        if all_multimodal_documents:
+        if all_multimodal_documents and dataset.is_multimodal:
             vector.create_multimodal(all_multimodal_documents)

     def format_preview(self, chunks: Any) -> Mapping[str, Any]:
@@ -592,111 +592,116 @@ class DatasetRetrieval:
         """Handle retrieval end."""
         with flask_app.app_context():
             dify_documents = [document for document in documents if document.provider == "dify"]
-            segment_ids = []
-            segment_index_node_ids = []
+            if not dify_documents:
+                self._send_trace_task(message_id, documents, timer)
+                return

             with Session(db.engine) as session:
-                for document in dify_documents:
-                    if document.metadata is not None:
-                        dataset_document_stmt = select(DatasetDocument).where(
-                            DatasetDocument.id == document.metadata["document_id"]
-                        )
-                        dataset_document = session.scalar(dataset_document_stmt)
-                        if dataset_document:
-                            if dataset_document.doc_form == IndexStructureType.PARENT_CHILD_INDEX:
-                                segment_id = None
-                                if (
-                                    "doc_type" not in document.metadata
-                                    or document.metadata.get("doc_type") == DocType.TEXT
-                                ):
-                                    child_chunk_stmt = select(ChildChunk).where(
-                                        ChildChunk.index_node_id == document.metadata["doc_id"],
-                                        ChildChunk.dataset_id == dataset_document.dataset_id,
-                                        ChildChunk.document_id == dataset_document.id,
-                                    )
-                                    child_chunk = session.scalar(child_chunk_stmt)
-                                    if child_chunk:
-                                        segment_id = child_chunk.segment_id
-                                elif (
-                                    "doc_type" in document.metadata
-                                    and document.metadata.get("doc_type") == DocType.IMAGE
-                                ):
-                                    attachment_info_dict = RetrievalService.get_segment_attachment_info(
-                                        dataset_document.dataset_id,
-                                        dataset_document.tenant_id,
-                                        document.metadata.get("doc_id") or "",
-                                        session,
-                                    )
-                                    if attachment_info_dict:
-                                        segment_id = attachment_info_dict["segment_id"]
+                # Collect all document_ids and batch fetch DatasetDocuments
+                document_ids = {
+                    doc.metadata["document_id"]
+                    for doc in dify_documents
+                    if doc.metadata and "document_id" in doc.metadata
+                }
+                if not document_ids:
+                    self._send_trace_task(message_id, documents, timer)
+                    return
+
+                dataset_docs_stmt = select(DatasetDocument).where(DatasetDocument.id.in_(document_ids))
+                dataset_docs = session.scalars(dataset_docs_stmt).all()
+                dataset_doc_map = {str(doc.id): doc for doc in dataset_docs}
+
+                # Categorize documents by type and collect necessary IDs
+                parent_child_text_docs: list[tuple[Document, DatasetDocument]] = []
+                parent_child_image_docs: list[tuple[Document, DatasetDocument]] = []
+                normal_text_docs: list[tuple[Document, DatasetDocument]] = []
+                normal_image_docs: list[tuple[Document, DatasetDocument]] = []
+
+                for doc in dify_documents:
+                    if not doc.metadata or "document_id" not in doc.metadata:
+                        continue
+                    dataset_doc = dataset_doc_map.get(doc.metadata["document_id"])
+                    if not dataset_doc:
+                        continue
+
+                    is_image = doc.metadata.get("doc_type") == DocType.IMAGE
+                    is_parent_child = dataset_doc.doc_form == IndexStructureType.PARENT_CHILD_INDEX
+
+                    if is_parent_child:
+                        if is_image:
+                            parent_child_image_docs.append((doc, dataset_doc))
+                        else:
+                            parent_child_text_docs.append((doc, dataset_doc))
+                    else:
+                        if is_image:
+                            normal_image_docs.append((doc, dataset_doc))
+                        else:
+                            normal_text_docs.append((doc, dataset_doc))
+
+                segment_ids_to_update: set[str] = set()
+
+                # Process PARENT_CHILD_INDEX text documents - batch fetch ChildChunks
+                if parent_child_text_docs:
+                    index_node_ids = [doc.metadata["doc_id"] for doc, _ in parent_child_text_docs if doc.metadata]
+                    if index_node_ids:
+                        child_chunks_stmt = select(ChildChunk).where(ChildChunk.index_node_id.in_(index_node_ids))
+                        child_chunks = session.scalars(child_chunks_stmt).all()
+                        child_chunk_map = {chunk.index_node_id: chunk.segment_id for chunk in child_chunks}
+                        for doc, _ in parent_child_text_docs:
+                            if doc.metadata:
+                                segment_id = child_chunk_map.get(doc.metadata["doc_id"])
+                                if segment_id:
-                                if segment_id not in segment_ids:
-                                    segment_ids.append(segment_id)
-                                    _ = (
-                                        session.query(DocumentSegment)
-                                        .where(DocumentSegment.id == segment_id)
-                                        .update(
-                                            {DocumentSegment.hit_count: DocumentSegment.hit_count + 1},
-                                            synchronize_session=False,
-                                        )
-                                    )
-                            else:
-                                query = None
-                                if (
-                                    "doc_type" not in document.metadata
-                                    or document.metadata.get("doc_type") == DocType.TEXT
-                                ):
-                                    if document.metadata["doc_id"] not in segment_index_node_ids:
-                                        segment = (
-                                            session.query(DocumentSegment)
-                                            .where(DocumentSegment.index_node_id == document.metadata["doc_id"])
-                                            .first()
-                                        )
-                                        if segment:
-                                            segment_index_node_ids.append(document.metadata["doc_id"])
-                                            segment_ids.append(segment.id)
-                                            query = session.query(DocumentSegment).where(
-                                                DocumentSegment.id == segment.id
-                                            )
-                                elif (
-                                    "doc_type" in document.metadata
-                                    and document.metadata.get("doc_type") == DocType.IMAGE
-                                ):
-                                    attachment_info_dict = RetrievalService.get_segment_attachment_info(
-                                        dataset_document.dataset_id,
-                                        dataset_document.tenant_id,
-                                        document.metadata.get("doc_id") or "",
-                                        session,
-                                    )
-                                    if attachment_info_dict:
-                                        segment_id = attachment_info_dict["segment_id"]
-                                        if segment_id not in segment_ids:
-                                            segment_ids.append(segment_id)
-                                            query = session.query(DocumentSegment).where(DocumentSegment.id == segment_id)
-                                if query:
-                                    # if 'dataset_id' in document.metadata:
-                                    if "dataset_id" in document.metadata:
-                                        query = query.where(
-                                            DocumentSegment.dataset_id == document.metadata["dataset_id"]
-                                        )
+                                    segment_ids_to_update.add(str(segment_id))

-                                    # add hit count to document segment
-                                    query.update(
-                                        {DocumentSegment.hit_count: DocumentSegment.hit_count + 1},
-                                        synchronize_session=False,
-                                    )
+                # Process non-PARENT_CHILD_INDEX text documents - batch fetch DocumentSegments
+                if normal_text_docs:
+                    index_node_ids = [doc.metadata["doc_id"] for doc, _ in normal_text_docs if doc.metadata]
+                    if index_node_ids:
+                        segments_stmt = select(DocumentSegment).where(DocumentSegment.index_node_id.in_(index_node_ids))
+                        segments = session.scalars(segments_stmt).all()
+                        segment_map = {seg.index_node_id: seg.id for seg in segments}
+                        for doc, _ in normal_text_docs:
+                            if doc.metadata:
+                                segment_id = segment_map.get(doc.metadata["doc_id"])
+                                if segment_id:
+                                    segment_ids_to_update.add(str(segment_id))

-                db.session.commit()
+                # Process IMAGE documents - batch fetch SegmentAttachmentBindings
+                all_image_docs = parent_child_image_docs + normal_image_docs
+                if all_image_docs:
+                    attachment_ids = [
+                        doc.metadata["doc_id"]
+                        for doc, _ in all_image_docs
+                        if doc.metadata and doc.metadata.get("doc_id")
+                    ]
+                    if attachment_ids:
+                        bindings_stmt = select(SegmentAttachmentBinding).where(
+                            SegmentAttachmentBinding.attachment_id.in_(attachment_ids)
+                        )
+                        bindings = session.scalars(bindings_stmt).all()
+                        segment_ids_to_update.update(str(binding.segment_id) for binding in bindings)

-            # get tracing instance
-            trace_manager: TraceQueueManager | None = (
-                self.application_generate_entity.trace_manager if self.application_generate_entity else None
-            )
-            if trace_manager:
-                trace_manager.add_trace_task(
-                    TraceTask(
-                        TraceTaskName.DATASET_RETRIEVAL_TRACE, message_id=message_id, documents=documents, timer=timer
-                    )
-                )
+                # Batch update hit_count for all segments
+                if segment_ids_to_update:
+                    session.query(DocumentSegment).where(DocumentSegment.id.in_(segment_ids_to_update)).update(
+                        {DocumentSegment.hit_count: DocumentSegment.hit_count + 1},
+                        synchronize_session=False,
+                    )
+                    session.commit()
+
+            self._send_trace_task(message_id, documents, timer)
+
+    def _send_trace_task(self, message_id: str | None, documents: list[Document], timer: dict | None):
+        """Send trace task if trace manager is available."""
+        trace_manager: TraceQueueManager | None = (
+            self.application_generate_entity.trace_manager if self.application_generate_entity else None
+        )
+        if trace_manager:
+            trace_manager.add_trace_task(
+                TraceTask(
+                    TraceTaskName.DATASET_RETRIEVAL_TRACE, message_id=message_id, documents=documents, timer=timer
+                )
+            )

     def _on_query(
         self,
@@ -13,5 +13,5 @@ def remove_leading_symbols(text: str) -> str:
     """
     # Match Unicode ranges for punctuation and symbols
     # FIXME this pattern is confused quick fix for #11868 maybe refactor it later
-    pattern = r"^[\u2000-\u206F\u2E00-\u2E7F\u3000-\u303F\"#$%&'()*+,./:;<=>?@^_`~]+"
+    pattern = r'^[\[\]\u2000-\u2025\u2027-\u206F\u2E00-\u2E7F\u3000-\u300F\u3011-\u303F"#$%&\'()*+,./:;<=>?@^_`~]+'
     return re.sub(pattern, "", text)
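
For reference, the behavioral difference between the two patterns: the old class range \u3000-\u303F swallowed the CJK corner bracket 【 (U+3010), while the new ranges skip it (and also spare U+2026, the horizontal ellipsis), so bracketed titles survive. A quick check:

    import re

    OLD = r"^[\u2000-\u206F\u2E00-\u2E7F\u3000-\u303F\"#$%&'()*+,./:;<=>?@^_`~]+"
    NEW = r'^[\[\]\u2000-\u2025\u2027-\u206F\u2E00-\u2E7F\u3000-\u300F\u3011-\u303F"#$%&\'()*+,./:;<=>?@^_`~]+'

    print(re.sub(OLD, "", "【测试】"))  # 测试】 -- leading bracket stripped
    print(re.sub(NEW, "", "【测试】"))  # 【测试】 -- preserved, matching the updated test later in this diff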
@@ -221,7 +221,7 @@ class WorkflowToolProviderController(ToolProviderController):
             session.query(WorkflowToolProvider)
             .where(
                 WorkflowToolProvider.tenant_id == tenant_id,
-                WorkflowToolProvider.app_id == self.provider_id,
+                WorkflowToolProvider.id == self.provider_id,
             )
             .first()
         )
@@ -59,7 +59,7 @@ class OutputVariableEntity(BaseModel):
     """

     variable: str
-    value_type: OutputVariableType
+    value_type: OutputVariableType = OutputVariableType.ANY
     value_selector: Sequence[str]

     @field_validator("value_type", mode="before")
@@ -412,16 +412,20 @@ class Executor:
                     body_string += f"--{boundary}\r\n"
                     body_string += f'Content-Disposition: form-data; name="{key}"\r\n\r\n'
-                    # decode content safely
-                    try:
-                        body_string += content.decode("utf-8")
-                    except UnicodeDecodeError:
-                        body_string += content.decode("utf-8", errors="replace")
-                    body_string += "\r\n"
+                    # Do not decode binary content; use a placeholder with file metadata instead.
+                    # Includes filename, size, and MIME type for better logging context.
+                    body_string += (
+                        f"<file_content_binary: '{file_entry[1][0] or 'unknown'}', "
+                        f"type='{file_entry[1][2] if len(file_entry[1]) > 2 else 'unknown'}', "
+                        f"size={len(content)} bytes>\r\n"
+                    )
                 body_string += f"--{boundary}--\r\n"
             elif self.node_data.body:
                 if self.content:
+                    # If content is bytes, do not decode it; show a placeholder with size.
+                    # Provides content size information for binary data without exposing the raw bytes.
                     if isinstance(self.content, bytes):
-                        body_string = self.content.decode("utf-8", errors="replace")
+                        body_string = f"<binary_content: size={len(self.content)} bytes>"
                     else:
                         body_string = self.content
                 elif self.data and self.node_data.body.type == "x-www-form-urlencoded":
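
A small illustration (hypothetical byte values) of why the placeholder beats lossy decoding when a logged request body contains binary uploads:

    png_header = b"\x89PNG\r\n\x1a\n"

    # Old behavior: errors="replace" yields mojibake in the logged body.
    print(png_header.decode("utf-8", errors="replace"))

    # New behavior: a stable, size-only placeholder.
    print(f"<binary_content: size={len(png_header)} bytes>")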
@@ -107,7 +107,7 @@ def email(email):
 EmailStr = Annotated[str, AfterValidator(email)]


-def uuid_value(value):
+def uuid_value(value: Any) -> str:
     if value == "":
         return str(value)

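
The hunk only shows the signature change; the body past the empty-string early return is not included. A sketch of the shape such a helper typically takes (an assumption, not the actual implementation):

    import uuid
    from typing import Any

    def uuid_value(value: Any) -> str:
        if value == "":
            return str(value)
        try:
            return str(uuid.UUID(str(value)))  # canonicalize and validate (assumed body)
        except (ValueError, AttributeError, TypeError) as exc:
            raise ValueError(f"{value} is not a valid uuid") from exc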
@@ -1,4 +1,4 @@
-"""empty message
+"""mysql adaptation

 Revision ID: 09cfdda155d1
 Revises: 669ffd70119c
@@ -97,11 +97,31 @@ def downgrade():
         batch_op.alter_column('include_plugins',
                existing_type=sa.JSON(),
                type_=postgresql.ARRAY(sa.VARCHAR(length=255)),
-               existing_nullable=False)
+               existing_nullable=False,
+               postgresql_using="""
+               COALESCE(
+                   regexp_replace(
+                       replace(replace(include_plugins::text, '[', '{'), ']', '}'),
+                       '"',
+                       '',
+                       'g'
+                   )::varchar(255)[],
+                   ARRAY[]::varchar(255)[]
+               )""")
         batch_op.alter_column('exclude_plugins',
                existing_type=sa.JSON(),
                type_=postgresql.ARRAY(sa.VARCHAR(length=255)),
-               existing_nullable=False)
+               existing_nullable=False,
+               postgresql_using="""
+               COALESCE(
+                   regexp_replace(
+                       replace(replace(exclude_plugins::text, '[', '{'), ']', '}'),
+                       '"',
+                       '',
+                       'g'
+                   )::varchar(255)[],
+                   ARRAY[]::varchar(255)[]
+               )""")

     with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
         batch_op.alter_column('external_knowledge_id',
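
The postgresql_using expression rewrites each JSON array literal into a Postgres array literal before the cast. The same text transformation traced in Python for clarity (illustrative values):

    import re

    json_text = '["plugin-a", "plugin-b"]'                   # JSON column value as text
    braced = json_text.replace("[", "{").replace("]", "}")   # '{"plugin-a", "plugin-b"}'
    array_literal = re.sub('"', "", braced)                  # '{plugin-a, plugin-b}'
    # ::varchar(255)[] then parses this as a Postgres array; COALESCE guards NULL input.
    print(array_literal)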
@@ -835,7 +835,29 @@ class Conversation(Base):

     @property
     def status_count(self):
-        messages = db.session.scalars(select(Message).where(Message.conversation_id == self.id)).all()
+        from models.workflow import WorkflowRun
+
+        # Get all messages with workflow_run_id for this conversation
+        messages = db.session.scalars(
+            select(Message).where(Message.conversation_id == self.id, Message.workflow_run_id.isnot(None))
+        ).all()
+
+        if not messages:
+            return None
+
+        # Batch load all workflow runs in a single query, filtered by this conversation's app_id
+        workflow_run_ids = [msg.workflow_run_id for msg in messages if msg.workflow_run_id]
+        workflow_runs = {}
+
+        if workflow_run_ids:
+            workflow_runs_query = db.session.scalars(
+                select(WorkflowRun).where(
+                    WorkflowRun.id.in_(workflow_run_ids),
+                    WorkflowRun.app_id == self.app_id,  # Filter by this conversation's app_id
+                )
+            ).all()
+            workflow_runs = {run.id: run for run in workflow_runs_query}
+
         status_counts = {
             WorkflowExecutionStatus.RUNNING: 0,
             WorkflowExecutionStatus.SUCCEEDED: 0,
@@ -845,18 +867,24 @@ class Conversation(Base):
         }

         for message in messages:
-            if message.workflow_run:
-                status_counts[WorkflowExecutionStatus(message.workflow_run.status)] += 1
+            # Guard against None to satisfy type checker and avoid invalid dict lookups
+            if message.workflow_run_id is None:
+                continue
+            workflow_run = workflow_runs.get(message.workflow_run_id)
+            if not workflow_run:
+                continue

-        return (
-            {
-                "success": status_counts[WorkflowExecutionStatus.SUCCEEDED],
-                "failed": status_counts[WorkflowExecutionStatus.FAILED],
-                "partial_success": status_counts[WorkflowExecutionStatus.PARTIAL_SUCCEEDED],
-            }
-            if messages
-            else None
-        )
+            try:
+                status_counts[WorkflowExecutionStatus(workflow_run.status)] += 1
+            except (ValueError, KeyError):
+                # Handle invalid status values gracefully
+                pass
+
+        return {
+            "success": status_counts[WorkflowExecutionStatus.SUCCEEDED],
+            "failed": status_counts[WorkflowExecutionStatus.FAILED],
+            "partial_success": status_counts[WorkflowExecutionStatus.PARTIAL_SUCCEEDED],
+        }

     @property
     def first_message(self):
@@ -1255,13 +1283,9 @@ class Message(Base):
             "id": self.id,
             "app_id": self.app_id,
             "conversation_id": self.conversation_id,
-            "model_provider": self.model_provider,
-            "model_id": self.model_id,
             "inputs": self.inputs,
             "query": self.query,
             "message_tokens": self.message_tokens,
             "answer_tokens": self.answer_tokens,
             "provider_response_latency": self.provider_response_latency,
             "total_price": self.total_price,
             "message": self.message,
             "answer": self.answer,
@@ -1283,12 +1307,8 @@ class Message(Base):
             id=data["id"],
             app_id=data["app_id"],
             conversation_id=data["conversation_id"],
-            model_provider=data.get("model_provider"),
-            model_id=data["model_id"],
             inputs=data["inputs"],
             message_tokens=data.get("message_tokens", 0),
             answer_tokens=data.get("answer_tokens", 0),
             provider_response_latency=data.get("provider_response_latency", 0.0),
             total_price=data["total_price"],
             query=data["query"],
             message=data["message"],
@@ -1,6 +1,6 @@
 [project]
 name = "dify-api"
-version = "1.10.1"
+version = "1.11.0"
 requires-python = ">=3.11,<3.13"

 dependencies = [
@@ -151,7 +151,7 @@ dev = [
    "types-pywin32~=310.0.0",
    "types-pyyaml~=6.0.12",
    "types-regex~=2024.11.6",
-    "types-shapely~=2.0.0",
+    "types-shapely~=2.1.0",
    "types-simplejson>=3.20.0",
    "types-six>=1.17.0",
    "types-tensorflow>=2.18.0",
@@ -673,6 +673,8 @@ class DatasetService:
         Returns:
             str: Action to perform ('add', 'remove', 'update', or None)
         """
+        if "indexing_technique" not in data:
+            return None
         if dataset.indexing_technique != data["indexing_technique"]:
             if data["indexing_technique"] == "economy":
                 # Remove embedding model configuration for economy mode
@@ -70,9 +70,28 @@ class ModelProviderService:
                 continue

             provider_config = provider_configuration.custom_configuration.provider
-            model_config = provider_configuration.custom_configuration.models
+            models = provider_configuration.custom_configuration.models
             can_added_models = provider_configuration.custom_configuration.can_added_models

+            # IMPORTANT: Never expose decrypted credentials in the provider list API.
+            # Sanitize custom model configurations by dropping the credentials payload.
+            sanitized_model_config = []
+            if models:
+                from core.entities.provider_entities import CustomModelConfiguration  # local import to avoid cycles
+
+                for model in models:
+                    sanitized_model_config.append(
+                        CustomModelConfiguration(
+                            model=model.model,
+                            model_type=model.model_type,
+                            credentials=None,  # strip secrets from list view
+                            current_credential_id=model.current_credential_id,
+                            current_credential_name=model.current_credential_name,
+                            available_model_credentials=model.available_model_credentials,
+                            unadded_to_model_list=model.unadded_to_model_list,
+                        )
+                    )
+
             provider_response = ProviderResponse(
                 tenant_id=tenant_id,
                 provider=provider_configuration.provider.provider,
@@ -95,7 +114,7 @@ class ModelProviderService:
                     current_credential_id=getattr(provider_config, "current_credential_id", None),
                     current_credential_name=getattr(provider_config, "current_credential_name", None),
                     available_credentials=getattr(provider_config, "available_credentials", []),
-                    custom_models=model_config,
+                    custom_models=sanitized_model_config,
                     can_added_models=can_added_models,
                 ),
                 system_configuration=SystemConfigurationResponse(
@@ -0,0 +1,127 @@
+app:
+  description: 'End node without value_type field reproduction'
+  icon: 🤖
+  icon_background: '#FFEAD5'
+  mode: workflow
+  name: end_node_without_value_type_field_reproduction
+  use_icon_as_answer_icon: false
+dependencies: []
+kind: app
+version: 0.5.0
+workflow:
+  conversation_variables: []
+  environment_variables: []
+  features:
+    file_upload:
+      allowed_file_extensions:
+      - .JPG
+      - .JPEG
+      - .PNG
+      - .GIF
+      - .WEBP
+      - .SVG
+      allowed_file_types:
+      - image
+      allowed_file_upload_methods:
+      - local_file
+      - remote_url
+      enabled: false
+      fileUploadConfig:
+        audio_file_size_limit: 50
+        batch_count_limit: 5
+        file_size_limit: 15
+        image_file_batch_limit: 10
+        image_file_size_limit: 10
+        single_chunk_attachment_limit: 10
+        video_file_size_limit: 100
+        workflow_file_upload_limit: 10
+      image:
+        enabled: false
+        number_limits: 3
+        transfer_methods:
+        - local_file
+        - remote_url
+      number_limits: 3
+    opening_statement: ''
+    retriever_resource:
+      enabled: true
+    sensitive_word_avoidance:
+      enabled: false
+    speech_to_text:
+      enabled: false
+    suggested_questions: []
+    suggested_questions_after_answer:
+      enabled: false
+    text_to_speech:
+      enabled: false
+      language: ''
+      voice: ''
+  graph:
+    edges:
+    - data:
+        isInIteration: false
+        isInLoop: false
+        sourceType: start
+        targetType: end
+      id: 1765423445456-source-1765423454810-target
+      source: '1765423445456'
+      sourceHandle: source
+      target: '1765423454810'
+      targetHandle: target
+      type: custom
+      zIndex: 0
+    nodes:
+    - data:
+        selected: false
+        title: 用户输入
+        type: start
+        variables:
+        - default: ''
+          hint: ''
+          label: query
+          max_length: 48
+          options: []
+          placeholder: ''
+          required: true
+          type: text-input
+          variable: query
+      height: 109
+      id: '1765423445456'
+      position:
+        x: -48
+        y: 261
+      positionAbsolute:
+        x: -48
+        y: 261
+      selected: false
+      sourcePosition: right
+      targetPosition: left
+      type: custom
+      width: 242
+    - data:
+        outputs:
+        - value_selector:
+          - '1765423445456'
+          - query
+          variable: query
+        selected: true
+        title: 输出
+        type: end
+      height: 88
+      id: '1765423454810'
+      position:
+        x: 382
+        y: 282
+      positionAbsolute:
+        x: 382
+        y: 282
+      selected: true
+      sourcePosition: right
+      targetPosition: left
+      type: custom
+      width: 242
+    viewport:
+      x: 139
+      y: -135
+      zoom: 1
+  rag_pipeline_variables: []
@@ -0,0 +1,25 @@
+import uuid
+
+import pytest
+from pydantic import ValidationError
+
+from controllers.service_api.app.completion import ChatRequestPayload
+
+
+def test_chat_request_payload_accepts_blank_conversation_id():
+    payload = ChatRequestPayload.model_validate({"inputs": {}, "query": "hello", "conversation_id": ""})
+
+    assert payload.conversation_id is None
+
+
+def test_chat_request_payload_validates_uuid():
+    conversation_id = str(uuid.uuid4())
+
+    payload = ChatRequestPayload.model_validate({"inputs": {}, "query": "hello", "conversation_id": conversation_id})
+
+    assert payload.conversation_id == conversation_id
+
+
+def test_chat_request_payload_rejects_invalid_uuid():
+    with pytest.raises(ValidationError):
+        ChatRequestPayload.model_validate({"inputs": {}, "query": "hello", "conversation_id": "invalid"})
@@ -0,0 +1,60 @@
+"""
+Test case for end node without value_type field (backward compatibility).
+
+This test validates that end nodes work correctly even when the value_type
+field is missing from the output configuration, ensuring backward compatibility
+with older workflow definitions.
+"""
+
+from core.workflow.graph_events import (
+    GraphRunStartedEvent,
+    GraphRunSucceededEvent,
+    NodeRunStartedEvent,
+    NodeRunStreamChunkEvent,
+    NodeRunSucceededEvent,
+)
+
+from .test_table_runner import TableTestRunner, WorkflowTestCase
+
+
+def test_end_node_without_value_type_field():
+    """
+    Test that end node works without explicit value_type field.
+
+    The fixture implements a simple workflow that:
+    1. Takes a query input from start node
+    2. Passes it directly to end node
+    3. End node outputs the value without specifying value_type
+    4. Should correctly infer the type and output the value
+
+    This ensures backward compatibility with workflow definitions
+    created before value_type became a required field.
+    """
+    fixture_name = "end_node_without_value_type_field_workflow"
+
+    case = WorkflowTestCase(
+        fixture_path=fixture_name,
+        inputs={"query": "test query"},
+        expected_outputs={"query": "test query"},
+        expected_event_sequence=[
+            # Graph start
+            GraphRunStartedEvent,
+            # Start node
+            NodeRunStartedEvent,
+            NodeRunStreamChunkEvent,  # Start node streams the input value
+            NodeRunSucceededEvent,
+            # End node
+            NodeRunStartedEvent,
+            NodeRunSucceededEvent,
+            # Graph end
+            GraphRunSucceededEvent,
+        ],
+        description="End node without value_type field should work correctly",
+    )
+
+    runner = TableTestRunner()
+    result = runner.run_test_case(case)
+    assert result.success, f"Test failed: {result.error}"
+    assert result.actual_outputs == {"query": "test query"}, (
+        f"Expected output to be {{'query': 'test query'}}, got {result.actual_outputs}"
+    )
@@ -1149,3 +1149,258 @@ class TestModelIntegration:
         # Assert
         assert site.app_id == app.id
         assert app.enable_site is True
+
+
+class TestConversationStatusCount:
+    """Test suite for Conversation.status_count property N+1 query fix."""
+
+    def test_status_count_no_messages(self):
+        """Test status_count returns None when conversation has no messages."""
+        # Arrange
+        conversation = Conversation(
+            app_id=str(uuid4()),
+            mode=AppMode.CHAT,
+            name="Test Conversation",
+            status="normal",
+            from_source="api",
+        )
+        conversation.id = str(uuid4())
+
+        # Mock the database query to return no messages
+        with patch("models.model.db.session.scalars") as mock_scalars:
+            mock_scalars.return_value.all.return_value = []
+
+            # Act
+            result = conversation.status_count
+
+            # Assert
+            assert result is None
+
+    def test_status_count_messages_without_workflow_runs(self):
+        """Test status_count when messages have no workflow_run_id."""
+        # Arrange
+        app_id = str(uuid4())
+        conversation_id = str(uuid4())
+
+        conversation = Conversation(
+            app_id=app_id,
+            mode=AppMode.CHAT,
+            name="Test Conversation",
+            status="normal",
+            from_source="api",
+        )
+        conversation.id = conversation_id
+
+        # Mock the database query to return no messages with workflow_run_id
+        with patch("models.model.db.session.scalars") as mock_scalars:
+            mock_scalars.return_value.all.return_value = []
+
+            # Act
+            result = conversation.status_count
+
+            # Assert
+            assert result is None
+
+    def test_status_count_batch_loading_implementation(self):
+        """Test that status_count uses batch loading instead of N+1 queries."""
+        # Arrange
+        from core.workflow.enums import WorkflowExecutionStatus
+
+        app_id = str(uuid4())
+        conversation_id = str(uuid4())
+
+        # Create workflow run IDs
+        workflow_run_id_1 = str(uuid4())
+        workflow_run_id_2 = str(uuid4())
+        workflow_run_id_3 = str(uuid4())
+
+        conversation = Conversation(
+            app_id=app_id,
+            mode=AppMode.CHAT,
+            name="Test Conversation",
+            status="normal",
+            from_source="api",
+        )
+        conversation.id = conversation_id
+
+        # Mock messages with workflow_run_id
+        mock_messages = [
+            MagicMock(
+                conversation_id=conversation_id,
+                workflow_run_id=workflow_run_id_1,
+            ),
+            MagicMock(
+                conversation_id=conversation_id,
+                workflow_run_id=workflow_run_id_2,
+            ),
+            MagicMock(
+                conversation_id=conversation_id,
+                workflow_run_id=workflow_run_id_3,
+            ),
+        ]
+
+        # Mock workflow runs with different statuses
+        mock_workflow_runs = [
+            MagicMock(
+                id=workflow_run_id_1,
+                status=WorkflowExecutionStatus.SUCCEEDED.value,
+                app_id=app_id,
+            ),
+            MagicMock(
+                id=workflow_run_id_2,
+                status=WorkflowExecutionStatus.FAILED.value,
+                app_id=app_id,
+            ),
+            MagicMock(
+                id=workflow_run_id_3,
+                status=WorkflowExecutionStatus.PARTIAL_SUCCEEDED.value,
+                app_id=app_id,
+            ),
+        ]
+
+        # Track database calls
+        calls_made = []
+
+        def mock_scalars(query):
+            calls_made.append(str(query))
+            mock_result = MagicMock()
+
+            # Return messages for the first query (messages with workflow_run_id)
+            if "messages" in str(query) and "conversation_id" in str(query):
+                mock_result.all.return_value = mock_messages
+            # Return workflow runs for the batch query
+            elif "workflow_runs" in str(query):
+                mock_result.all.return_value = mock_workflow_runs
+            else:
+                mock_result.all.return_value = []
+
+            return mock_result
+
+        # Act & Assert
+        with patch("models.model.db.session.scalars", side_effect=mock_scalars):
+            result = conversation.status_count
+
+            # Verify only 2 database queries were made (not N+1)
+            assert len(calls_made) == 2, f"Expected 2 queries, got {len(calls_made)}: {calls_made}"
+
+            # Verify the first query gets messages
+            assert "messages" in calls_made[0]
+            assert "conversation_id" in calls_made[0]
+
+            # Verify the second query batch loads workflow runs with proper filtering
+            assert "workflow_runs" in calls_made[1]
+            assert "app_id" in calls_made[1]  # Security filter applied
+            assert "IN" in calls_made[1]  # Batch loading with IN clause
+
+            # Verify correct status counts
+            assert result["success"] == 1  # One SUCCEEDED
+            assert result["failed"] == 1  # One FAILED
+            assert result["partial_success"] == 1  # One PARTIAL_SUCCEEDED
+
+    def test_status_count_app_id_filtering(self):
+        """Test that status_count filters workflow runs by app_id for security."""
+        # Arrange
+        app_id = str(uuid4())
+        other_app_id = str(uuid4())
+        conversation_id = str(uuid4())
+        workflow_run_id = str(uuid4())
+
+        conversation = Conversation(
+            app_id=app_id,
+            mode=AppMode.CHAT,
+            name="Test Conversation",
+            status="normal",
+            from_source="api",
+        )
+        conversation.id = conversation_id
+
+        # Mock message with workflow_run_id
+        mock_messages = [
+            MagicMock(
+                conversation_id=conversation_id,
+                workflow_run_id=workflow_run_id,
+            ),
+        ]
+
+        calls_made = []
+
+        def mock_scalars(query):
+            calls_made.append(str(query))
+            mock_result = MagicMock()
+
+            if "messages" in str(query):
+                mock_result.all.return_value = mock_messages
+            elif "workflow_runs" in str(query):
+                # Return empty list because no workflow run matches the correct app_id
+                mock_result.all.return_value = []  # Workflow run filtered out by app_id
+            else:
+                mock_result.all.return_value = []
+
+            return mock_result
+
+        # Act
+        with patch("models.model.db.session.scalars", side_effect=mock_scalars):
+            result = conversation.status_count
+
+            # Assert - query should include app_id filter
+            workflow_query = calls_made[1]
+            assert "app_id" in workflow_query
+
+            # Since workflow run has wrong app_id, it shouldn't be included in counts
+            assert result["success"] == 0
+            assert result["failed"] == 0
+            assert result["partial_success"] == 0
+
+    def test_status_count_handles_invalid_workflow_status(self):
+        """Test that status_count gracefully handles invalid workflow status values."""
+        # Arrange
+        app_id = str(uuid4())
+        conversation_id = str(uuid4())
+        workflow_run_id = str(uuid4())
+
+        conversation = Conversation(
+            app_id=app_id,
+            mode=AppMode.CHAT,
+            name="Test Conversation",
+            status="normal",
+            from_source="api",
+        )
+        conversation.id = conversation_id
+
+        mock_messages = [
+            MagicMock(
+                conversation_id=conversation_id,
+                workflow_run_id=workflow_run_id,
+            ),
+        ]
+
+        # Mock workflow run with invalid status
+        mock_workflow_runs = [
+            MagicMock(
+                id=workflow_run_id,
+                status="invalid_status",  # Invalid status that should raise ValueError
+                app_id=app_id,
+            ),
+        ]
+
+        with patch("models.model.db.session.scalars") as mock_scalars:
+            # Mock the messages query
+            def mock_scalars_side_effect(query):
+                mock_result = MagicMock()
+                if "messages" in str(query):
+                    mock_result.all.return_value = mock_messages
+                elif "workflow_runs" in str(query):
+                    mock_result.all.return_value = mock_workflow_runs
+                else:
+                    mock_result.all.return_value = []
+                return mock_result
+
+            mock_scalars.side_effect = mock_scalars_side_effect
+
+            # Act - should not raise exception
+            result = conversation.status_count
+
+            # Assert - should handle invalid status gracefully
+            assert result["success"] == 0
+            assert result["failed"] == 0
+            assert result["partial_success"] == 0
@@ -0,0 +1,88 @@
+import types
+
+import pytest
+
+from core.entities.provider_entities import CredentialConfiguration, CustomModelConfiguration
+from core.model_runtime.entities.common_entities import I18nObject
+from core.model_runtime.entities.model_entities import ModelType
+from core.model_runtime.entities.provider_entities import ConfigurateMethod
+from models.provider import ProviderType
+from services.model_provider_service import ModelProviderService
+
+
+class _FakeConfigurations:
+    def __init__(self, provider_configuration: types.SimpleNamespace) -> None:
+        self._provider_configuration = provider_configuration
+
+    def values(self) -> list[types.SimpleNamespace]:
+        return [self._provider_configuration]
+
+
+@pytest.fixture
+def service_with_fake_configurations():
+    # Build a fake provider schema with minimal fields used by ProviderResponse
+    fake_provider = types.SimpleNamespace(
+        provider="langgenius/openai_api_compatible/openai_api_compatible",
+        label=I18nObject(en_US="OpenAI API Compatible", zh_Hans="OpenAI API Compatible"),
+        description=None,
+        icon_small=None,
+        icon_small_dark=None,
+        icon_large=None,
+        background=None,
+        help=None,
+        supported_model_types=[ModelType.LLM],
+        configurate_methods=[ConfigurateMethod.CUSTOMIZABLE_MODEL],
+        provider_credential_schema=None,
+        model_credential_schema=None,
+    )
+
+    # Include decrypted credentials to simulate the leak source
+    custom_model = CustomModelConfiguration(
+        model="gpt-4o-mini",
+        model_type=ModelType.LLM,
+        credentials={"api_key": "sk-plain-text", "endpoint": "https://example.com"},
+        current_credential_id="cred-1",
+        current_credential_name="API KEY 1",
+        available_model_credentials=[],
+        unadded_to_model_list=False,
+    )
+
+    fake_custom_provider = types.SimpleNamespace(
+        current_credential_id="cred-1",
+        current_credential_name="API KEY 1",
+        available_credentials=[CredentialConfiguration(credential_id="cred-1", credential_name="API KEY 1")],
+    )
+
+    fake_custom_configuration = types.SimpleNamespace(
+        provider=fake_custom_provider, models=[custom_model], can_added_models=[]
+    )
+
+    fake_system_configuration = types.SimpleNamespace(enabled=False, current_quota_type=None, quota_configurations=[])
+
+    fake_provider_configuration = types.SimpleNamespace(
+        provider=fake_provider,
+        preferred_provider_type=ProviderType.CUSTOM,
+        custom_configuration=fake_custom_configuration,
+        system_configuration=fake_system_configuration,
+        is_custom_configuration_available=lambda: True,
+    )
+
+    class _FakeProviderManager:
+        def get_configurations(self, tenant_id: str) -> _FakeConfigurations:
+            return _FakeConfigurations(fake_provider_configuration)
+
+    svc = ModelProviderService()
+    svc.provider_manager = _FakeProviderManager()
+    return svc
+
+
+def test_get_provider_list_strips_credentials(service_with_fake_configurations: ModelProviderService):
+    providers = service_with_fake_configurations.get_provider_list(tenant_id="tenant-1", model_type=None)
+
+    assert len(providers) == 1
+    custom_models = providers[0].custom_configuration.custom_models
+
+    assert custom_models is not None
+    assert len(custom_models) == 1
+    # The sanitizer should drop credentials in list response
+    assert custom_models[0].credentials is None
@@ -14,6 +14,7 @@ from core.tools.utils.text_processing_utils import remove_leading_symbols
         ("Hello, World!", "Hello, World!"),
         ("", ""),
         (" ", " "),
+        ("【测试】", "【测试】"),
     ],
 )
 def test_remove_leading_symbols(input_text, expected_output):
api/uv.lock
@@ -1337,7 +1337,7 @@ wheels = [

 [[package]]
 name = "dify-api"
-version = "1.10.1"
+version = "1.11.0"
 source = { virtual = "." }
 dependencies = [
     { name = "apscheduler" },
|
|||
{ name = "types-redis", specifier = ">=4.6.0.20241004" },
|
||||
{ name = "types-regex", specifier = "~=2024.11.6" },
|
||||
{ name = "types-setuptools", specifier = ">=80.9.0" },
|
||||
{ name = "types-shapely", specifier = "~=2.0.0" },
|
||||
{ name = "types-shapely", specifier = "~=2.1.0" },
|
||||
{ name = "types-simplejson", specifier = ">=3.20.0" },
|
||||
{ name = "types-six", specifier = ">=1.17.0" },
|
||||
{ name = "types-tensorflow", specifier = ">=2.18.0" },
|
||||
|
|
@@ -6557,14 +6557,14 @@ wheels = [

 [[package]]
 name = "types-shapely"
-version = "2.0.0.20250404"
+version = "2.1.0.20250917"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "numpy" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/4e/55/c71a25fd3fc9200df4d0b5fd2f6d74712a82f9a8bbdd90cefb9e6aee39dd/types_shapely-2.0.0.20250404.tar.gz", hash = "sha256:863f540b47fa626c33ae64eae06df171f9ab0347025d4458d2df496537296b4f", size = 25066, upload-time = "2025-04-04T02:54:30.592Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/fa/19/7f28b10994433d43b9caa66f3b9bd6a0a9192b7ce8b5a7fc41534e54b821/types_shapely-2.1.0.20250917.tar.gz", hash = "sha256:5c56670742105aebe40c16414390d35fcaa55d6f774d328c1a18273ab0e2134a", size = 26363, upload-time = "2025-09-17T02:47:44.604Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/ce/ff/7f4d414eb81534ba2476f3d54f06f1463c2ebf5d663fd10cff16ba607dd6/types_shapely-2.0.0.20250404-py3-none-any.whl", hash = "sha256:170fb92f5c168a120db39b3287697fdec5c93ef3e1ad15e52552c36b25318821", size = 36350, upload-time = "2025-04-04T02:54:29.506Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/a9/554ac40810e530263b6163b30a2b623bc16aae3fb64416f5d2b3657d0729/types_shapely-2.1.0.20250917-py3-none-any.whl", hash = "sha256:9334a79339504d39b040426be4938d422cec419168414dc74972aa746a8bf3a1", size = 37813, upload-time = "2025-09-17T02:47:43.788Z" },
 ]

 [[package]]
@@ -1432,3 +1432,6 @@ WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK=0

# Tenant isolated task queue configuration
TENANT_ISOLATED_TASK_CONCURRENCY=1
+
+# The API key of amplitude
+AMPLITUDE_API_KEY=

@@ -21,7 +21,7 @@ services:

  # API service
  api:
-    image: langgenius/dify-api:1.10.1-fix.1
+    image: langgenius/dify-api:1.11.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -62,7 +62,7 @@ services:
  # worker service
  # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
  worker:
-    image: langgenius/dify-api:1.10.1-fix.1
+    image: langgenius/dify-api:1.11.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -101,7 +101,7 @@ services:
  # worker_beat service
  # Celery beat for scheduling periodic tasks.
  worker_beat:
-    image: langgenius/dify-api:1.10.1-fix.1
+    image: langgenius/dify-api:1.11.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -131,11 +131,12 @@ services:

  # Frontend web application.
  web:
-    image: langgenius/dify-web:1.10.1-fix.1
+    image: langgenius/dify-web:1.11.0
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}
      APP_API_URL: ${APP_API_URL:-}
+      AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
      NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
      SENTRY_DSN: ${WEB_SENTRY_DSN:-}
      NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}

@@ -268,7 +269,7 @@ services:

  # plugin daemon
  plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.4.1-local
+    image: langgenius/dify-plugin-daemon:0.5.1-local
    restart: always
    environment:
      # Use the shared environment variables.

@@ -123,7 +123,7 @@ services:

  # plugin daemon
  plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.4.1-local
+    image: langgenius/dify-plugin-daemon:0.5.1-local
    restart: always
    env_file:
      - ./middleware.env

@@ -635,6 +635,7 @@ x-shared-env: &shared-api-worker-env
  WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE: ${WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE:-100}
  WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK: ${WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK:-0}
  TENANT_ISOLATED_TASK_CONCURRENCY: ${TENANT_ISOLATED_TASK_CONCURRENCY:-1}
+  AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}

services:
  # Init container to fix permissions

@@ -658,7 +659,7 @@ services:

  # API service
  api:
-    image: langgenius/dify-api:1.10.1-fix.1
+    image: langgenius/dify-api:1.11.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -699,7 +700,7 @@ services:
  # worker service
  # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
  worker:
-    image: langgenius/dify-api:1.10.1-fix.1
+    image: langgenius/dify-api:1.11.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -738,7 +739,7 @@ services:
  # worker_beat service
  # Celery beat for scheduling periodic tasks.
  worker_beat:
-    image: langgenius/dify-api:1.10.1-fix.1
+    image: langgenius/dify-api:1.11.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -768,11 +769,12 @@ services:

  # Frontend web application.
  web:
-    image: langgenius/dify-web:1.10.1-fix.1
+    image: langgenius/dify-web:1.11.0
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}
      APP_API_URL: ${APP_API_URL:-}
+      AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
      NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
      SENTRY_DSN: ${WEB_SENTRY_DSN:-}
      NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}

@@ -905,7 +907,7 @@ services:

  # plugin daemon
  plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.4.1-local
+    image: langgenius/dify-plugin-daemon:0.5.1-local
    restart: always
    environment:
      # Use the shared environment variables.

@@ -70,3 +70,6 @@ NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false

# The maximum number of tree node depth for workflow
NEXT_PUBLIC_MAX_TREE_DEPTH=50
+
+# The API key of amplitude
+NEXT_PUBLIC_AMPLITUDE_API_KEY=

@@ -21,6 +21,7 @@ import {
import { useKeyPress } from 'ahooks'
import Divider from '../../base/divider'
import Loading from '../../base/loading'
import Toast from '../../base/toast'
import Tooltip from '../../base/tooltip'
import { getKeyboardKeyCodeBySystem, getKeyboardKeyNameBySystem } from '../../workflow/utils'
import AccessControl from '../app-access-control'

@@ -41,6 +42,7 @@ import type { InputVar, Variable } from '@/app/components/workflow/types'
import { appDefaultIconBackground } from '@/config'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now'
+import { useAsyncWindowOpen } from '@/hooks/use-async-window-open'
import { AccessMode } from '@/models/access-control'
import { useAppWhiteListSubjects, useGetUserCanAccessApp } from '@/service/access-control'
import { fetchAppDetailDirect } from '@/service/apps'

@@ -49,7 +51,6 @@ import { AppModeEnum } from '@/types/app'
import type { PublishWorkflowParams } from '@/types/workflow'
import { basePath } from '@/utils/var'
import UpgradeBtn from '@/app/components/billing/upgrade-btn'
-import { useAsyncWindowOpen } from '@/hooks/use-async-window-open'

const ACCESS_MODE_MAP: Record<AccessMode, { label: string, icon: React.ElementType }> = {
  [AccessMode.ORGANIZATION]: {

@@ -153,6 +154,7 @@ const AppPublisher = ({

  const { data: userCanAccessApp, isLoading: isGettingUserCanAccessApp, refetch } = useGetUserCanAccessApp({ appId: appDetail?.id, enabled: false })
  const { data: appAccessSubjects, isLoading: isGettingAppWhiteListSubjects } = useAppWhiteListSubjects(appDetail?.id, open && systemFeatures.webapp_auth.enabled && appDetail?.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS)
+  const openAsyncWindow = useAsyncWindowOpen()

  const noAccessPermission = useMemo(() => systemFeatures.webapp_auth.enabled && appDetail && appDetail.access_mode !== AccessMode.EXTERNAL_MEMBERS && !userCanAccessApp?.result, [systemFeatures, appDetail, userCanAccessApp])
  const disabledFunctionButton = useMemo(() => (!publishedAt || missingStartNode || noAccessPermission), [publishedAt, missingStartNode, noAccessPermission])

@@ -216,23 +218,20 @@ const AppPublisher = ({
    setPublished(false)
  }, [disabled, onToggle, open])

-  const { openAsync } = useAsyncWindowOpen()
-
-  const handleOpenInExplore = useCallback(() => {
-    if (!appDetail?.id) return
-
-    openAsync(
-      async () => {
-        const { installed_apps }: { installed_apps?: { id: string }[] } = await fetchInstalledAppList(appDetail.id) || {}
-        if (installed_apps && installed_apps.length > 0)
-          return `${basePath}/explore/installed/${installed_apps[0].id}`
-        throw new Error('No app found in Explore')
+  const handleOpenInExplore = useCallback(async () => {
+    await openAsyncWindow(async () => {
+      if (!appDetail?.id)
+        throw new Error('App not found')
+      const { installed_apps }: any = await fetchInstalledAppList(appDetail?.id) || {}
+      if (installed_apps?.length > 0)
+        return `${basePath}/explore/installed/${installed_apps[0].id}`
+      throw new Error('No app found in Explore')
+    }, {
+      onError: (err) => {
+        Toast.notify({ type: 'error', message: `${err.message || err}` })
+      },
-      {
-        errorMessage: 'Failed to open app in Explore',
-      },
-    )
-  }, [appDetail?.id, openAsync])
+    })
+  }, [appDetail?.id, openAsyncWindow])

  const handleAccessControlUpdate = useCallback(async () => {
    if (!appDetail)

@@ -0,0 +1,480 @@
import '@testing-library/jest-dom'
import type { CSSProperties } from 'react'
import { fireEvent, render, screen } from '@testing-library/react'
import DebugWithMultipleModel from './index'
import type { DebugWithMultipleModelContextType } from './context'
import { APP_CHAT_WITH_MULTIPLE_MODEL } from '../types'
import type { ModelAndParameter } from '../types'
import type { Inputs, ModelConfig } from '@/models/debug'
import { DEFAULT_AGENT_SETTING, DEFAULT_CHAT_PROMPT_CONFIG, DEFAULT_COMPLETION_PROMPT_CONFIG } from '@/config'
import type { FeatureStoreState } from '@/app/components/base/features/store'
import type { FileEntity } from '@/app/components/base/file-uploader/types'
import type { InputForm } from '@/app/components/base/chat/chat/type'
import { AppModeEnum, ModelModeType, type PromptVariable, Resolution, TransferMethod } from '@/types/app'

type PromptVariableWithMeta = Omit<PromptVariable, 'type' | 'required'> & {
  type: PromptVariable['type'] | 'api'
  required?: boolean
  hide?: boolean
}

const mockUseDebugConfigurationContext = jest.fn()
const mockUseFeaturesSelector = jest.fn()
const mockUseEventEmitterContext = jest.fn()
const mockUseAppStoreSelector = jest.fn()
const mockEventEmitter = { emit: jest.fn() }
const mockSetShowAppConfigureFeaturesModal = jest.fn()
let capturedChatInputProps: MockChatInputAreaProps | null = null
let modelIdCounter = 0
let featureState: FeatureStoreState

type MockChatInputAreaProps = {
  onSend?: (message: string, files?: FileEntity[]) => void
  onFeatureBarClick?: (state: boolean) => void
  showFeatureBar?: boolean
  showFileUpload?: boolean
  inputs?: Record<string, any>
  inputsForm?: InputForm[]
  speechToTextConfig?: unknown
  visionConfig?: unknown
}

const mockFiles: FileEntity[] = [
  {
    id: 'file-1',
    name: 'file.txt',
    size: 10,
    type: 'text/plain',
    progress: 100,
    transferMethod: TransferMethod.remote_url,
    supportFileType: 'text',
  },
]

jest.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string) => key,
  }),
}))

jest.mock('@/context/debug-configuration', () => ({
  __esModule: true,
  useDebugConfigurationContext: () => mockUseDebugConfigurationContext(),
}))

jest.mock('@/app/components/base/features/hooks', () => ({
  __esModule: true,
  useFeatures: (selector: (state: FeatureStoreState) => unknown) => mockUseFeaturesSelector(selector),
}))

jest.mock('@/context/event-emitter', () => ({
  __esModule: true,
  useEventEmitterContextContext: () => mockUseEventEmitterContext(),
}))

jest.mock('@/app/components/app/store', () => ({
  __esModule: true,
  useStore: (selector: (state: { setShowAppConfigureFeaturesModal: typeof mockSetShowAppConfigureFeaturesModal }) => unknown) => mockUseAppStoreSelector(selector),
}))

jest.mock('./debug-item', () => ({
  __esModule: true,
  default: ({
    modelAndParameter,
    className,
    style,
  }: {
    modelAndParameter: ModelAndParameter
    className?: string
    style?: CSSProperties
  }) => (
    <div
      data-testid='debug-item'
      data-model-id={modelAndParameter.id}
      className={className}
      style={style}
    >
      DebugItem-{modelAndParameter.id}
    </div>
  ),
}))

jest.mock('@/app/components/base/chat/chat/chat-input-area', () => ({
  __esModule: true,
  default: (props: MockChatInputAreaProps) => {
    capturedChatInputProps = props
    return (
      <div data-testid='chat-input-area'>
        <button type='button' onClick={() => props.onSend?.('test message', mockFiles)}>send</button>
        <button type='button' onClick={() => props.onFeatureBarClick?.(true)}>feature</button>
      </div>
    )
  },
}))

const createFeatureState = (): FeatureStoreState => ({
  features: {
    speech2text: { enabled: true },
    file: {
      image: {
        enabled: true,
        detail: Resolution.high,
        number_limits: 2,
        transfer_methods: [TransferMethod.remote_url],
      },
    },
  },
  setFeatures: jest.fn(),
  showFeaturesModal: false,
  setShowFeaturesModal: jest.fn(),
})

const createModelConfig = (promptVariables: PromptVariableWithMeta[] = []): ModelConfig => ({
  provider: 'OPENAI',
  model_id: 'gpt-4',
  mode: ModelModeType.chat,
  configs: {
    prompt_template: '',
    prompt_variables: promptVariables as unknown as PromptVariable[],
  },
  chat_prompt_config: DEFAULT_CHAT_PROMPT_CONFIG,
  completion_prompt_config: DEFAULT_COMPLETION_PROMPT_CONFIG,
  opening_statement: '',
  more_like_this: null,
  suggested_questions: [],
  suggested_questions_after_answer: null,
  speech_to_text: null,
  text_to_speech: null,
  file_upload: null,
  retriever_resource: null,
  sensitive_word_avoidance: null,
  annotation_reply: null,
  external_data_tools: [],
  system_parameters: {
    audio_file_size_limit: 0,
    file_size_limit: 0,
    image_file_size_limit: 0,
    video_file_size_limit: 0,
    workflow_file_upload_limit: 0,
  },
  dataSets: [],
  agentConfig: DEFAULT_AGENT_SETTING,
})

type DebugConfiguration = {
  mode: AppModeEnum
  inputs: Inputs
  modelConfig: ModelConfig
}

const createDebugConfiguration = (overrides: Partial<DebugConfiguration> = {}): DebugConfiguration => ({
  mode: AppModeEnum.CHAT,
  inputs: {},
  modelConfig: createModelConfig(),
  ...overrides,
})

const createModelAndParameter = (overrides: Partial<ModelAndParameter> = {}): ModelAndParameter => ({
  id: `model-${++modelIdCounter}`,
  model: 'gpt-3.5-turbo',
  provider: 'openai',
  parameters: {},
  ...overrides,
})

const createProps = (overrides: Partial<DebugWithMultipleModelContextType> = {}): DebugWithMultipleModelContextType => ({
  multipleModelConfigs: [createModelAndParameter()],
  onMultipleModelConfigsChange: jest.fn(),
  onDebugWithMultipleModelChange: jest.fn(),
  ...overrides,
})

const renderComponent = (props?: Partial<DebugWithMultipleModelContextType>) => {
  const mergedProps = createProps(props)
  return render(<DebugWithMultipleModel {...mergedProps} />)
}

describe('DebugWithMultipleModel', () => {
  beforeEach(() => {
    jest.clearAllMocks()
    capturedChatInputProps = null
    modelIdCounter = 0
    featureState = createFeatureState()
    mockUseFeaturesSelector.mockImplementation(selector => selector(featureState))
    mockUseEventEmitterContext.mockReturnValue({ eventEmitter: mockEventEmitter })
    mockUseAppStoreSelector.mockImplementation(selector => selector({ setShowAppConfigureFeaturesModal: mockSetShowAppConfigureFeaturesModal }))
    mockUseDebugConfigurationContext.mockReturnValue(createDebugConfiguration())
  })

  describe('chat input rendering', () => {
    it('should render chat input in chat mode with transformed prompt variables and feature handler', () => {
      // Arrange
      const promptVariables: PromptVariableWithMeta[] = [
        { key: 'city', name: 'City', type: 'string', required: true },
        { key: 'audience', name: 'Audience', type: 'number' },
        { key: 'hidden', name: 'Hidden', type: 'select', hide: true },
        { key: 'api-only', name: 'API Only', type: 'api' },
      ]
      const debugConfiguration = createDebugConfiguration({
        inputs: { audience: 'engineers' },
        modelConfig: createModelConfig(promptVariables),
      })
      mockUseDebugConfigurationContext.mockReturnValue(debugConfiguration)

      // Act
      renderComponent()
      fireEvent.click(screen.getByRole('button', { name: /feature/i }))

      // Assert
      expect(screen.getByTestId('chat-input-area')).toBeInTheDocument()
      expect(capturedChatInputProps?.inputs).toEqual({ audience: 'engineers' })
      expect(capturedChatInputProps?.inputsForm).toEqual([
        expect.objectContaining({ label: 'City', variable: 'city', hide: false, required: true }),
        expect.objectContaining({ label: 'Audience', variable: 'audience', hide: false, required: false }),
        expect.objectContaining({ label: 'Hidden', variable: 'hidden', hide: true, required: false }),
      ])
      expect(capturedChatInputProps?.showFeatureBar).toBe(true)
      expect(capturedChatInputProps?.showFileUpload).toBe(false)
      expect(capturedChatInputProps?.speechToTextConfig).toEqual(featureState.features.speech2text)
      expect(capturedChatInputProps?.visionConfig).toEqual(featureState.features.file)
      expect(mockSetShowAppConfigureFeaturesModal).toHaveBeenCalledWith(true)
    })

    it('should render chat input in agent chat mode', () => {
      // Arrange
      mockUseDebugConfigurationContext.mockReturnValue(createDebugConfiguration({
        mode: AppModeEnum.AGENT_CHAT,
      }))

      // Act
      renderComponent()

      // Assert
      expect(screen.getByTestId('chat-input-area')).toBeInTheDocument()
    })

    it('should hide chat input when not in chat mode', () => {
      // Arrange
      mockUseDebugConfigurationContext.mockReturnValue(createDebugConfiguration({
        mode: AppModeEnum.COMPLETION,
      }))
      const multipleModelConfigs = [createModelAndParameter()]

      // Act
      renderComponent({ multipleModelConfigs })

      // Assert
      expect(screen.queryByTestId('chat-input-area')).not.toBeInTheDocument()
      expect(screen.getAllByTestId('debug-item')).toHaveLength(1)
    })
  })

  describe('sending flow', () => {
    it('should emit chat event when allowed to send', () => {
      // Arrange
      const checkCanSend = jest.fn(() => true)
      const multipleModelConfigs = [createModelAndParameter(), createModelAndParameter()]
      renderComponent({ multipleModelConfigs, checkCanSend })

      // Act
      fireEvent.click(screen.getByRole('button', { name: /send/i }))

      // Assert
      expect(checkCanSend).toHaveBeenCalled()
      expect(mockEventEmitter.emit).toHaveBeenCalledWith({
        type: APP_CHAT_WITH_MULTIPLE_MODEL,
        payload: {
          message: 'test message',
          files: mockFiles,
        },
      })
    })

    it('should emit when no checkCanSend is provided', () => {
      renderComponent()

      fireEvent.click(screen.getByRole('button', { name: /send/i }))

      expect(mockEventEmitter.emit).toHaveBeenCalledWith({
        type: APP_CHAT_WITH_MULTIPLE_MODEL,
        payload: {
          message: 'test message',
          files: mockFiles,
        },
      })
    })

    it('should block sending when checkCanSend returns false', () => {
      // Arrange
      const checkCanSend = jest.fn(() => false)
      renderComponent({ checkCanSend })

      // Act
      fireEvent.click(screen.getByRole('button', { name: /send/i }))

      // Assert
      expect(checkCanSend).toHaveBeenCalled()
      expect(mockEventEmitter.emit).not.toHaveBeenCalled()
    })

    it('should tolerate missing event emitter without throwing', () => {
      mockUseEventEmitterContext.mockReturnValue({ eventEmitter: null })
      renderComponent()

      expect(() => fireEvent.click(screen.getByRole('button', { name: /send/i }))).not.toThrow()
      expect(mockEventEmitter.emit).not.toHaveBeenCalled()
    })
  })

  describe('layout sizing and positioning', () => {
    const expectItemLayout = (
      element: HTMLElement,
      expectation: {
        width?: string
        height?: string
        transform: string
        classes?: string[]
      },
    ) => {
      if (expectation.width !== undefined)
        expect(element.style.width).toBe(expectation.width)
      else
        expect(element.style.width).toBe('')

      if (expectation.height !== undefined)
        expect(element.style.height).toBe(expectation.height)
      else
        expect(element.style.height).toBe('')

      expect(element.style.transform).toBe(expectation.transform)
      expectation.classes?.forEach(cls => expect(element).toHaveClass(cls))
    }

    it('should arrange items in two-column layout for two models', () => {
      // Arrange
      const multipleModelConfigs = [createModelAndParameter(), createModelAndParameter()]

      // Act
      renderComponent({ multipleModelConfigs })
      const items = screen.getAllByTestId('debug-item')

      // Assert
      expect(items).toHaveLength(2)
      expectItemLayout(items[0], {
        width: 'calc(50% - 4px - 24px)',
        height: '100%',
        transform: 'translateX(0) translateY(0)',
        classes: ['mr-2'],
      })
      expectItemLayout(items[1], {
        width: 'calc(50% - 4px - 24px)',
        height: '100%',
        transform: 'translateX(calc(100% + 8px)) translateY(0)',
        classes: [],
      })
    })

    it('should arrange items in thirds for three models', () => {
      // Arrange
      const multipleModelConfigs = [createModelAndParameter(), createModelAndParameter(), createModelAndParameter()]

      // Act
      renderComponent({ multipleModelConfigs })
      const items = screen.getAllByTestId('debug-item')

      // Assert
      expect(items).toHaveLength(3)
      expectItemLayout(items[0], {
        width: 'calc(33.3% - 5.33px - 16px)',
        height: '100%',
        transform: 'translateX(0) translateY(0)',
        classes: ['mr-2'],
      })
      expectItemLayout(items[1], {
        width: 'calc(33.3% - 5.33px - 16px)',
        height: '100%',
        transform: 'translateX(calc(100% + 8px)) translateY(0)',
        classes: ['mr-2'],
      })
      expectItemLayout(items[2], {
        width: 'calc(33.3% - 5.33px - 16px)',
        height: '100%',
        transform: 'translateX(calc(200% + 16px)) translateY(0)',
        classes: [],
      })
    })

    it('should position items on a grid for four models', () => {
      // Arrange
      const multipleModelConfigs = [
        createModelAndParameter(),
        createModelAndParameter(),
        createModelAndParameter(),
        createModelAndParameter(),
      ]

      // Act
      renderComponent({ multipleModelConfigs })
      const items = screen.getAllByTestId('debug-item')

      // Assert
      expect(items).toHaveLength(4)
      expectItemLayout(items[0], {
        width: 'calc(50% - 4px - 24px)',
        height: 'calc(50% - 4px)',
        transform: 'translateX(0) translateY(0)',
        classes: ['mr-2', 'mb-2'],
      })
      expectItemLayout(items[1], {
        width: 'calc(50% - 4px - 24px)',
        height: 'calc(50% - 4px)',
        transform: 'translateX(calc(100% + 8px)) translateY(0)',
        classes: ['mb-2'],
      })
      expectItemLayout(items[2], {
        width: 'calc(50% - 4px - 24px)',
        height: 'calc(50% - 4px)',
        transform: 'translateX(0) translateY(calc(100% + 8px))',
        classes: ['mr-2'],
      })
      expectItemLayout(items[3], {
        width: 'calc(50% - 4px - 24px)',
        height: 'calc(50% - 4px)',
        transform: 'translateX(calc(100% + 8px)) translateY(calc(100% + 8px))',
        classes: [],
      })
    })

    it('should fall back to single column layout when only one model is provided', () => {
      // Arrange
      const multipleModelConfigs = [createModelAndParameter()]

      // Act
      renderComponent({ multipleModelConfigs })
      const item = screen.getByTestId('debug-item')

      // Assert
      expectItemLayout(item, {
        transform: 'translateX(0) translateY(0)',
        classes: [],
      })
    })

    it('should set scroll area height for chat modes', () => {
      const { container } = renderComponent()
      const scrollArea = container.querySelector('.relative.mb-3.grow.overflow-auto.px-6') as HTMLElement
      expect(scrollArea).toBeInTheDocument()
      expect(scrollArea.style.height).toBe('calc(100% - 60px)')
    })

    it('should set full height when chat input is hidden', () => {
      mockUseDebugConfigurationContext.mockReturnValue(createDebugConfiguration({
        mode: AppModeEnum.COMPLETION,
      }))

      const { container } = renderComponent()
      const scrollArea = container.querySelector('.relative.mb-3.grow.overflow-auto.px-6') as HTMLElement
      expect(scrollArea.style.height).toBe('100%')
    })
  })
})

@@ -7,7 +7,7 @@ import { useTranslation } from 'react-i18next'
import { RiBuildingLine, RiGlobalLine, RiLockLine, RiMoreFill, RiVerifiedBadgeLine } from '@remixicon/react'
import cn from '@/utils/classnames'
import { type App, AppModeEnum } from '@/types/app'
-import { ToastContext } from '@/app/components/base/toast'
+import Toast, { ToastContext } from '@/app/components/base/toast'
import { copyApp, deleteApp, exportAppConfig, updateAppInfo } from '@/service/apps'
import type { DuplicateAppModalProps } from '@/app/components/app/duplicate-modal'
import AppIcon from '@/app/components/base/app-icon'

@@ -27,11 +27,11 @@ import { fetchWorkflowDraft } from '@/service/workflow'
import { fetchInstalledAppList } from '@/service/explore'
import { AppTypeIcon } from '@/app/components/app/type-selector'
import Tooltip from '@/app/components/base/tooltip'
+import { useAsyncWindowOpen } from '@/hooks/use-async-window-open'
import { AccessMode } from '@/models/access-control'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { formatTime } from '@/utils/time'
import { useGetUserCanAccessApp } from '@/service/access-control'
-import { useAsyncWindowOpen } from '@/hooks/use-async-window-open'
import dynamic from 'next/dynamic'

const EditAppModal = dynamic(() => import('@/app/components/explore/create-app-modal'), {

@@ -65,6 +65,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
  const { isCurrentWorkspaceEditor } = useAppContext()
  const { onPlanInfoChanged } = useProviderContext()
  const { push } = useRouter()
+  const openAsyncWindow = useAsyncWindowOpen()

  const [showEditModal, setShowEditModal] = useState(false)
  const [showDuplicateModal, setShowDuplicateModal] = useState(false)

@@ -243,24 +244,25 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
    e.preventDefault()
    setShowAccessControl(true)
  }
-  const { openAsync } = useAsyncWindowOpen()
-
-  const onClickInstalledApp = (e: React.MouseEvent<HTMLButtonElement>) => {
+  const onClickInstalledApp = async (e: React.MouseEvent<HTMLButtonElement>) => {
    e.stopPropagation()
    props.onClick?.()
    e.preventDefault()

-    openAsync(
-      async () => {
-        const { installed_apps }: { installed_apps?: { id: string }[] } = await fetchInstalledAppList(app.id) || {}
-        if (installed_apps && installed_apps.length > 0)
+    try {
+      await openAsyncWindow(async () => {
+        const { installed_apps }: any = await fetchInstalledAppList(app.id) || {}
+        if (installed_apps?.length > 0)
          return `${basePath}/explore/installed/${installed_apps[0].id}`
        throw new Error('No app found in Explore')
-      },
-      {
-        errorMessage: 'Failed to open app in Explore',
-      },
-    )
+      }, {
+        onError: (err) => {
+          Toast.notify({ type: 'error', message: `${err.message || err}` })
+        },
+      })
+    }
+    catch (e: any) {
+      Toast.notify({ type: 'error', message: `${e.message || e}` })
+    }
  }
  return (
    <div className="relative flex w-full flex-col py-1" onMouseLeave={onMouseLeave}>

@@ -11,13 +11,19 @@ export type IAmplitudeProps = {
  sessionReplaySampleRate?: number
}

+// Check if Amplitude should be enabled
+export const isAmplitudeEnabled = () => {
+  const apiKey = process.env.NEXT_PUBLIC_AMPLITUDE_API_KEY
+  return IS_CLOUD_EDITION && !!apiKey
+}
+
const AmplitudeProvider: FC<IAmplitudeProps> = ({
  apiKey = process.env.NEXT_PUBLIC_AMPLITUDE_API_KEY ?? '',
  sessionReplaySampleRate = 1,
}) => {
  useEffect(() => {
-    // Only enable in Saas edition
-    if (!IS_CLOUD_EDITION)
+    // Only enable in Saas edition with valid API key
+    if (!isAmplitudeEnabled())
      return

    // Initialize Amplitude

@@ -1,2 +1,2 @@
-export { default } from './AmplitudeProvider'
+export { default, isAmplitudeEnabled } from './AmplitudeProvider'
export { resetUser, setUserId, setUserProperties, trackEvent } from './utils'

@@ -1,4 +1,5 @@
import * as amplitude from '@amplitude/analytics-browser'
+import { isAmplitudeEnabled } from './AmplitudeProvider'

/**
 * Track custom event

@@ -6,6 +7,8 @@ import * as amplitude from '@amplitude/analytics-browser'
 * @param eventProperties Event properties (optional)
 */
export const trackEvent = (eventName: string, eventProperties?: Record<string, any>) => {
+  if (!isAmplitudeEnabled())
+    return
  amplitude.track(eventName, eventProperties)
}

@@ -14,6 +17,8 @@ export const trackEvent = (eventName: string, eventProperties?: Record<string, a
 * @param userId User ID
 */
export const setUserId = (userId: string) => {
+  if (!isAmplitudeEnabled())
+    return
  amplitude.setUserId(userId)
}

@@ -22,6 +27,8 @@ export const setUserId = (userId: string) => {
 * @param properties User properties
 */
export const setUserProperties = (properties: Record<string, any>) => {
+  if (!isAmplitudeEnabled())
+    return
  const identifyEvent = new amplitude.Identify()
  Object.entries(properties).forEach(([key, value]) => {
    identifyEvent.set(key, value)

@@ -33,5 +40,7 @@ export const setUserProperties = (properties: Record<string, any>) => {
 * Reset user (e.g., when user logs out)
 */
export const resetUser = () => {
+  if (!isAmplitudeEnabled())
+    return
  amplitude.reset()
}

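The four hunks above add the same guard to every exported helper, so UI code can call these functions unconditionally. A minimal self-contained sketch of the resulting pattern, assuming IS_CLOUD_EDITION mirrors the repo's cloud-edition config flag (an assumption for illustration, not code from this diff):

// Sketch: every tracking helper short-circuits on one predicate.
import * as amplitude from '@amplitude/analytics-browser'

const IS_CLOUD_EDITION = process.env.NEXT_PUBLIC_EDITION === 'CLOUD' // assumed flag

export const isAmplitudeEnabled = () =>
  IS_CLOUD_EDITION && !!process.env.NEXT_PUBLIC_AMPLITUDE_API_KEY

export const trackEvent = (eventName: string, eventProperties?: Record<string, any>) => {
  if (!isAmplitudeEnabled())
    return // disabled builds never touch the Amplitude SDK
  amplitude.track(eventName, eventProperties)
}

// Usage: safe to call unconditionally from UI code; it no-ops when disabled.
trackEvent('app_published', { appId: 'demo-app' })
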
@@ -21,6 +21,7 @@ type NotionPageSelectorProps = {
  datasetId?: string
  credentialList: DataSourceCredential[]
  onSelectCredential?: (credentialId: string) => void
+  supportBatchUpload?: boolean
}

const NotionPageSelector = ({

@@ -32,6 +33,7 @@ const NotionPageSelector = ({
  datasetId = '',
  credentialList,
  onSelectCredential,
+  supportBatchUpload = false,
}: NotionPageSelectorProps) => {
  const [searchValue, setSearchValue] = useState('')
  const setShowAccountSettingModal = useModalContextSelector(s => s.setShowAccountSettingModal)

@@ -110,7 +112,7 @@ const NotionPageSelector = ({
    setCurrentCredential(credential)
    onSelect([]) // Clear selected pages when changing credential
    onSelectCredential?.(credential.credentialId)
-  }, [invalidPreImportNotionPages, onSelect, onSelectCredential])
+  }, [datasetId, invalidPreImportNotionPages, notionCredentials, onSelect, onSelectCredential])

  const handleSelectPages = useCallback((newSelectedPagesId: Set<string>) => {
    const selectedPages = Array.from(newSelectedPagesId).map(pageId => pagesMapAndSelectedPagesId[0][pageId])

@@ -175,6 +177,7 @@ const NotionPageSelector = ({
          canPreview={canPreview}
          previewPageId={previewPageId}
          onPreview={handlePreviewPage}
+          isMultipleChoice={supportBatchUpload}
        />
      )}
    </div>

@@ -1,9 +1,8 @@
'use client'
-import { useTranslation } from 'react-i18next'
import React, { Fragment, useMemo } from 'react'
import { Menu, MenuButton, MenuItem, MenuItems, Transition } from '@headlessui/react'
import { RiArrowDownSLine } from '@remixicon/react'
-import NotionIcon from '../../notion-icon'
+import { CredentialIcon } from '@/app/components/datasets/common/credential-icon'

export type NotionCredential = {
  credentialId: string

@@ -23,14 +22,10 @@ const CredentialSelector = ({
  items,
  onSelect,
}: CredentialSelectorProps) => {
-  const { t } = useTranslation()
  const currentCredential = items.find(item => item.credentialId === value)!

  const getDisplayName = (item: NotionCredential) => {
-    return item.workspaceName || t('datasetPipeline.credentialSelector.name', {
-      credentialName: item.credentialName,
-      pluginName: 'Notion',
-    })
+    return item.workspaceName || item.credentialName
  }

  const currentDisplayName = useMemo(() => {

@@ -43,10 +38,11 @@ const CredentialSelector = ({
  ({ open }) => (
    <>
      <MenuButton className={`flex h-7 items-center justify-center rounded-md p-1 pr-2 hover:bg-state-base-hover ${open && 'bg-state-base-hover'} cursor-pointer`}>
-        <NotionIcon
+        <CredentialIcon
          className='mr-2'
-          src={currentCredential?.workspaceIcon}
+          avatarUrl={currentCredential?.workspaceIcon}
+          name={currentDisplayName}
          size={20}
        />
        <div
          className='mr-1 w-[90px] truncate text-left text-sm font-medium text-text-secondary'

@@ -80,10 +76,11 @@ const CredentialSelector = ({
  className='flex h-9 cursor-pointer items-center rounded-lg px-3 hover:bg-state-base-hover'
  onClick={() => onSelect(item.credentialId)}
>
-  <NotionIcon
+  <CredentialIcon
    className='mr-2 shrink-0'
-    src={item.workspaceIcon}
+    avatarUrl={item.workspaceIcon}
+    name={displayName}
    size={20}
  />
  <div
    className='system-sm-medium mr-2 grow truncate text-text-secondary'

@ -7,6 +7,7 @@ import Checkbox from '../../checkbox'
|
|||
import NotionIcon from '../../notion-icon'
|
||||
import cn from '@/utils/classnames'
|
||||
import type { DataSourceNotionPage, DataSourceNotionPageMap } from '@/models/common'
|
||||
import Radio from '@/app/components/base/radio/ui'
|
||||
|
||||
type PageSelectorProps = {
|
||||
value: Set<string>
|
||||
|
|
@ -18,6 +19,7 @@ type PageSelectorProps = {
|
|||
canPreview?: boolean
|
||||
previewPageId?: string
|
||||
onPreview?: (selectedPageId: string) => void
|
||||
isMultipleChoice?: boolean
|
||||
}
|
||||
type NotionPageTreeItem = {
|
||||
children: Set<string>
|
||||
|
|
@ -80,6 +82,7 @@ const ItemComponent = ({ index, style, data }: ListChildComponentProps<{
|
|||
searchValue: string
|
||||
previewPageId: string
|
||||
pagesMap: DataSourceNotionPageMap
|
||||
isMultipleChoice?: boolean
|
||||
}>) => {
|
||||
const { t } = useTranslation()
|
||||
const {
|
||||
|
|
@ -94,6 +97,7 @@ const ItemComponent = ({ index, style, data }: ListChildComponentProps<{
|
|||
searchValue,
|
||||
previewPageId,
|
||||
pagesMap,
|
||||
isMultipleChoice,
|
||||
} = data
|
||||
const current = dataList[index]
|
||||
const currentWithChildrenAndDescendants = listMapWithChildrenAndDescendants[current.page_id]
|
||||
|
|
@ -134,16 +138,24 @@ const ItemComponent = ({ index, style, data }: ListChildComponentProps<{
|
|||
previewPageId === current.page_id && 'bg-state-base-hover')}
|
||||
style={{ ...style, top: style.top as number + 8, left: 8, right: 8, width: 'calc(100% - 16px)' }}
|
||||
>
|
||||
<Checkbox
|
||||
className='mr-2 shrink-0'
|
||||
checked={checkedIds.has(current.page_id)}
|
||||
disabled={disabled}
|
||||
onCheck={() => {
|
||||
if (disabled)
|
||||
return
|
||||
handleCheck(index)
|
||||
}}
|
||||
/>
|
||||
{isMultipleChoice ? (
|
||||
<Checkbox
|
||||
className='mr-2 shrink-0'
|
||||
checked={checkedIds.has(current.page_id)}
|
||||
disabled={disabled}
|
||||
onCheck={() => {
|
||||
handleCheck(index)
|
||||
}}
|
||||
/>) : (
|
||||
<Radio
|
||||
className='mr-2 shrink-0'
|
||||
isChecked={checkedIds.has(current.page_id)}
|
||||
disabled={disabled}
|
||||
onCheck={() => {
|
||||
handleCheck(index)
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
{!searchValue && renderArrow()}
|
||||
<NotionIcon
|
||||
className='mr-1 shrink-0'
|
||||
|
|
@ -192,6 +204,7 @@ const PageSelector = ({
|
|||
canPreview = true,
|
||||
previewPageId,
|
||||
onPreview,
|
||||
isMultipleChoice = true,
|
||||
}: PageSelectorProps) => {
|
||||
const { t } = useTranslation()
|
||||
const [dataList, setDataList] = useState<NotionPageItem[]>([])
|
||||
|
|
@ -265,7 +278,7 @@ const PageSelector = ({
|
|||
const currentWithChildrenAndDescendants = listMapWithChildrenAndDescendants[pageId]
|
||||
|
||||
if (copyValue.has(pageId)) {
|
||||
if (!searchValue) {
|
||||
if (!searchValue && isMultipleChoice) {
|
||||
for (const item of currentWithChildrenAndDescendants.descendants)
|
||||
copyValue.delete(item)
|
||||
}
|
||||
|
|
@ -273,12 +286,18 @@ const PageSelector = ({
|
|||
copyValue.delete(pageId)
|
||||
}
|
||||
else {
|
||||
if (!searchValue) {
|
||||
if (!searchValue && isMultipleChoice) {
|
||||
for (const item of currentWithChildrenAndDescendants.descendants)
|
||||
copyValue.add(item)
|
||||
}
|
||||
|
||||
copyValue.add(pageId)
|
||||
// Single choice mode, clear previous selection
|
||||
if (!isMultipleChoice && copyValue.size > 0) {
|
||||
copyValue.clear()
|
||||
copyValue.add(pageId)
|
||||
}
|
||||
else {
|
||||
copyValue.add(pageId)
|
||||
}
|
||||
}
|
||||
|
||||
onSelect(new Set(copyValue))
|
||||
|
|
@ -322,6 +341,7 @@ const PageSelector = ({
|
|||
searchValue,
|
||||
previewPageId: currentPreviewPageId,
|
||||
pagesMap,
|
||||
isMultipleChoice,
|
||||
}}
|
||||
>
|
||||
{Item}
|
||||
|
|
|
|||
|
|
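The handleCheck changes above fold single-choice behavior into the existing set-based selection: descendants only cascade in multiple-choice mode, and single-choice clears the set before adding the new page. A standalone sketch of that toggle logic (a hypothetical helper for illustration, not the component's code):

const toggleSelection = (selected: Set<string>, pageId: string, isMultipleChoice: boolean): Set<string> => {
  const next = new Set(selected)
  if (next.has(pageId)) {
    next.delete(pageId)
    return next
  }
  if (!isMultipleChoice)
    next.clear() // single-choice: at most one page stays selected
  next.add(pageId)
  return next
}

// toggleSelection(new Set(['a']), 'b', false) yields Set { 'b' }
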
@@ -9,33 +9,28 @@ import PlanComp from '../plan'
import { useAppContext } from '@/context/app-context'
import { useProviderContext } from '@/context/provider-context'
import { useBillingUrl } from '@/service/use-billing'
import { useAsyncWindowOpen } from '@/hooks/use-async-window-open'

const Billing: FC = () => {
  const { t } = useTranslation()
  const { isCurrentWorkspaceManager } = useAppContext()
  const { enableBilling } = useProviderContext()
  const { data: billingUrl, isFetching, refetch } = useBillingUrl(enableBilling && isCurrentWorkspaceManager)
+  const openAsyncWindow = useAsyncWindowOpen()

  const handleOpenBilling = async () => {
-    // Open synchronously to preserve user gesture for popup blockers
-    if (billingUrl) {
-      window.open(billingUrl, '_blank', 'noopener,noreferrer')
-      return
-    }
-
-    const newWindow = window.open('', '_blank', 'noopener,noreferrer')
-    try {
+    await openAsyncWindow(async () => {
      const url = (await refetch()).data
-      if (url && newWindow) {
-        newWindow.location.href = url
-        return
-      }
-    }
-    catch (err) {
-      console.error('Failed to fetch billing url', err)
-    }
-    // Close the placeholder window if we failed to fetch the URL
-    newWindow?.close()
+      if (url)
+        return url
+      return null
+    }, {
+      immediateUrl: billingUrl,
+      features: 'noopener,noreferrer',
+      onError: (err) => {
+        console.error('Failed to fetch billing url', err)
+      },
+    })
  }

  return (

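Billing, the app card, the app publisher, and the plan item all migrate to the same openAsyncWindow(getUrl, options) helper from @/hooks/use-async-window-open. The hook itself is not shown in this diff; the sketch below is inferred from the call sites (immediateUrl, features, and onError are the option names passed above) and from the placeholder-window pattern the old Billing code used, so treat it as an approximation rather than the repo's implementation:

type OpenAsyncWindowOptions = {
  immediateUrl?: string | null
  features?: string
  onError?: (err: Error) => void
}

export const useAsyncWindowOpen = () => {
  return async (getUrl: () => Promise<string | null>, options: OpenAsyncWindowOptions = {}) => {
    // Opening synchronously preserves the user gesture for popup blockers.
    if (options.immediateUrl) {
      window.open(options.immediateUrl, '_blank', options.features)
      return
    }
    const placeholder = window.open('', '_blank', options.features)
    try {
      const url = await getUrl()
      if (url && placeholder) {
        placeholder.location.href = url
        return
      }
      throw new Error('No URL resolved')
    }
    catch (err) {
      // Close the placeholder window if resolving the URL failed.
      placeholder?.close()
      options.onError?.(err as Error)
    }
  }
}
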
@@ -43,6 +43,7 @@ const CloudPlanItem: FC<CloudPlanItemProps> = ({
  const isCurrentPaidPlan = isCurrent && !isFreePlan
  const isPlanDisabled = isCurrentPaidPlan ? false : planInfo.level <= ALL_PLANS[currentPlan].level
  const { isCurrentWorkspaceManager } = useAppContext()
+  const openAsyncWindow = useAsyncWindowOpen()

  const btnText = useMemo(() => {
    if (isCurrent)

@@ -55,8 +56,6 @@ const CloudPlanItem: FC<CloudPlanItemProps> = ({
    })[plan]
  }, [isCurrent, plan, t])

-  const { openAsync } = useAsyncWindowOpen()
-
  const handleGetPayUrl = async () => {
    if (loading)
      return

@@ -75,13 +74,16 @@ const CloudPlanItem: FC<CloudPlanItemProps> = ({
    setLoading(true)
    try {
      if (isCurrentPaidPlan) {
-        await openAsync(
-          () => fetchBillingUrl().then(res => res.url),
-          {
-            errorMessage: 'Failed to open billing page',
-            windowFeatures: 'noopener,noreferrer',
+        await openAsyncWindow(async () => {
+          const res = await fetchBillingUrl()
+          if (res.url)
+            return res.url
+          throw new Error('Failed to open billing page')
+        }, {
+          onError: (err) => {
+            Toast.notify({ type: 'error', message: err.message || String(err) })
          },
-        )
+        })
        return
      }

@@ -2,7 +2,7 @@ import cn from '@/utils/classnames'
import React, { useCallback, useMemo, useState } from 'react'

type CredentialIconProps = {
-  avatar_url?: string
+  avatarUrl?: string
  name: string
  size?: number
  className?: string

@@ -16,12 +16,12 @@ const ICON_BG_COLORS = [
]

export const CredentialIcon: React.FC<CredentialIconProps> = ({
-  avatar_url,
+  avatarUrl,
  name,
  size = 20,
  className = '',
}) => {
-  const [showAvatar, setShowAvatar] = useState(!!avatar_url && avatar_url !== 'default')
+  const [showAvatar, setShowAvatar] = useState(!!avatarUrl && avatarUrl !== 'default')
  const firstLetter = useMemo(() => name.charAt(0).toUpperCase(), [name])
  const bgColor = useMemo(() => ICON_BG_COLORS[firstLetter.charCodeAt(0) % ICON_BG_COLORS.length], [firstLetter])

@@ -29,17 +29,20 @@ export const CredentialIcon: React.FC<CredentialIconProps> = ({
    setShowAvatar(false)
  }, [])

-  if (avatar_url && avatar_url !== 'default' && showAvatar) {
+  if (avatarUrl && avatarUrl !== 'default' && showAvatar) {
    return (
      <div
-        className='flex shrink-0 items-center justify-center overflow-hidden rounded-md border border-divider-regular'
+        className={cn(
+          'flex shrink-0 items-center justify-center overflow-hidden rounded-md border border-divider-regular',
+          className,
+        )}
        style={{ width: `${size}px`, height: `${size}px` }}
      >
        <img
-          src={avatar_url}
+          src={avatarUrl}
          width={size}
          height={size}
-          className={cn('shrink-0 object-contain', className)}
+          className='shrink-0 object-contain'
          onError={onImgLoadError}
        />
      </div>

@@ -25,7 +25,7 @@ type IFileUploaderProps = {
  onFileUpdate: (fileItem: FileItem, progress: number, list: FileItem[]) => void
  onFileListUpdate?: (files: FileItem[]) => void
  onPreview: (file: File) => void
-  notSupportBatchUpload?: boolean
+  supportBatchUpload?: boolean
}

const FileUploader = ({

@@ -35,7 +35,7 @@ const FileUploader = ({
  onFileUpdate,
  onFileListUpdate,
  onPreview,
-  notSupportBatchUpload,
+  supportBatchUpload = false,
}: IFileUploaderProps) => {
  const { t } = useTranslation()
  const { notify } = useContext(ToastContext)

@@ -44,7 +44,7 @@ const FileUploader = ({
  const dropRef = useRef<HTMLDivElement>(null)
  const dragRef = useRef<HTMLDivElement>(null)
  const fileUploader = useRef<HTMLInputElement>(null)
-  const hideUpload = notSupportBatchUpload && fileList.length > 0
+  const hideUpload = !supportBatchUpload && fileList.length > 0

  const { data: fileUploadConfigResponse } = useFileUploadConfig()
  const { data: supportFileTypesResponse } = useFileSupportTypes()

@@ -68,9 +68,9 @@ const FileUploader = ({
  const ACCEPTS = supportTypes.map((ext: string) => `.${ext}`)
  const fileUploadConfig = useMemo(() => ({
    file_size_limit: fileUploadConfigResponse?.file_size_limit ?? 15,
-    batch_count_limit: fileUploadConfigResponse?.batch_count_limit ?? 5,
-    file_upload_limit: fileUploadConfigResponse?.file_upload_limit ?? 5,
-  }), [fileUploadConfigResponse])
+    batch_count_limit: supportBatchUpload ? (fileUploadConfigResponse?.batch_count_limit ?? 5) : 1,
+    file_upload_limit: supportBatchUpload ? (fileUploadConfigResponse?.file_upload_limit ?? 5) : 1,
+  }), [fileUploadConfigResponse, supportBatchUpload])

  const fileListRef = useRef<FileItem[]>([])

@@ -254,12 +254,12 @@ const FileUploader = ({
      }),
    )
    let files = nested.flat()
-    if (notSupportBatchUpload) files = files.slice(0, 1)
+    if (!supportBatchUpload) files = files.slice(0, 1)
    files = files.slice(0, fileUploadConfig.batch_count_limit)
    const valid = files.filter(isValid)
    initialUpload(valid)
  },
-  [initialUpload, isValid, notSupportBatchUpload, traverseFileEntry, fileUploadConfig],
+  [initialUpload, isValid, supportBatchUpload, traverseFileEntry, fileUploadConfig],
  )
  const selectHandle = () => {
    if (fileUploader.current)

@@ -303,7 +303,7 @@ const FileUploader = ({
  id="fileUploader"
  className="hidden"
  type="file"
-  multiple={!notSupportBatchUpload}
+  multiple={supportBatchUpload}
  accept={ACCEPTS.join(',')}
  onChange={fileChangeHandle}
/>

@@ -317,7 +317,7 @@ const FileUploader = ({
<RiUploadCloud2Line className='mr-2 size-5' />

<span>
-  {notSupportBatchUpload ? t('datasetCreation.stepOne.uploader.buttonSingleFile') : t('datasetCreation.stepOne.uploader.button')}
+  {supportBatchUpload ? t('datasetCreation.stepOne.uploader.button') : t('datasetCreation.stepOne.uploader.buttonSingleFile')}
  {supportTypes.length > 0 && (
    <label className="ml-1 cursor-pointer text-text-accent" onClick={selectHandle}>{t('datasetCreation.stepOne.uploader.browse')}</label>
  )}

@@ -326,7 +326,7 @@ const FileUploader = ({
<div>{t('datasetCreation.stepOne.uploader.tip', {
  size: fileUploadConfig.file_size_limit,
  supportTypes: supportTypesShowNames,
-  batchCount: notSupportBatchUpload ? 1 : fileUploadConfig.batch_count_limit,
+  batchCount: fileUploadConfig.batch_count_limit,
  totalCount: fileUploadConfig.file_upload_limit,
})}</div>
{dragging && <div ref={dragRef} className='absolute left-0 top-0 h-full w-full' />}

@@ -110,7 +110,7 @@ const StepOne = ({
  const hasNotin = notionPages.length > 0
  const isVectorSpaceFull = plan.usage.vectorSpace >= plan.total.vectorSpace
  const isShowVectorSpaceFull = (allFileLoaded || hasNotin) && isVectorSpaceFull && enableBilling
-  const notSupportBatchUpload = enableBilling && plan.type === 'sandbox'
+  const supportBatchUpload = !enableBilling || plan.type !== 'sandbox'
  const nextDisabled = useMemo(() => {
    if (!files.length)
      return true

@@ -229,7 +229,7 @@ const StepOne = ({
  onFileListUpdate={updateFileList}
  onFileUpdate={updateFile}
  onPreview={updateCurrentFile}
-  notSupportBatchUpload={notSupportBatchUpload}
+  supportBatchUpload={supportBatchUpload}
/>
{isShowVectorSpaceFull && (
  <div className='mb-4 max-w-[640px]'>

@@ -259,6 +259,7 @@ const StepOne = ({
  credentialList={notionCredentialList}
  onSelectCredential={updateNotionCredentialId}
  datasetId={datasetId}
+  supportBatchUpload={supportBatchUpload}
/>
</div>
{isShowVectorSpaceFull && (

@@ -290,6 +291,7 @@ const StepOne = ({
  crawlOptions={crawlOptions}
  onCrawlOptionsChange={onCrawlOptionsChange}
  authedDataSourceList={authedDataSourceList}
+  supportBatchUpload={supportBatchUpload}
/>
</div>
{isShowVectorSpaceFull && (

@@ -6,6 +6,7 @@ import cn from '@/utils/classnames'
import type { CrawlResultItem as CrawlResultItemType } from '@/models/datasets'
import Checkbox from '@/app/components/base/checkbox'
import Button from '@/app/components/base/button'
+import Radio from '@/app/components/base/radio/ui'

type Props = {
  payload: CrawlResultItemType

@@ -13,6 +14,7 @@ type Props = {
  isPreview: boolean
  onCheckChange: (checked: boolean) => void
  onPreview: () => void
+  isMultipleChoice: boolean
}

const CrawledResultItem: FC<Props> = ({

@@ -21,6 +23,7 @@ const CrawledResultItem: FC<Props> = ({
  isChecked,
  onCheckChange,
  onPreview,
+  isMultipleChoice,
}) => {
  const { t } = useTranslation()

@@ -31,7 +34,21 @@ const CrawledResultItem: FC<Props> = ({
  <div className={cn(isPreview ? 'bg-state-base-active' : 'group hover:bg-state-base-hover', 'cursor-pointer rounded-lg p-2')}>
    <div className='relative flex'>
      <div className='flex h-5 items-center'>
-        <Checkbox className='mr-2 shrink-0' checked={isChecked} onCheck={handleCheckChange} />
+        {
+          isMultipleChoice ? (
+            <Checkbox
+              className='mr-2 shrink-0'
+              checked={isChecked}
+              onCheck={handleCheckChange}
+            />
+          ) : (
+            <Radio
+              className='mr-2 shrink-0'
+              isChecked={isChecked}
+              onCheck={handleCheckChange}
+            />
+          )
+        }
      </div>
      <div className='flex min-w-0 grow flex-col'>
        <div

@@ -16,6 +16,7 @@ type Props = {
  onSelectedChange: (selected: CrawlResultItem[]) => void
  onPreview: (payload: CrawlResultItem) => void
  usedTime: number
+  isMultipleChoice: boolean
}

const CrawledResult: FC<Props> = ({

@@ -25,6 +26,7 @@ const CrawledResult: FC<Props> = ({
  onSelectedChange,
  onPreview,
  usedTime,
+  isMultipleChoice,
}) => {
  const { t } = useTranslation()

@@ -40,13 +42,17 @@ const CrawledResult: FC<Props> = ({

  const handleItemCheckChange = useCallback((item: CrawlResultItem) => {
    return (checked: boolean) => {
-      if (checked)
-        onSelectedChange([...checkedList, item])
-
-      else
+      if (checked) {
+        if (isMultipleChoice)
+          onSelectedChange([...checkedList, item])
+        else
+          onSelectedChange([item])
+      }
+      else {
        onSelectedChange(checkedList.filter(checkedItem => checkedItem.source_url !== item.source_url))
+      }
    }
-  }, [checkedList, onSelectedChange])
+  }, [checkedList, isMultipleChoice, onSelectedChange])

  const [previewIndex, setPreviewIndex] = React.useState<number>(-1)
  const handlePreview = useCallback((index: number) => {

@@ -59,11 +65,13 @@ const CrawledResult: FC<Props> = ({
  return (
    <div className={cn(className, 'border-t-[0.5px] border-divider-regular shadow-xs shadow-shadow-shadow-3')}>
      <div className='flex h-[34px] items-center justify-between px-4'>
-        <CheckboxWithLabel
-          isChecked={isCheckAll}
-          onChange={handleCheckedAll} label={isCheckAll ? t(`${I18N_PREFIX}.resetAll`) : t(`${I18N_PREFIX}.selectAll`)}
-          labelClassName='system-[13px] leading-[16px] font-medium text-text-secondary'
-        />
+        {isMultipleChoice && (
+          <CheckboxWithLabel
+            isChecked={isCheckAll}
+            onChange={handleCheckedAll} label={isCheckAll ? t(`${I18N_PREFIX}.resetAll`) : t(`${I18N_PREFIX}.selectAll`)}
+            labelClassName='system-[13px] leading-[16px] font-medium text-text-secondary'
+          />
+        )}
        <div className='text-xs text-text-tertiary'>
          {t(`${I18N_PREFIX}.scrapTimeInfo`, {
            total: list.length,

@@ -80,6 +88,7 @@ const CrawledResult: FC<Props> = ({
  payload={item}
  isChecked={checkedList.some(checkedItem => checkedItem.source_url === item.source_url)}
  onCheckChange={handleItemCheckChange(item)}
+  isMultipleChoice={isMultipleChoice}
/>
))}
</div>

@@ -26,6 +26,7 @@ type Props = {
  onJobIdChange: (jobId: string) => void
  crawlOptions: CrawlOptions
  onCrawlOptionsChange: (payload: CrawlOptions) => void
+  supportBatchUpload: boolean
}

enum Step {

@@ -41,6 +42,7 @@ const FireCrawl: FC<Props> = ({
  onJobIdChange,
  crawlOptions,
  onCrawlOptionsChange,
+  supportBatchUpload,
}) => {
  const { t } = useTranslation()
  const [step, setStep] = useState<Step>(Step.init)

@@ -171,7 +173,7 @@ const FireCrawl: FC<Props> = ({
    content: item.markdown,
  }))
  setCrawlResult(data)
-  onCheckedCrawlResultChange(data.data || []) // default select the crawl result
+  onCheckedCrawlResultChange(supportBatchUpload ? (data.data || []) : (data.data?.slice(0, 1) || [])) // default select the crawl result
  setCrawlErrorMessage('')
}
}

@@ -182,7 +184,7 @@ const FireCrawl: FC<Props> = ({
  finally {
    setStep(Step.finished)
  }
-  }, [checkValid, crawlOptions, onJobIdChange, t, waitForCrawlFinished, onCheckedCrawlResultChange])
+  }, [checkValid, crawlOptions, onJobIdChange, waitForCrawlFinished, t, onCheckedCrawlResultChange, supportBatchUpload])

  return (
    <div>

@@ -221,6 +223,7 @@ const FireCrawl: FC<Props> = ({
  onSelectedChange={onCheckedCrawlResultChange}
  onPreview={onPreview}
  usedTime={Number.parseFloat(crawlResult?.time_consuming as string) || 0}
+  isMultipleChoice={supportBatchUpload}
/>
}
</div>

@@ -24,6 +24,7 @@ type Props = {
  crawlOptions: CrawlOptions
  onCrawlOptionsChange: (payload: CrawlOptions) => void
  authedDataSourceList: DataSourceAuth[]
+  supportBatchUpload?: boolean
}

const Website: FC<Props> = ({

@@ -35,6 +36,7 @@ const Website: FC<Props> = ({
  crawlOptions,
  onCrawlOptionsChange,
  authedDataSourceList,
+  supportBatchUpload = false,
}) => {
  const { t } = useTranslation()
  const { setShowAccountSettingModal } = useModalContext()

@@ -116,6 +118,7 @@ const Website: FC<Props> = ({
  onJobIdChange={onJobIdChange}
  crawlOptions={crawlOptions}
  onCrawlOptionsChange={onCrawlOptionsChange}
+  supportBatchUpload={supportBatchUpload}
/>
)}
{source && selectedProvider === DataSourceProvider.waterCrawl && (

@@ -126,6 +129,7 @@ const Website: FC<Props> = ({
  onJobIdChange={onJobIdChange}
  crawlOptions={crawlOptions}
  onCrawlOptionsChange={onCrawlOptionsChange}
+  supportBatchUpload={supportBatchUpload}
/>
)}
{source && selectedProvider === DataSourceProvider.jinaReader && (

@@ -136,6 +140,7 @@ const Website: FC<Props> = ({
  onJobIdChange={onJobIdChange}
  crawlOptions={crawlOptions}
  onCrawlOptionsChange={onCrawlOptionsChange}
+  supportBatchUpload={supportBatchUpload}
/>
)}
{!source && (

@@ -26,6 +26,7 @@ type Props = {
   onJobIdChange: (jobId: string) => void
   crawlOptions: CrawlOptions
   onCrawlOptionsChange: (payload: CrawlOptions) => void
+  supportBatchUpload: boolean
 }

 enum Step {

@@ -41,6 +42,7 @@ const JinaReader: FC<Props> = ({
   onJobIdChange,
   crawlOptions,
   onCrawlOptionsChange,
+  supportBatchUpload,
 }) => {
   const { t } = useTranslation()
   const [step, setStep] = useState<Step>(Step.init)

@@ -157,7 +159,7 @@ const JinaReader: FC<Props> = ({
           total: 1,
           data: [{
             title,
-            content,
+            markdown: content,
             description,
             source_url: url,
           }],

@@ -176,7 +178,7 @@ const JinaReader: FC<Props> = ({
       }
       else {
         setCrawlResult(data)
-        onCheckedCrawlResultChange(data.data || []) // default select the crawl result
+        onCheckedCrawlResultChange(supportBatchUpload ? (data.data || []) : (data.data?.slice(0, 1) || [])) // default select the crawl result
         setCrawlErrorMessage('')
       }
     }

@@ -188,7 +190,7 @@ const JinaReader: FC<Props> = ({
     finally {
       setStep(Step.finished)
     }
-  }, [checkValid, crawlOptions, onCheckedCrawlResultChange, onJobIdChange, t, waitForCrawlFinished])
+  }, [checkValid, crawlOptions, onCheckedCrawlResultChange, onJobIdChange, supportBatchUpload, t, waitForCrawlFinished])

   return (
     <div>

@@ -227,6 +229,7 @@ const JinaReader: FC<Props> = ({
             onSelectedChange={onCheckedCrawlResultChange}
             onPreview={onPreview}
             usedTime={Number.parseFloat(crawlResult?.time_consuming as string) || 0}
+            isMultipleChoice={supportBatchUpload}
           />
         }
       </div>
@@ -32,7 +32,7 @@ const WebsitePreview = ({
         <div className='system-xs-medium truncate text-text-tertiary' title={payload.source_url}>{payload.source_url}</div>
       </div>
       <div className={cn(s.previewContent, 'body-md-regular')}>
-        <div className={cn(s.fileContent)}>{payload.content}</div>
+        <div className={cn(s.fileContent)}>{payload.markdown}</div>
       </div>
     </div>
   )
@@ -26,6 +26,7 @@ type Props = {
   onJobIdChange: (jobId: string) => void
   crawlOptions: CrawlOptions
   onCrawlOptionsChange: (payload: CrawlOptions) => void
+  supportBatchUpload: boolean
 }

 enum Step {

@@ -41,6 +42,7 @@ const WaterCrawl: FC<Props> = ({
   onJobIdChange,
   crawlOptions,
   onCrawlOptionsChange,
+  supportBatchUpload,
 }) => {
   const { t } = useTranslation()
   const [step, setStep] = useState<Step>(Step.init)

@@ -132,7 +134,7 @@ const WaterCrawl: FC<Props> = ({
       },
     }
   }
-  }, [crawlOptions.limit])
+  }, [crawlOptions.limit, onCheckedCrawlResultChange])

   const handleRun = useCallback(async (url: string) => {
     const { isValid, errorMsg } = checkValid(url)

@@ -163,7 +165,7 @@ const WaterCrawl: FC<Props> = ({
     }
     else {
       setCrawlResult(data)
-      onCheckedCrawlResultChange(data.data || []) // default select the crawl result
+      onCheckedCrawlResultChange(supportBatchUpload ? (data.data || []) : (data.data?.slice(0, 1) || [])) // default select the crawl result
      setCrawlErrorMessage('')
    }
  }

@@ -174,7 +176,7 @@ const WaterCrawl: FC<Props> = ({
     finally {
       setStep(Step.finished)
     }
-  }, [checkValid, crawlOptions, onJobIdChange, t, waitForCrawlFinished])
+  }, [checkValid, crawlOptions, onCheckedCrawlResultChange, onJobIdChange, supportBatchUpload, t, waitForCrawlFinished])

   return (
     <div>

@@ -213,6 +215,7 @@ const WaterCrawl: FC<Props> = ({
             onSelectedChange={onCheckedCrawlResultChange}
             onPreview={onPreview}
             usedTime={Number.parseFloat(crawlResult?.time_consuming as string) || 0}
+            isMultipleChoice={supportBatchUpload}
           />
         }
       </div>
@@ -10,14 +10,12 @@ import Trigger from './trigger'
 import List from './list'

 export type CredentialSelectorProps = {
-  pluginName: string
   currentCredentialId: string
   onCredentialChange: (credentialId: string) => void
   credentials: Array<DataSourceCredential>
 }

 const CredentialSelector = ({
-  pluginName,
   currentCredentialId,
   onCredentialChange,
   credentials,

@@ -50,7 +48,6 @@ const CredentialSelector = ({
       <PortalToFollowElemTrigger onClick={toggle} className='grow overflow-hidden'>
         <Trigger
           currentCredential={currentCredential}
-          pluginName={pluginName}
           isOpen={open}
         />
       </PortalToFollowElemTrigger>

@@ -58,7 +55,6 @@ const CredentialSelector = ({
         <List
           currentCredentialId={currentCredentialId}
           credentials={credentials}
-          pluginName={pluginName}
           onCredentialChange={handleCredentialChange}
         />
       </PortalToFollowElemContent>
@@ -2,22 +2,18 @@ import { CredentialIcon } from '@/app/components/datasets/common/credential-icon'
 import type { DataSourceCredential } from '@/types/pipeline'
 import { RiCheckLine } from '@remixicon/react'
 import React, { useCallback } from 'react'
-import { useTranslation } from 'react-i18next'

 type ItemProps = {
   credential: DataSourceCredential
-  pluginName: string
   isSelected: boolean
   onCredentialChange: (credentialId: string) => void
 }

 const Item = ({
   credential,
-  pluginName,
   isSelected,
   onCredentialChange,
 }: ItemProps) => {
-  const { t } = useTranslation()
   const { avatar_url, name } = credential

   const handleCredentialChange = useCallback(() => {

@@ -30,15 +26,12 @@ const Item = ({
       onClick={handleCredentialChange}
     >
       <CredentialIcon
-        avatar_url={avatar_url}
+        avatarUrl={avatar_url}
         name={name}
         size={20}
       />
       <span className='system-sm-medium grow truncate text-text-secondary'>
-        {t('datasetPipeline.credentialSelector.name', {
-          credentialName: name,
-          pluginName,
-        })}
+        {name}
       </span>
       {
         isSelected && (
@@ -5,14 +5,12 @@ import Item from './item'
 type ListProps = {
   currentCredentialId: string
   credentials: Array<DataSourceCredential>
-  pluginName: string
   onCredentialChange: (credentialId: string) => void
 }

 const List = ({
   currentCredentialId,
   credentials,
-  pluginName,
   onCredentialChange,
 }: ListProps) => {
   return (

@@ -24,7 +22,6 @@ const List = ({
           <Item
             key={credential.id}
             credential={credential}
-            pluginName={pluginName}
             isSelected={isSelected}
             onCredentialChange={onCredentialChange}
           />
@@ -1,23 +1,18 @@
 import React from 'react'
 import type { DataSourceCredential } from '@/types/pipeline'
-import { useTranslation } from 'react-i18next'
 import { RiArrowDownSLine } from '@remixicon/react'
 import cn from '@/utils/classnames'
 import { CredentialIcon } from '@/app/components/datasets/common/credential-icon'

 type TriggerProps = {
   currentCredential: DataSourceCredential | undefined
-  pluginName: string
   isOpen: boolean
 }

 const Trigger = ({
   currentCredential,
-  pluginName,
   isOpen,
 }: TriggerProps) => {
-  const { t } = useTranslation()
-
   const {
     avatar_url,
     name = '',

@@ -31,16 +26,13 @@ const Trigger = ({
       )}
     >
       <CredentialIcon
-        avatar_url={avatar_url}
+        avatarUrl={avatar_url}
         name={name}
         size={20}
       />
       <div className='flex grow items-center gap-x-1 overflow-hidden'>
         <span className='system-md-semibold grow truncate text-text-secondary'>
-          {t('datasetPipeline.credentialSelector.name', {
-            credentialName: name,
-            pluginName,
-          })}
+          {name}
         </span>
         <RiArrowDownSLine className='size-4 shrink-0 text-text-secondary' />
       </div>
@@ -11,12 +11,14 @@ type HeaderProps = {
   docTitle: string
   docLink: string
   onClickConfiguration?: () => void
+  pluginName: string
 } & CredentialSelectorProps

 const Header = ({
   docTitle,
   docLink,
   onClickConfiguration,
+  pluginName,
   ...rest
 }: HeaderProps) => {
   const { t } = useTranslation()

@@ -29,7 +31,7 @@ const Header = ({
       />
       <Divider type='vertical' className='mx-1 h-3.5 shrink-0' />
       <Tooltip
-        popupContent={t('datasetPipeline.configurationTip', { pluginName: rest.pluginName })}
+        popupContent={t('datasetPipeline.configurationTip', { pluginName })}
         position='top'
       >
         <Button
@@ -23,12 +23,12 @@ const SimplePieChart = dynamic(() => import('@/app/components/base/simple-pie-chart'), ...)

 export type LocalFileProps = {
   allowedExtensions: string[]
-  notSupportBatchUpload?: boolean
+  supportBatchUpload?: boolean
 }

 const LocalFile = ({
   allowedExtensions,
-  notSupportBatchUpload,
+  supportBatchUpload = false,
 }: LocalFileProps) => {
   const { t } = useTranslation()
   const { notify } = useContext(ToastContext)

@@ -42,7 +42,7 @@ const LocalFile = ({
   const fileUploader = useRef<HTMLInputElement>(null)
   const fileListRef = useRef<FileItem[]>([])

-  const hideUpload = notSupportBatchUpload && localFileList.length > 0
+  const hideUpload = !supportBatchUpload && localFileList.length > 0

   const { data: fileUploadConfigResponse } = useFileUploadConfig()
   const supportTypesShowNames = useMemo(() => {

@@ -64,9 +64,9 @@ const LocalFile = ({
   const ACCEPTS = allowedExtensions.map((ext: string) => `.${ext}`)
   const fileUploadConfig = useMemo(() => ({
     file_size_limit: fileUploadConfigResponse?.file_size_limit ?? 15,
-    batch_count_limit: fileUploadConfigResponse?.batch_count_limit ?? 5,
-    file_upload_limit: fileUploadConfigResponse?.file_upload_limit ?? 5,
-  }), [fileUploadConfigResponse])
+    batch_count_limit: supportBatchUpload ? (fileUploadConfigResponse?.batch_count_limit ?? 5) : 1,
+    file_upload_limit: supportBatchUpload ? (fileUploadConfigResponse?.file_upload_limit ?? 5) : 1,
+  }), [fileUploadConfigResponse, supportBatchUpload])

   const updateFile = useCallback((fileItem: FileItem, progress: number, list: FileItem[]) => {
     const { setLocalFileList } = dataSourceStore.getState()

@@ -119,7 +119,7 @@ const LocalFile = ({
       notify({ type: 'error', message: t('datasetCreation.stepOne.uploader.validation.size', { size: fileUploadConfig.file_size_limit }) })

     return isValidType && isValidSize
-  }, [fileUploadConfig, notify, t, ACCEPTS])
+  }, [notify, t, ACCEPTS, fileUploadConfig.file_size_limit])

 type UploadResult = Awaited<ReturnType<typeof upload>>

@@ -230,12 +230,12 @@ const LocalFile = ({
       return

     let files = [...e.dataTransfer.files] as File[]
-    if (notSupportBatchUpload)
+    if (!supportBatchUpload)
       files = files.slice(0, 1)

     const validFiles = files.filter(isValid)
     initialUpload(validFiles)
-  }, [initialUpload, isValid, notSupportBatchUpload])
+  }, [initialUpload, isValid, supportBatchUpload])

   const selectHandle = useCallback(() => {
     if (fileUploader.current)

@@ -280,7 +280,7 @@ const LocalFile = ({
           id='fileUploader'
           className='hidden'
           type='file'
-          multiple={!notSupportBatchUpload}
+          multiple={supportBatchUpload}
           accept={ACCEPTS.join(',')}
           onChange={fileChangeHandle}
         />

@@ -296,7 +296,7 @@ const LocalFile = ({
         <RiUploadCloud2Line className='mr-2 size-5' />

         <span>
-          {notSupportBatchUpload ? t('datasetCreation.stepOne.uploader.buttonSingleFile') : t('datasetCreation.stepOne.uploader.button')}
+          {supportBatchUpload ? t('datasetCreation.stepOne.uploader.button') : t('datasetCreation.stepOne.uploader.buttonSingleFile')}
           {allowedExtensions.length > 0 && (
             <label className='ml-1 cursor-pointer text-text-accent' onClick={selectHandle}>{t('datasetCreation.stepOne.uploader.browse')}</label>
           )}

@@ -305,7 +305,7 @@ const LocalFile = ({
         <div>{t('datasetCreation.stepOne.uploader.tip', {
           size: fileUploadConfig.file_size_limit,
           supportTypes: supportTypesShowNames,
-          batchCount: notSupportBatchUpload ? 1 : fileUploadConfig.batch_count_limit,
+          batchCount: fileUploadConfig.batch_count_limit,
           totalCount: fileUploadConfig.file_upload_limit,
         })}</div>
         {dragging && <div ref={dragRef} className='absolute left-0 top-0 h-full w-full' />}
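The LocalFile hunks above flip the negative notSupportBatchUpload prop into a positive supportBatchUpload and pin both count limits to 1 in single-file mode. A small sketch of the derived config, assuming only the response shape and defaults visible in this diff:

  // Effective upload limits derived from the server config (sketch).
  type FileUploadConfigResponse = {
    file_size_limit?: number
    batch_count_limit?: number
    file_upload_limit?: number
  }

  function effectiveUploadConfig(resp: FileUploadConfigResponse | undefined, supportBatchUpload: boolean) {
    return {
      file_size_limit: resp?.file_size_limit ?? 15,
      // Single-file mode caps both counts at 1 regardless of server config.
      batch_count_limit: supportBatchUpload ? (resp?.batch_count_limit ?? 5) : 1,
      file_upload_limit: supportBatchUpload ? (resp?.file_upload_limit ?? 5) : 1,
    }
  }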
@@ -19,16 +19,18 @@ import { useDocLink } from '@/context/i18n'
 import { ACCOUNT_SETTING_TAB } from '@/app/components/header/account-setting/constants'

 type OnlineDocumentsProps = {
-  isInPipeline?: boolean
   nodeId: string
   nodeData: DataSourceNodeType
   onCredentialChange: (credentialId: string) => void
+  isInPipeline?: boolean
+  supportBatchUpload?: boolean
 }

 const OnlineDocuments = ({
   nodeId,
   nodeData,
   isInPipeline = false,
+  supportBatchUpload = false,
   onCredentialChange,
 }: OnlineDocumentsProps) => {
   const docLink = useDocLink()

@@ -157,7 +159,7 @@ const OnlineDocuments = ({
             onSelect={handleSelectPages}
             canPreview={!isInPipeline}
             onPreview={handlePreviewPage}
-            isMultipleChoice={!isInPipeline}
+            isMultipleChoice={supportBatchUpload}
             currentCredentialId={currentCredentialId}
           />
         ) : (
@@ -17,6 +17,7 @@ type FileListProps = {
   handleSelectFile: (file: OnlineDriveFile) => void
   handleOpenFolder: (file: OnlineDriveFile) => void
   isLoading: boolean
+  supportBatchUpload: boolean
 }

 const FileList = ({

@@ -32,6 +33,7 @@ const FileList = ({
   handleOpenFolder,
   isInPipeline,
   isLoading,
+  supportBatchUpload,
 }: FileListProps) => {
   const [inputValue, setInputValue] = useState(keywords)

@@ -72,8 +74,8 @@ const FileList = ({
         handleResetKeywords={handleResetKeywords}
         handleOpenFolder={handleOpenFolder}
         handleSelectFile={handleSelectFile}
-        isInPipeline={isInPipeline}
         isLoading={isLoading}
+        supportBatchUpload={supportBatchUpload}
       />
     </div>
   )
@@ -11,8 +11,8 @@ type FileListProps = {
   fileList: OnlineDriveFile[]
   selectedFileIds: string[]
   keywords: string
-  isInPipeline: boolean
   isLoading: boolean
+  supportBatchUpload: boolean
   handleResetKeywords: () => void
   handleSelectFile: (file: OnlineDriveFile) => void
   handleOpenFolder: (file: OnlineDriveFile) => void

@@ -25,8 +25,8 @@ const List = ({
   handleResetKeywords,
   handleSelectFile,
   handleOpenFolder,
-  isInPipeline,
   isLoading,
+  supportBatchUpload,
 }: FileListProps) => {
   const anchorRef = useRef<HTMLDivElement>(null)
   const observerRef = useRef<IntersectionObserver>(null)

@@ -80,7 +80,7 @@ const List = ({
             isSelected={isSelected}
             onSelect={handleSelectFile}
             onOpen={handleOpenFolder}
-            isMultipleChoice={!isInPipeline}
+            isMultipleChoice={supportBatchUpload}
           />
         )
       })
@@ -20,14 +20,16 @@ import { ACCOUNT_SETTING_TAB } from '@/app/components/header/account-setting/constants'
 type OnlineDriveProps = {
   nodeId: string
   nodeData: DataSourceNodeType
-  isInPipeline?: boolean
   onCredentialChange: (credentialId: string) => void
+  isInPipeline?: boolean
+  supportBatchUpload?: boolean
 }

 const OnlineDrive = ({
   nodeId,
   nodeData,
   isInPipeline = false,
+  supportBatchUpload = false,
   onCredentialChange,
 }: OnlineDriveProps) => {
   const docLink = useDocLink()

@@ -111,7 +113,7 @@ const OnlineDrive = ({
       },
     },
   )
-  }, [datasourceNodeRunURL, dataSourceStore])
+  }, [dataSourceStore, datasourceNodeRunURL, breadcrumbs])

   useEffect(() => {
     if (!currentCredentialId) return

@@ -152,12 +154,12 @@ const OnlineDrive = ({
         draft.splice(index, 1)
       }
       else {
-        if (isInPipeline && draft.length >= 1) return
+        if (!supportBatchUpload && draft.length >= 1) return
         draft.push(file.id)
       }
     })
     setSelectedFileIds(newSelectedFileList)
-  }, [dataSourceStore, isInPipeline])
+  }, [dataSourceStore, supportBatchUpload])

   const handleOpenFolder = useCallback((file: OnlineDriveFile) => {
     const { breadcrumbs, prefix, setBreadcrumbs, setPrefix, setBucket, setOnlineDriveFileList, setSelectedFileIds } = dataSourceStore.getState()

@@ -177,7 +179,7 @@ const OnlineDrive = ({
       setBreadcrumbs(newBreadcrumbs)
       setPrefix(newPrefix)
     }
-  }, [dataSourceStore, getOnlineDriveFiles])
+  }, [dataSourceStore])

   const handleSetting = useCallback(() => {
     setShowAccountSettingModal({

@@ -209,6 +211,7 @@ const OnlineDrive = ({
         handleOpenFolder={handleOpenFolder}
         isInPipeline={isInPipeline}
         isLoading={isLoading}
+        supportBatchUpload={supportBatchUpload}
       />
     </div>
   )
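The handleSelectFile hunk above now keys its single-selection guard on supportBatchUpload rather than isInPipeline. A sketch of that toggle, using a plain array in place of the component's immer draft:

  // Toggle a file id in the selection, enforcing single-select mode (sketch).
  function toggleSelection(draft: string[], fileId: string, supportBatchUpload: boolean) {
    const index = draft.indexOf(fileId)
    if (index > -1) {
      draft.splice(index, 1) // already selected: deselect
      return
    }
    if (!supportBatchUpload && draft.length >= 1)
      return // single mode: ignore a second selection instead of replacing it
    draft.push(fileId)
  }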
@@ -46,6 +46,7 @@ const CrawledResultItem = ({
         />
       ) : (
         <Radio
+          className='shrink-0'
           isChecked={isChecked}
           onCheck={handleCheckChange}
         />
@@ -33,14 +33,16 @@ const I18N_PREFIX = 'datasetCreation.stepOne.website'
 export type WebsiteCrawlProps = {
   nodeId: string
   nodeData: DataSourceNodeType
-  isInPipeline?: boolean
   onCredentialChange: (credentialId: string) => void
+  isInPipeline?: boolean
+  supportBatchUpload?: boolean
 }

 const WebsiteCrawl = ({
   nodeId,
   nodeData,
   isInPipeline = false,
+  supportBatchUpload = false,
   onCredentialChange,
 }: WebsiteCrawlProps) => {
   const { t } = useTranslation()

@@ -122,7 +124,7 @@ const WebsiteCrawl = ({
           time_consuming: time_consuming ?? 0,
         }
         setCrawlResult(crawlResultData)
-        handleCheckedCrawlResultChange(isInPipeline ? [crawlData[0]] : crawlData) // default select the crawl result
+        handleCheckedCrawlResultChange(supportBatchUpload ? crawlData : crawlData.slice(0, 1)) // default select the crawl result
         setCrawlErrorMessage('')
         setStep(CrawlStep.finished)
       },

@@ -132,7 +134,7 @@ const WebsiteCrawl = ({
       },
     },
   )
-  }, [dataSourceStore, datasourceNodeRunURL, handleCheckedCrawlResultChange, isInPipeline, t])
+  }, [dataSourceStore, datasourceNodeRunURL, handleCheckedCrawlResultChange, supportBatchUpload, t])

   const handleSubmit = useCallback((value: Record<string, any>) => {
     handleRun(value)

@@ -149,7 +151,7 @@ const WebsiteCrawl = ({
     setTotalNum(0)
     setCrawlErrorMessage('')
     onCredentialChange(credentialId)
-  }, [dataSourceStore, onCredentialChange])
+  }, [onCredentialChange])

   return (
     <div className='flex flex-col'>

@@ -195,7 +197,7 @@ const WebsiteCrawl = ({
           previewIndex={previewIndex}
           onPreview={handlePreview}
           showPreview={!isInPipeline}
-          isMultipleChoice={!isInPipeline} // only support single choice in test run
+          isMultipleChoice={supportBatchUpload} // only support single choice in test run
         />
       )}
     </div>
@@ -102,7 +102,7 @@ const CreateFormPipeline = () => {
       return onlineDriveFileList.length > 0 && isVectorSpaceFull && enableBilling
     return false
   }, [allFileLoaded, datasource, datasourceType, enableBilling, isVectorSpaceFull, onlineDocuments.length, onlineDriveFileList.length, websitePages.length])
-  const notSupportBatchUpload = enableBilling && plan.type === 'sandbox'
+  const supportBatchUpload = !enableBilling || plan.type !== 'sandbox'

   const nextBtnDisabled = useMemo(() => {
     if (!datasource) return true

@@ -125,15 +125,16 @@ const CreateFormPipeline = () => {
   const showSelect = useMemo(() => {
     if (datasourceType === DatasourceType.onlineDocument) {
       const pagesCount = currentWorkspace?.pages.length ?? 0
-      return pagesCount > 0
+      return supportBatchUpload && pagesCount > 0
     }
     if (datasourceType === DatasourceType.onlineDrive) {
       const isBucketList = onlineDriveFileList.some(file => file.type === 'bucket')
-      return !isBucketList && onlineDriveFileList.filter((item) => {
+      return supportBatchUpload && !isBucketList && onlineDriveFileList.filter((item) => {
         return item.type !== 'bucket'
       }).length > 0
     }
-  }, [currentWorkspace?.pages.length, datasourceType, onlineDriveFileList])
+    return false
+  }, [currentWorkspace?.pages.length, datasourceType, supportBatchUpload, onlineDriveFileList])

   const totalOptions = useMemo(() => {
     if (datasourceType === DatasourceType.onlineDocument)

@@ -395,7 +396,7 @@ const CreateFormPipeline = () => {
       clearWebsiteCrawlData()
     else if (dataSource.nodeData.provider_type === DatasourceType.onlineDrive)
       clearOnlineDriveData()
-  }, [])
+  }, [clearOnlineDocumentData, clearOnlineDriveData, clearWebsiteCrawlData])

   const handleSwitchDataSource = useCallback((dataSource: Datasource) => {
     const {

@@ -406,13 +407,13 @@ const CreateFormPipeline = () => {
     setCurrentCredentialId('')
     currentNodeIdRef.current = dataSource.nodeId
     setDatasource(dataSource)
-  }, [dataSourceStore])
+  }, [clearDataSourceData, dataSourceStore])

   const handleCredentialChange = useCallback((credentialId: string) => {
     const { setCurrentCredentialId } = dataSourceStore.getState()
     clearDataSourceData(datasource!)
     setCurrentCredentialId(credentialId)
-  }, [dataSourceStore, datasource])
+  }, [clearDataSourceData, dataSourceStore, datasource])

   if (isFetchingPipelineInfo) {
     return (

@@ -443,7 +444,7 @@ const CreateFormPipeline = () => {
             {datasourceType === DatasourceType.localFile && (
               <LocalFile
                 allowedExtensions={datasource!.nodeData.fileExtensions || []}
-                notSupportBatchUpload={notSupportBatchUpload}
+                supportBatchUpload={supportBatchUpload}
               />
             )}
             {datasourceType === DatasourceType.onlineDocument && (

@@ -451,6 +452,7 @@ const CreateFormPipeline = () => {
                 nodeId={datasource!.nodeId}
                 nodeData={datasource!.nodeData}
                 onCredentialChange={handleCredentialChange}
+                supportBatchUpload={supportBatchUpload}
               />
             )}
             {datasourceType === DatasourceType.websiteCrawl && (

@@ -458,6 +460,7 @@ const CreateFormPipeline = () => {
                 nodeId={datasource!.nodeId}
                 nodeData={datasource!.nodeData}
                 onCredentialChange={handleCredentialChange}
+                supportBatchUpload={supportBatchUpload}
               />
             )}
             {datasourceType === DatasourceType.onlineDrive && (

@@ -465,6 +468,7 @@ const CreateFormPipeline = () => {
                 nodeId={datasource!.nodeId}
                 nodeData={datasource!.nodeData}
                 onCredentialChange={handleCredentialChange}
+                supportBatchUpload={supportBatchUpload}
               />
             )}
             {isShowVectorSpaceFull && (
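The first hunk above replaces the negative notSupportBatchUpload flag with its logical negation, which by De Morgan's laws is equivalent. A sketch with a stand-in plan type:

  // Batch upload is allowed unless billing is enabled AND the plan is sandbox.
  type Plan = { type: string }

  function supportsBatchUpload(enableBilling: boolean, plan: Plan): boolean {
    // Old form: notSupportBatchUpload = enableBilling && plan.type === 'sandbox'
    return !enableBilling || plan.type !== 'sandbox'
  }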
@@ -27,7 +27,7 @@ const WebsitePreview = ({
           <span className='uppercase' title={currentWebsite.source_url}>{currentWebsite.source_url}</span>
           <span>·</span>
           <span>·</span>
-          <span>{`${formatNumberAbbreviated(currentWebsite.content.length)} ${t('datasetPipeline.addDocuments.characters')}`}</span>
+          <span>{`${formatNumberAbbreviated(currentWebsite.markdown.length)} ${t('datasetPipeline.addDocuments.characters')}`}</span>
         </div>
       </div>
       <button

@@ -39,7 +39,7 @@ const WebsitePreview = ({
       </button>
     </div>
     <div className='body-md-regular grow overflow-hidden px-6 py-5 text-text-secondary'>
-      {currentWebsite.content}
+      {currentWebsite.markdown}
     </div>
   </div>
 )
@@ -113,7 +113,7 @@ const DocumentSettings = ({ datasetId, documentId }: DocumentSettingsProps) => {
     return [{
       title: websiteInfo.title,
       source_url: websiteInfo.source_url,
-      content: websiteInfo.content,
+      markdown: websiteInfo.content,
       description: websiteInfo.description,
     }]
   }, [websiteInfo])
@@ -55,7 +55,7 @@ const PipelineSettings = ({
     if (lastRunData?.datasource_type === DatasourceType.websiteCrawl) {
       const { content, description, source_url, title } = lastRunData.datasource_info
       websitePages.push({
-        content,
+        markdown: content,
         description,
         source_url,
         title,

@@ -135,7 +135,7 @@ const PipelineSettings = ({
         push(`/datasets/${datasetId}/documents`)
       },
     })
-  }, [datasetId, invalidDocumentDetail, invalidDocumentList, lastRunData, pipelineId, push, runPublishedPipeline])
+  }, [datasetId, documentId, invalidDocumentDetail, invalidDocumentList, lastRunData, pipelineId, push, runPublishedPipeline])

   const onClickProcess = useCallback(() => {
     isPreview.current = false
@@ -131,7 +131,7 @@ const Preparation = () => {
       clearWebsiteCrawlData()
     else if (dataSource.nodeData.provider_type === DatasourceType.onlineDrive)
       clearOnlineDriveData()
-  }, [])
+  }, [clearOnlineDocumentData, clearOnlineDriveData, clearWebsiteCrawlData])

   const handleSwitchDataSource = useCallback((dataSource: Datasource) => {
     const {

@@ -142,13 +142,13 @@ const Preparation = () => {
     setCurrentCredentialId('')
     currentNodeIdRef.current = dataSource.nodeId
     setDatasource(dataSource)
-  }, [dataSourceStore])
+  }, [clearDataSourceData, dataSourceStore])

   const handleCredentialChange = useCallback((credentialId: string) => {
     const { setCurrentCredentialId } = dataSourceStore.getState()
     clearDataSourceData(datasource!)
     setCurrentCredentialId(credentialId)
-  }, [dataSourceStore, datasource])
+  }, [clearDataSourceData, dataSourceStore, datasource])
   return (
     <>
       <StepIndicator steps={steps} currentStep={currentStep} />

@@ -164,7 +164,7 @@ const Preparation = () => {
         {datasourceType === DatasourceType.localFile && (
           <LocalFile
             allowedExtensions={datasource!.nodeData.fileExtensions || []}
-            notSupportBatchUpload // only support single file upload in test run
+            supportBatchUpload={false} // only support single file upload in test run
           />
         )}
         {datasourceType === DatasourceType.onlineDocument && (

@@ -173,6 +173,7 @@ const Preparation = () => {
             nodeData={datasource!.nodeData}
             isInPipeline
             onCredentialChange={handleCredentialChange}
+            supportBatchUpload={false}
           />
         )}
         {datasourceType === DatasourceType.websiteCrawl && (

@@ -181,6 +182,7 @@ const Preparation = () => {
             nodeData={datasource!.nodeData}
             isInPipeline
             onCredentialChange={handleCredentialChange}
+            supportBatchUpload={false}
           />
         )}
         {datasourceType === DatasourceType.onlineDrive && (

@@ -189,6 +191,7 @@ const Preparation = () => {
             nodeData={datasource!.nodeData}
             isInPipeline
             onCredentialChange={handleCredentialChange}
+            supportBatchUpload={false}
           />
         )}
       </div>
@@ -70,10 +70,10 @@ export const isSystemVar = (valueSelector: ValueSelector) => {
 }

 export const isGlobalVar = (valueSelector: ValueSelector) => {
-  if(!isSystemVar(valueSelector)) return false
+  if (!isSystemVar(valueSelector)) return false
   const second = valueSelector[1]

-  if(['query', 'files'].includes(second))
+  if (['query', 'files'].includes(second))
     return false
   return true
 }

@@ -1296,7 +1296,7 @@ export const getNodeUsedVars = (node: Node): ValueSelector[] => {
     case BlockEnum.KnowledgeRetrieval: {
       const {
         query_variable_selector,
-        query_attachment_selector,
+        query_attachment_selector = [],
       } = data as KnowledgeRetrievalNodeType
       res = [query_variable_selector, query_attachment_selector]
       break

@@ -1638,7 +1638,7 @@ export const updateNodeVars = (
       )
         payload.query_variable_selector = newVarSelector
       if (
-        payload.query_attachment_selector.join('.') === oldVarSelector.join('.')
+        payload.query_attachment_selector?.join('.') === oldVarSelector.join('.')
       )
         payload.query_attachment_selector = newVarSelector
       break
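The last two hunks above defend against KnowledgeRetrieval nodes whose query_attachment_selector is undefined, for example workflows created before the field existed. A sketch of why optional chaining suffices for the comparison, with ValueSelector assumed to be a string-path array:

  type ValueSelector = string[]

  function selectorMatches(selector: ValueSelector | undefined, oldSelector: ValueSelector): boolean {
    // undefined?.join('.') evaluates to undefined, which never equals the
    // joined old selector, so a missing selector is safely treated as no match.
    return selector?.join('.') === oldSelector.join('.')
  }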
@@ -43,13 +43,13 @@ const BeforeRunForm: FC<CustomRunFormProps> = (props) => {
       clearWebsiteCrawlData()
     else if (datasourceType === DatasourceType.onlineDrive)
       clearOnlineDriveData()
-  }, [datasourceType])
+  }, [clearOnlineDocumentData, clearOnlineDriveData, clearWebsiteCrawlData, datasourceType])

   const handleCredentialChange = useCallback((credentialId: string) => {
     const { setCurrentCredentialId } = dataSourceStore.getState()
     clearDataSourceData()
     setCurrentCredentialId(credentialId)
-  }, [dataSourceStore])
+  }, [clearDataSourceData, dataSourceStore])

   return (
     <PanelWrap

@@ -60,7 +60,7 @@ const BeforeRunForm: FC<CustomRunFormProps> = (props) => {
       {datasourceType === DatasourceType.localFile && (
         <LocalFile
           allowedExtensions={datasourceNodeData.fileExtensions || []}
-          notSupportBatchUpload
+          supportBatchUpload={false}
         />
       )}
       {datasourceType === DatasourceType.onlineDocument && (

@@ -69,6 +69,7 @@ const BeforeRunForm: FC<CustomRunFormProps> = (props) => {
           nodeData={datasourceNodeData}
           isInPipeline
           onCredentialChange={handleCredentialChange}
+          supportBatchUpload={false}
         />
       )}
       {datasourceType === DatasourceType.websiteCrawl && (

@@ -77,6 +78,7 @@ const BeforeRunForm: FC<CustomRunFormProps> = (props) => {
           nodeData={datasourceNodeData}
           isInPipeline
           onCredentialChange={handleCredentialChange}
+          supportBatchUpload={false}
         />
       )}
       {datasourceType === DatasourceType.onlineDrive && (

@@ -85,6 +87,7 @@ const BeforeRunForm: FC<CustomRunFormProps> = (props) => {
           nodeData={datasourceNodeData}
           isInPipeline
           onCredentialChange={handleCredentialChange}
+          supportBatchUpload={false}
         />
       )}
       <div className='flex justify-end gap-x-2'>
@@ -80,7 +80,7 @@ const useSingleRunFormParams = ({
       },
     ]
     if (hasMultiModalDatasets) {
-      const currentVariable = findVariableWhenOnLLMVision(payload.query_attachment_selector, availableFileVars)
+      const currentVariable = findVariableWhenOnLLMVision(payload.query_attachment_selector || [], availableFileVars)
       inputFields.push(
         {
           inputs: [{

@@ -98,13 +98,13 @@ const useSingleRunFormParams = ({
   }, [query, setQuery, t, datasetsDetail, payload.dataset_ids, payload.query_attachment_selector, availableFileVars, queryAttachment, setQueryAttachment])

   const getDependentVars = () => {
-    return [payload.query_variable_selector, payload.query_attachment_selector]
+    return [payload.query_variable_selector, payload.query_attachment_selector || []]
   }
   const getDependentVar = (variable: string) => {
     if (variable === 'query')
       return payload.query_variable_selector
     if (variable === 'queryAttachment')
-      return payload.query_attachment_selector
+      return payload.query_attachment_selector || []
   }

   return {
@@ -25,6 +25,8 @@ export NEXT_PUBLIC_SENTRY_DSN=${SENTRY_DSN}
 export NEXT_PUBLIC_SITE_ABOUT=${SITE_ABOUT}
 export NEXT_TELEMETRY_DISABLED=${NEXT_TELEMETRY_DISABLED}

+export NEXT_PUBLIC_AMPLITUDE_API_KEY=${AMPLITUDE_API_KEY}
+
 export NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS=${TEXT_GENERATION_TIMEOUT_MS}
 export NEXT_PUBLIC_CSP_WHITELIST=${CSP_WHITELIST}
 export NEXT_PUBLIC_ALLOW_EMBED=${ALLOW_EMBED}
@@ -0,0 +1,183 @@
import { act, renderHook } from '@testing-library/react'
import { useAsyncWindowOpen } from './use-async-window-open'

describe('useAsyncWindowOpen', () => {
  const originalOpen = window.open

  beforeEach(() => {
    jest.clearAllMocks()
  })

  afterAll(() => {
    window.open = originalOpen
  })

  it('opens immediate url synchronously, clears opener, without calling async getter', async () => {
    const mockWindow: any = { opener: 'should-clear' }
    const openSpy = jest.fn(() => mockWindow)
    window.open = openSpy
    const getUrl = jest.fn()
    const { result } = renderHook(() => useAsyncWindowOpen())

    await act(async () => {
      await result.current(getUrl, {
        immediateUrl: 'https://example.com',
        target: '_blank',
        features: undefined,
      })
    })

    expect(openSpy).toHaveBeenCalledWith('https://example.com', '_blank', 'noopener,noreferrer')
    expect(getUrl).not.toHaveBeenCalled()
    expect(mockWindow.opener).toBeNull()
  })

  it('appends noopener,noreferrer when immediate open passes custom features', async () => {
    const mockWindow: any = { opener: 'should-clear' }
    const openSpy = jest.fn(() => mockWindow)
    window.open = openSpy
    const getUrl = jest.fn()
    const { result } = renderHook(() => useAsyncWindowOpen())

    await act(async () => {
      await result.current(getUrl, {
        immediateUrl: 'https://example.com',
        target: '_blank',
        features: 'width=500',
      })
    })

    expect(openSpy).toHaveBeenCalledWith('https://example.com', '_blank', 'width=500,noopener,noreferrer')
    expect(getUrl).not.toHaveBeenCalled()
    expect(mockWindow.opener).toBeNull()
  })

  it('reports error when immediate window fails to open', async () => {
    const openSpy = jest.fn(() => null)
    window.open = openSpy
    const getUrl = jest.fn()
    const onError = jest.fn()
    const { result } = renderHook(() => useAsyncWindowOpen())

    await act(async () => {
      await result.current(getUrl, {
        immediateUrl: 'https://example.com',
        target: '_blank',
        onError,
      })
    })

    expect(onError).toHaveBeenCalled()
    const errArg = onError.mock.calls[0][0] as Error
    expect(errArg.message).toBe('Failed to open new window')
    expect(getUrl).not.toHaveBeenCalled()
  })

  it('sets opener to null and redirects when async url resolves', async () => {
    const close = jest.fn()
    const mockWindow: any = {
      location: { href: '' },
      close,
      opener: 'should-be-cleared',
    }
    const openSpy = jest.fn(() => mockWindow)
    window.open = openSpy
    const { result } = renderHook(() => useAsyncWindowOpen())

    await act(async () => {
      await result.current(async () => 'https://example.com/path')
    })

    expect(openSpy).toHaveBeenCalledWith('about:blank', '_blank', undefined)
    expect(mockWindow.opener).toBeNull()
    expect(mockWindow.location.href).toBe('https://example.com/path')
    expect(close).not.toHaveBeenCalled()
  })

  it('closes placeholder and forwards error when async getter throws', async () => {
    const close = jest.fn()
    const mockWindow: any = {
      location: { href: '' },
      close,
      opener: null,
    }
    const openSpy = jest.fn(() => mockWindow)
    window.open = openSpy
    const onError = jest.fn()
    const { result } = renderHook(() => useAsyncWindowOpen())

    const error = new Error('fetch failed')
    await act(async () => {
      await result.current(async () => {
        throw error
      }, { onError })
    })

    expect(close).toHaveBeenCalled()
    expect(onError).toHaveBeenCalledWith(error)
    expect(mockWindow.location.href).toBe('')
  })

  it('preserves custom features as-is for async open', async () => {
    const close = jest.fn()
    const mockWindow: any = {
      location: { href: '' },
      close,
      opener: 'should-be-cleared',
    }
    const openSpy = jest.fn(() => mockWindow)
    window.open = openSpy
    const { result } = renderHook(() => useAsyncWindowOpen())

    await act(async () => {
      await result.current(async () => 'https://example.com/path', {
        target: '_blank',
        features: 'width=500',
      })
    })

    expect(openSpy).toHaveBeenCalledWith('about:blank', '_blank', 'width=500')
    expect(mockWindow.opener).toBeNull()
    expect(mockWindow.location.href).toBe('https://example.com/path')
    expect(close).not.toHaveBeenCalled()
  })

  it('closes placeholder and reports when no url is returned', async () => {
    const close = jest.fn()
    const mockWindow: any = {
      location: { href: '' },
      close,
      opener: null,
    }
    const openSpy = jest.fn(() => mockWindow)
    window.open = openSpy
    const onError = jest.fn()
    const { result } = renderHook(() => useAsyncWindowOpen())

    await act(async () => {
      await result.current(async () => null, { onError })
    })

    expect(close).toHaveBeenCalled()
    expect(onError).toHaveBeenCalled()
    const errArg = onError.mock.calls[0][0] as Error
    expect(errArg.message).toBe('No url resolved for new window')
  })

  it('reports failure when window.open returns null', async () => {
    const openSpy = jest.fn(() => null)
    window.open = openSpy
    const getUrl = jest.fn()
    const onError = jest.fn()
    const { result } = renderHook(() => useAsyncWindowOpen())

    await act(async () => {
      await result.current(getUrl, { onError })
    })

    expect(onError).toHaveBeenCalled()
    const errArg = onError.mock.calls[0][0] as Error
    expect(errArg.message).toBe('Failed to open new window')
    expect(getUrl).not.toHaveBeenCalled()
  })
})
@@ -1,72 +1,59 @@
 import { useCallback } from 'react'
-import Toast from '@/app/components/base/toast'
-
-export type AsyncWindowOpenOptions = {
-  successMessage?: string
-  errorMessage?: string
-  windowFeatures?: string
-  onError?: (error: any) => void
-  onSuccess?: (url: string) => void
-}
-
-export const useAsyncWindowOpen = () => {
-  const openAsync = useCallback(async (
-    fetchUrl: () => Promise<string>,
-    options: AsyncWindowOpenOptions = {},
-  ) => {
-    const {
-      successMessage,
-      errorMessage = 'Failed to open page',
-      windowFeatures = 'noopener,noreferrer',
-      onError,
-      onSuccess,
-    } = options
-
-    const newWindow = window.open('', '_blank', windowFeatures)
-
-    if (!newWindow) {
-      const error = new Error('Popup blocked by browser')
-      onError?.(error)
-      Toast.notify({
-        type: 'error',
-        message: 'Popup blocked. Please allow popups for this site.',
-      })
-      return
-    }
-
-    try {
-      const url = await fetchUrl()
-
-      if (url) {
-        newWindow.location.href = url
-        onSuccess?.(url)
-
-        if (successMessage) {
-          Toast.notify({
-            type: 'success',
-            message: successMessage,
-          })
-        }
-      }
-      else {
-        newWindow.close()
-        const error = new Error('Invalid URL received')
-        onError?.(error)
-        Toast.notify({
-          type: 'error',
-          message: errorMessage,
-        })
-      }
-    }
-    catch (error) {
-      newWindow.close()
-      onError?.(error)
-      Toast.notify({
-        type: 'error',
-        message: errorMessage,
-      })
-    }
-  }, [])
-
-  return { openAsync }
-}
+
+type GetUrl = () => Promise<string | null | undefined>
+
+type AsyncWindowOpenOptions = {
+  immediateUrl?: string | null
+  target?: string
+  features?: string
+  onError?: (error: Error) => void
+}
+
+export const useAsyncWindowOpen = () => useCallback(async (getUrl: GetUrl, options?: AsyncWindowOpenOptions) => {
+  const {
+    immediateUrl,
+    target = '_blank',
+    features,
+    onError,
+  } = options ?? {}
+
+  const secureImmediateFeatures = features ? `${features},noopener,noreferrer` : 'noopener,noreferrer'
+
+  if (immediateUrl) {
+    const newWindow = window.open(immediateUrl, target, secureImmediateFeatures)
+    if (!newWindow) {
+      onError?.(new Error('Failed to open new window'))
+      return
+    }
+    try {
+      newWindow.opener = null
+    }
+    catch { /* noop */ }
+    return
+  }
+
+  const newWindow = window.open('about:blank', target, features)
+  if (!newWindow) {
+    onError?.(new Error('Failed to open new window'))
+    return
+  }
+
+  try {
+    newWindow.opener = null
+  }
+  catch { /* noop */ }
+
+  try {
+    const url = await getUrl()
+    if (url) {
+      newWindow.location.href = url
+      return
+    }
+    newWindow.close()
+    onError?.(new Error('No url resolved for new window'))
+  }
+  catch (error) {
+    newWindow.close()
+    onError?.(error instanceof Error ? error : new Error(String(error)))
+  }
+}, [])
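A possible call site for the reworked hook, inferred only from the new signature in this diff; the endpoint below is hypothetical. The hook opens a placeholder window synchronously inside the click handler (so popup blockers allow it) and fills in the URL once the async getter resolves:

  import { useAsyncWindowOpen } from './use-async-window-open'

  const OpenAuthPageButton = () => {
    const openWindow = useAsyncWindowOpen()

    const handleClick = () => {
      openWindow(
        // Async getter: resolve the target URL after the window is opened.
        async () => {
          const res = await fetch('/console/api/oauth/url') // hypothetical endpoint
          const { url } = await res.json()
          return url as string
        },
        { onError: err => console.error('failed to open window:', err.message) },
      )
    }

    return <button onClick={handleClick}>Authorize</button>
  }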
@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} ist nicht verbunden',
     notConnectedTip: 'Um mit {{name}} zu synchronisieren, muss zuerst eine Verbindung zu {{name}} hergestellt werden.',
   },
-  credentialSelector: {
-    name: '{{credentialName}}\'s {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'Bestätigung',

@@ -145,9 +145,6 @@ const translation = {
     emptySearchResult: 'No items were found',
     resetKeywords: 'Reset keywords',
   },
-  credentialSelector: {
-    name: '{{credentialName}}\'s {{pluginName}}',
-  },
   configurationTip: 'Configure {{pluginName}}',
   conversion: {
     title: 'Convert to Knowledge Pipeline',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} no está conectado',
     notConnectedTip: 'Para sincronizar con {{name}}, primero se debe establecer conexión con {{name}}.',
   },
-  credentialSelector: {
-    name: '{{credentialName}} de {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'Confirmación',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} متصل نیست',
     notConnectedTip: 'برای همگامسازی با {{name}}، ابتدا باید اتصال به {{name}} برقرار شود.',
   },
-  credentialSelector: {
-    name: '{{pluginName}} {{credentialName}}',
-  },
   conversion: {
     confirm: {
       title: 'تایید',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} n\'est pas connecté',
     notConnectedTip: 'Pour se synchroniser avec {{name}}, une connexion à {{name}} doit d\'abord être établie.',
   },
-  credentialSelector: {
-    name: '{{credentialName}} de {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'Confirmation',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} कनेक्ट नहीं है',
     notConnectedTip: '{{name}} के साथ सिंक करने के लिए, पहले {{name}} से कनेक्शन स्थापित करना आवश्यक है।',
   },
-  credentialSelector: {
-    name: '{{credentialName}} का {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'पुष्टि',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} tidak terhubung',
     notConnectedTip: 'Untuk menyinkronkan dengan {{name}}, koneksi ke {{name}} harus dibuat terlebih dahulu.',
   },
-  credentialSelector: {
-    name: '{{credentialName}}\'s {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'Konfirmasi',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} non è connesso',
     notConnectedTip: 'Per sincronizzarsi con {{name}}, è necessario prima stabilire la connessione a {{name}}.',
   },
-  credentialSelector: {
-    name: '{{credentialName}}\'s {{pluginName}}',
-  },
   conversion: {
     confirm: {
       content: 'Questa azione è permanente. Non sarà possibile ripristinare il metodo precedente. Si prega di confermare per convertire.',

@@ -137,9 +137,6 @@ const translation = {
     emptySearchResult: 'アイテムは見つかりませんでした',
     resetKeywords: 'キーワードをリセットする',
   },
-  credentialSelector: {
-    name: '{{credentialName}}の{{pluginName}}',
-  },
   configurationTip: '{{pluginName}}を設定',
   conversion: {
     confirm: {

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}}가 연결되어 있지 않습니다',
     notConnectedTip: '{{name}}와(과) 동기화하려면 먼저 {{name}}에 연결해야 합니다.',
   },
-  credentialSelector: {
-    name: '{{credentialName}}의 {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: '확인',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} nie jest połączony',
     notConnectedTip: 'Aby zsynchronizować się z {{name}}, najpierw należy nawiązać połączenie z {{name}}.',
   },
-  credentialSelector: {
-    name: '{{credentialName}}\'s {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'Potwierdzenie',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} não está conectado',
     notConnectedTip: 'Para sincronizar com {{name}}, a conexão com {{name}} deve ser estabelecida primeiro.',
   },
-  credentialSelector: {
-    name: '{{credentialName}} de {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'Confirmação',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} nu este conectat',
     notConnectedTip: 'Pentru a sincroniza cu {{name}}, trebuie mai întâi să se stabilească conexiunea cu {{name}}.',
   },
-  credentialSelector: {
-    name: '{{pluginName}} al/a lui {{credentialName}}',
-  },
   conversion: {
     confirm: {
       title: 'Confirmare',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} не подключен',
     notConnectedTip: 'Чтобы синхронизироваться с {{name}}, сначала необходимо установить соединение с {{name}}.',
   },
-  credentialSelector: {
-    name: '{{credentialName}}\'s {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'Подтверждение',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} ni povezan',
     notConnectedTip: 'Za sinhronizacijo z {{name}} je treba najprej vzpostaviti povezavo z {{name}}.',
   },
-  credentialSelector: {
-    name: '{{credentialName}}\'s {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'Potrditev',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} ไม่ได้เชื่อมต่อ',
     notConnectedTip: 'เพื่อซิงค์กับ {{name}} ต้องสร้างการเชื่อมต่อกับ {{name}} ก่อน',
   },
-  credentialSelector: {
-    name: '{{credentialName}}\'s {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'การยืนยัน',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} bağlı değil',
     notConnectedTip: '{{name}} ile senkronize olmak için önce {{name}} bağlantısının kurulması gerekir.',
   },
-  credentialSelector: {
-    name: '{{credentialName}}\'un {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'Onay',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} не підключено',
     notConnectedTip: 'Щоб синхронізувати з {{name}}, спершу потрібно встановити з’єднання з {{name}}.',
   },
-  credentialSelector: {
-    name: '{{credentialName}}\'s {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'Підтвердження',

@@ -137,9 +137,6 @@ const translation = {
     notConnected: '{{name}} không được kết nối',
     notConnectedTip: 'Để đồng bộ với {{name}}, trước tiên phải thiết lập kết nối với {{name}}.',
   },
-  credentialSelector: {
-    name: '{{credentialName}}\'s {{pluginName}}',
-  },
   conversion: {
     confirm: {
       title: 'Sự xác nhận',

@@ -145,9 +145,6 @@ const translation = {
     emptySearchResult: '未找到任何项目',
     resetKeywords: '重置关键词',
   },
-  credentialSelector: {
-    name: '{{credentialName}} 的 {{pluginName}}',
-  },
   configurationTip: '配置 {{pluginName}}',
   conversion: {
     title: '转换为知识流水线',
Some files were not shown because too many files have changed in this diff.