mirror of https://github.com/langgenius/dify.git
Merge branch 'main' into feat/mcp-06-18
commit 7d91f4783b

@@ -1,6 +1,7 @@
#!/bin/bash

yq eval '.services.weaviate.ports += ["8080:8080"]' -i docker/docker-compose.yaml
+yq eval '.services.weaviate.ports += ["50051:50051"]' -i docker/docker-compose.yaml
yq eval '.services.qdrant.ports += ["6333:6333"]' -i docker/docker-compose.yaml
yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml
yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml
@@ -472,6 +472,9 @@ class ProviderConfiguration(BaseModel):
                provider_model_credentials_cache.delete()

                self.switch_preferred_provider_type(provider_type=ProviderType.CUSTOM, session=session)
            else:
                # some historical data may have a provider record but not be set as valid
                provider_record.is_valid = True

            session.commit()
        except Exception:
@@ -7,7 +7,7 @@ import uuid
from collections import deque
from collections.abc import Sequence
from datetime import datetime
-from typing import Final
+from typing import Final, cast
from urllib.parse import urljoin

import httpx
@@ -199,7 +199,7 @@ def convert_to_trace_id(uuid_v4: str | None) -> int:
        raise ValueError("UUID cannot be None")
    try:
        uuid_obj = uuid.UUID(uuid_v4)
-       return uuid_obj.int
+       return cast(int, uuid_obj.int)
    except ValueError as e:
        raise ValueError(f"Invalid UUID input: {uuid_v4}") from e
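Standalone, the change above is easiest to see by re-stating the same conversion outside Dify: a UUIDv4 string maps to its 128-bit integer form, which is what OTLP-style backends accept as a trace ID (illustrative sketch, not part of the diff):

import uuid
from typing import cast

def convert_to_trace_id(uuid_v4: str | None) -> int:
    if uuid_v4 is None:
        raise ValueError("UUID cannot be None")
    try:
        # .int is already an int; cast() only narrows the type for the checker
        return cast(int, uuid.UUID(uuid_v4).int)
    except ValueError as e:
        raise ValueError(f"Invalid UUID input: {uuid_v4}") from e

print(convert_to_trace_id(str(uuid.uuid4())))  # one deterministic 128-bit int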
@@ -13,6 +13,7 @@ class TracingProviderEnum(StrEnum):
    OPIK = "opik"
    WEAVE = "weave"
    ALIYUN = "aliyun"
+   TENCENT = "tencent"


class BaseTracingConfig(BaseModel):
@@ -195,5 +196,32 @@ class AliyunConfig(BaseTracingConfig):
        return validate_url_with_path(v, "https://tracing-analysis-dc-hz.aliyuncs.com")


class TencentConfig(BaseTracingConfig):
    """
    Tencent APM tracing config
    """

    token: str
    endpoint: str
    service_name: str

    @field_validator("token")
    @classmethod
    def token_validator(cls, v, info: ValidationInfo):
        if not v or v.strip() == "":
            raise ValueError("Token cannot be empty")
        return v

    @field_validator("endpoint")
    @classmethod
    def endpoint_validator(cls, v, info: ValidationInfo):
        return cls.validate_endpoint_url(v, "https://apm.tencentcloudapi.com")

    @field_validator("service_name")
    @classmethod
    def service_name_validator(cls, v, info: ValidationInfo):
        return cls.validate_project_field(v, "dify_app")


OPS_FILE_PATH = "ops_trace/"
OPS_TRACE_FAILED_KEY = "FAILED_OPS_TRACE"
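A minimal sketch of constructing the new config class (the import path matches the later hunk in this commit; the token and endpoint values here are placeholders):

from core.ops.entities.config_entity import TencentConfig

cfg = TencentConfig(
    token="example-token",  # placeholder; empty or whitespace-only raises ValueError
    endpoint="https://apm.tencentcloudapi.com",
    service_name="dify_app",  # validated against the "dify_app" default
)
print(cfg.endpoint)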
@@ -90,6 +90,7 @@ class SuggestedQuestionTraceInfo(BaseTraceInfo):

class DatasetRetrievalTraceInfo(BaseTraceInfo):
    documents: Any = None
+   error: str | None = None


class ToolTraceInfo(BaseTraceInfo):
@@ -120,6 +120,17 @@ class OpsTraceProviderConfigMap(collections.UserDict[str, dict[str, Any]]):
                    "trace_instance": AliyunDataTrace,
                }

            case TracingProviderEnum.TENCENT:
                from core.ops.entities.config_entity import TencentConfig
                from core.ops.tencent_trace.tencent_trace import TencentDataTrace

                return {
                    "config_class": TencentConfig,
                    "secret_keys": ["token"],
                    "other_keys": ["endpoint", "service_name"],
                    "trace_instance": TencentDataTrace,
                }

            case _:
                raise KeyError(f"Unsupported tracing provider: {provider}")
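Illustratively, downstream code resolves a provider's wiring through this map; a sketch assuming the map is indexed by the provider's string value (the exact call sites live elsewhere in ops_trace_manager):

provider_config_map = OpsTraceProviderConfigMap()
tencent_cfg = provider_config_map[TracingProviderEnum.TENCENT.value]
config_class = tencent_cfg["config_class"]      # TencentConfig
trace_instance = tencent_cfg["trace_instance"]  # TencentDataTrace
secret_keys = tencent_cfg["secret_keys"]        # ["token"]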
@@ -723,6 +734,7 @@ class TraceTask:
            end_time=timer.get("end"),
            metadata=metadata,
            message_data=message_data.to_dict(),
+           error=kwargs.get("error"),
        )

        return dataset_retrieval_trace_info
@@ -889,6 +901,7 @@ class TraceQueueManager:
                    continue
                file_id = uuid4().hex
                trace_info = task.execute()

                task_data = TaskData(
                    app_id=task.app_id,
                    trace_info_type=type(trace_info).__name__,
@@ -0,0 +1,337 @@
"""
Tencent APM Trace Client - handles network operations, metrics, and API communication
"""

from __future__ import annotations

import importlib
import logging
import os
import socket
from typing import TYPE_CHECKING
from urllib.parse import urlparse

if TYPE_CHECKING:
    from opentelemetry.metrics import Meter
    from opentelemetry.metrics._internal.instrument import Histogram
    from opentelemetry.sdk.metrics.export import MetricReader

from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.semconv.resource import ResourceAttributes
from opentelemetry.trace import SpanKind
from opentelemetry.util.types import AttributeValue

from configs import dify_config

from .entities.tencent_semconv import LLM_OPERATION_DURATION
from .entities.tencent_trace_entity import SpanData

logger = logging.getLogger(__name__)


class TencentTraceClient:
    """Tencent APM trace client using OpenTelemetry OTLP exporter"""

    def __init__(
        self,
        service_name: str,
        endpoint: str,
        token: str,
        max_queue_size: int = 1000,
        schedule_delay_sec: int = 5,
        max_export_batch_size: int = 50,
        metrics_export_interval_sec: int = 10,
    ):
        self.endpoint = endpoint
        self.token = token
        self.service_name = service_name
        self.metrics_export_interval_sec = metrics_export_interval_sec

        self.resource = Resource(
            attributes={
                ResourceAttributes.SERVICE_NAME: service_name,
                ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}",
                ResourceAttributes.DEPLOYMENT_ENVIRONMENT: f"{dify_config.DEPLOY_ENV}-{dify_config.EDITION}",
                ResourceAttributes.HOST_NAME: socket.gethostname(),
            }
        )
        # Prepare gRPC endpoint/metadata
        grpc_endpoint, insecure, _, _ = self._resolve_grpc_target(endpoint)

        headers = (("authorization", f"Bearer {token}"),)

        self.exporter = OTLPSpanExporter(
            endpoint=grpc_endpoint,
            headers=headers,
            insecure=insecure,
            timeout=30,
        )

        self.tracer_provider = TracerProvider(resource=self.resource)
        self.span_processor = BatchSpanProcessor(
            span_exporter=self.exporter,
            max_queue_size=max_queue_size,
            schedule_delay_millis=schedule_delay_sec * 1000,
            max_export_batch_size=max_export_batch_size,
        )
        self.tracer_provider.add_span_processor(self.span_processor)

        self.tracer = self.tracer_provider.get_tracer("dify.tencent_apm")

        # Store span contexts for parent-child relationships
        self.span_contexts: dict[int, trace_api.SpanContext] = {}

        self.meter: Meter | None = None
        self.hist_llm_duration: Histogram | None = None
        self.metric_reader: MetricReader | None = None

        # Metrics exporter and instruments
        try:
            from opentelemetry import metrics
            from opentelemetry.sdk.metrics import Histogram, MeterProvider
            from opentelemetry.sdk.metrics.export import AggregationTemporality, PeriodicExportingMetricReader

            protocol = os.getenv("OTEL_EXPORTER_OTLP_PROTOCOL", "").strip().lower()
            use_http_protobuf = protocol in {"http/protobuf", "http-protobuf"}
            use_http_json = protocol in {"http/json", "http-json"}

            # Set preferred temporality for histograms to DELTA
            preferred_temporality: dict[type, AggregationTemporality] = {Histogram: AggregationTemporality.DELTA}

            def _create_metric_exporter(exporter_cls, **kwargs):
                """Create metric exporter with preferred_temporality support"""
                try:
                    return exporter_cls(**kwargs, preferred_temporality=preferred_temporality)
                except Exception:
                    return exporter_cls(**kwargs)

            metric_reader = None
            if use_http_json:
                exporter_cls = None
                for mod_path in (
                    "opentelemetry.exporter.otlp.http.json.metric_exporter",
                    "opentelemetry.exporter.otlp.json.metric_exporter",
                ):
                    try:
                        mod = importlib.import_module(mod_path)
                        exporter_cls = getattr(mod, "OTLPMetricExporter", None)
                        if exporter_cls:
                            break
                    except Exception:
                        continue
                if exporter_cls is not None:
                    metric_exporter = _create_metric_exporter(
                        exporter_cls,
                        endpoint=endpoint,
                        headers={"authorization": f"Bearer {token}"},
                    )
                else:
                    from opentelemetry.exporter.otlp.proto.http.metric_exporter import (
                        OTLPMetricExporter as HttpMetricExporter,
                    )

                    metric_exporter = _create_metric_exporter(
                        HttpMetricExporter,
                        endpoint=endpoint,
                        headers={"authorization": f"Bearer {token}"},
                    )
                metric_reader = PeriodicExportingMetricReader(
                    metric_exporter, export_interval_millis=self.metrics_export_interval_sec * 1000
                )

            elif use_http_protobuf:
                from opentelemetry.exporter.otlp.proto.http.metric_exporter import (
                    OTLPMetricExporter as HttpMetricExporter,
                )

                metric_exporter = _create_metric_exporter(
                    HttpMetricExporter,
                    endpoint=endpoint,
                    headers={"authorization": f"Bearer {token}"},
                )
                metric_reader = PeriodicExportingMetricReader(
                    metric_exporter, export_interval_millis=self.metrics_export_interval_sec * 1000
                )
            else:
                from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import (
                    OTLPMetricExporter as GrpcMetricExporter,
                )

                m_grpc_endpoint, m_insecure, _, _ = self._resolve_grpc_target(endpoint)

                metric_exporter = _create_metric_exporter(
                    GrpcMetricExporter,
                    endpoint=m_grpc_endpoint,
                    headers={"authorization": f"Bearer {token}"},
                    insecure=m_insecure,
                )
                metric_reader = PeriodicExportingMetricReader(
                    metric_exporter, export_interval_millis=self.metrics_export_interval_sec * 1000
                )

            if metric_reader is not None:
                provider = MeterProvider(resource=self.resource, metric_readers=[metric_reader])
                metrics.set_meter_provider(provider)
                self.meter = metrics.get_meter("dify-sdk", dify_config.project.version)
                self.hist_llm_duration = self.meter.create_histogram(
                    name=LLM_OPERATION_DURATION,
                    unit="s",
                    description="LLM operation duration (seconds)",
                )
                self.metric_reader = metric_reader
            else:
                self.meter = None
                self.hist_llm_duration = None
                self.metric_reader = None
        except Exception:
            logger.exception("[Tencent APM] Metrics initialization failed; metrics disabled")
            self.meter = None
            self.hist_llm_duration = None
            self.metric_reader = None

    def add_span(self, span_data: SpanData) -> None:
        """Create and export span using OpenTelemetry Tracer API"""
        try:
            self._create_and_export_span(span_data)
            logger.debug("[Tencent APM] Created span: %s", span_data.name)

        except Exception:
            logger.exception("[Tencent APM] Failed to create span: %s", span_data.name)

    # Metrics recording API
    def record_llm_duration(self, latency_seconds: float, attributes: dict[str, str] | None = None) -> None:
        """Record LLM operation duration histogram in seconds."""
        try:
            if not hasattr(self, "hist_llm_duration") or self.hist_llm_duration is None:
                return
            attrs: dict[str, str] = {}
            if attributes:
                for k, v in attributes.items():
                    attrs[k] = str(v) if not isinstance(v, (str, int, float, bool)) else v  # type: ignore[assignment]
            self.hist_llm_duration.record(latency_seconds, attrs)  # type: ignore[attr-defined]
        except Exception:
            logger.debug("[Tencent APM] Failed to record LLM duration", exc_info=True)

    def _create_and_export_span(self, span_data: SpanData) -> None:
        """Create span using OpenTelemetry Tracer API"""
        try:
            parent_context = None
            if span_data.parent_span_id and span_data.parent_span_id in self.span_contexts:
                parent_context = trace_api.set_span_in_context(
                    trace_api.NonRecordingSpan(self.span_contexts[span_data.parent_span_id])
                )

            span = self.tracer.start_span(
                name=span_data.name,
                context=parent_context,
                kind=SpanKind.INTERNAL,
                start_time=span_data.start_time,
            )
            self.span_contexts[span_data.span_id] = span.get_span_context()

            if span_data.attributes:
                attributes: dict[str, AttributeValue] = {}
                for key, value in span_data.attributes.items():
                    if isinstance(value, (int, float, bool)):
                        attributes[key] = value
                    else:
                        attributes[key] = str(value)
                span.set_attributes(attributes)

            if span_data.events:
                for event in span_data.events:
                    span.add_event(event.name, event.attributes, event.timestamp)

            if span_data.status:
                span.set_status(span_data.status)

            # Manually end span; do not use context manager to avoid double-end warnings
            span.end(end_time=span_data.end_time)

        except Exception:
            logger.exception("[Tencent APM] Error creating span: %s", span_data.name)

    def api_check(self) -> bool:
        """Check API connectivity using socket connection test for gRPC endpoints"""
        try:
            # Resolve gRPC target consistently with exporters
            _, _, host, port = self._resolve_grpc_target(self.endpoint)

            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(5)
            result = sock.connect_ex((host, port))
            sock.close()

            if result == 0:
                logger.info("[Tencent APM] Endpoint %s:%s is accessible", host, port)
                return True
            else:
                logger.warning("[Tencent APM] Endpoint %s:%s is not accessible", host, port)
                if host in ["127.0.0.1", "localhost"]:
                    logger.info("[Tencent APM] Development environment detected, allowing config save")
                    return True
                return False

        except Exception:
            logger.exception("[Tencent APM] API check failed")
            if "127.0.0.1" in self.endpoint or "localhost" in self.endpoint:
                return True
            return False

    def get_project_url(self) -> str:
        """Get project console URL"""
        return "https://console.cloud.tencent.com/apm"

    def shutdown(self) -> None:
        """Shutdown the client and export remaining spans"""
        try:
            if self.span_processor:
                logger.info("[Tencent APM] Flushing remaining spans before shutdown")
                _ = self.span_processor.force_flush()
                self.span_processor.shutdown()

            if self.tracer_provider:
                self.tracer_provider.shutdown()
            if self.metric_reader is not None:
                try:
                    self.metric_reader.shutdown()  # type: ignore[attr-defined]
                except Exception:
                    pass

        except Exception:
            logger.exception("[Tencent APM] Error during client shutdown")

    @staticmethod
    def _resolve_grpc_target(endpoint: str, default_port: int = 4317) -> tuple[str, bool, str, int]:
        """Normalize endpoint to gRPC target and security flag.

        Returns:
            (grpc_endpoint, insecure, host, port)
        """
        try:
            if endpoint.startswith(("http://", "https://")):
                parsed = urlparse(endpoint)
                host = parsed.hostname or "localhost"
                port = parsed.port or default_port
                insecure = parsed.scheme == "http"
                return f"{host}:{port}", insecure, host, port

            host = endpoint
            port = default_port
            if ":" in endpoint:
                parts = endpoint.rsplit(":", 1)
                host = parts[0] or "localhost"
                try:
                    port = int(parts[1])
                except Exception:
                    port = default_port

            insecure = ("localhost" in host) or ("127.0.0.1" in host)
            return f"{host}:{port}", insecure, host, port
        except Exception:
            host, port = "localhost", default_port
            return f"{host}:{port}", True, host, port
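The endpoint normalization above is easiest to see by example; assuming the static method behaves exactly as written, all of these hold:

# http:// URLs are treated as insecure; the port defaults to 4317 when missing
assert TencentTraceClient._resolve_grpc_target("http://collector.example.com") == (
    "collector.example.com:4317", True, "collector.example.com", 4317
)

# https:// URLs keep TLS on and honor an explicit port
assert TencentTraceClient._resolve_grpc_target("https://apm.example.com:443") == (
    "apm.example.com:443", False, "apm.example.com", 443
)

# bare host:port falls back to the localhost/127.0.0.1 heuristic for TLS
assert TencentTraceClient._resolve_grpc_target("localhost:4317") == (
    "localhost:4317", True, "localhost", 4317
)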
@@ -0,0 +1 @@
# Tencent trace entities module
@@ -0,0 +1,73 @@
from enum import Enum

# public
GEN_AI_SESSION_ID = "gen_ai.session.id"

GEN_AI_USER_ID = "gen_ai.user.id"

GEN_AI_USER_NAME = "gen_ai.user.name"

GEN_AI_SPAN_KIND = "gen_ai.span.kind"

GEN_AI_FRAMEWORK = "gen_ai.framework"

GEN_AI_IS_ENTRY = "gen_ai.is_entry"  # mark to count the LLM-related traces

# Chain
INPUT_VALUE = "gen_ai.entity.input"

OUTPUT_VALUE = "gen_ai.entity.output"


# Retriever
RETRIEVAL_QUERY = "retrieval.query"

RETRIEVAL_DOCUMENT = "retrieval.document"


# GENERATION
GEN_AI_MODEL_NAME = "gen_ai.response.model"

GEN_AI_PROVIDER = "gen_ai.provider.name"


GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens"

GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens"

GEN_AI_USAGE_TOTAL_TOKENS = "gen_ai.usage.total_tokens"

GEN_AI_PROMPT_TEMPLATE_TEMPLATE = "gen_ai.prompt_template.template"

GEN_AI_PROMPT_TEMPLATE_VARIABLE = "gen_ai.prompt_template.variable"

GEN_AI_PROMPT = "gen_ai.prompt"

GEN_AI_COMPLETION = "gen_ai.completion"

GEN_AI_RESPONSE_FINISH_REASON = "gen_ai.response.finish_reason"

# Tool
TOOL_NAME = "tool.name"

TOOL_DESCRIPTION = "tool.description"

TOOL_PARAMETERS = "tool.parameters"

# Instrumentation Library
INSTRUMENTATION_NAME = "dify-sdk"
INSTRUMENTATION_VERSION = "0.1.0"
INSTRUMENTATION_LANGUAGE = "python"


# Metrics
LLM_OPERATION_DURATION = "gen_ai.client.operation.duration"


class GenAISpanKind(Enum):
    WORKFLOW = "WORKFLOW"  # OpenLLMetry
    RETRIEVER = "RETRIEVER"  # RAG
    GENERATION = "GENERATION"  # Langfuse
    TOOL = "TOOL"  # OpenLLMetry
    AGENT = "AGENT"  # OpenLLMetry
    TASK = "TASK"  # OpenLLMetry
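Taken together, these constants form the attribute dictionary attached to each span. A small illustration of an LLM-generation attribute set (model name and token counts are placeholder values):

llm_attributes = {
    GEN_AI_SPAN_KIND: GenAISpanKind.GENERATION.value,  # "GENERATION"
    GEN_AI_FRAMEWORK: "dify",
    GEN_AI_MODEL_NAME: "gpt-4o",           # placeholder model
    GEN_AI_USAGE_INPUT_TOKENS: "128",      # token counts are stored as strings
    GEN_AI_USAGE_OUTPUT_TOKENS: "256",
    GEN_AI_USAGE_TOTAL_TOKENS: "384",
}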
@@ -0,0 +1,21 @@
from collections.abc import Sequence

from opentelemetry import trace as trace_api
from opentelemetry.sdk.trace import Event
from opentelemetry.trace import Status, StatusCode
from pydantic import BaseModel, Field


class SpanData(BaseModel):
    model_config = {"arbitrary_types_allowed": True}

    trace_id: int = Field(..., description="The unique identifier for the trace.")
    parent_span_id: int | None = Field(None, description="The ID of the parent span, if any.")
    span_id: int = Field(..., description="The unique identifier for this span.")
    name: str = Field(..., description="The name of the span.")
    attributes: dict[str, str] = Field(default_factory=dict, description="Attributes associated with the span.")
    events: Sequence[Event] = Field(default_factory=list, description="Events recorded in the span.")
    links: Sequence[trace_api.Link] = Field(default_factory=list, description="Links to other spans.")
    status: Status = Field(default=Status(StatusCode.UNSET), description="The status of the span.")
    start_time: int = Field(..., description="The start time of the span in nanoseconds.")
    end_time: int = Field(..., description="The end time of the span in nanoseconds.")
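A minimal SpanData instance, assuming nanosecond timestamps as the fields require (the IDs here are toy values; real ones come from the TencentTraceUtils converters below):

import time

now_ns = time.time_ns()
span = SpanData(
    trace_id=0x1234,              # toy 128-bit trace id
    parent_span_id=None,          # root span
    span_id=0x5678,               # toy 64-bit span id
    name="message",
    attributes={"gen_ai.framework": "dify"},
    start_time=now_ns,
    end_time=now_ns + 1_000_000,  # +1 ms
)  # events, links, and status fall back to their declared defaults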
@@ -0,0 +1,372 @@
"""
Tencent APM Span Builder - handles all span construction logic
"""

import json
import logging
from datetime import datetime

from opentelemetry.trace import Status, StatusCode

from core.ops.entities.trace_entity import (
    DatasetRetrievalTraceInfo,
    MessageTraceInfo,
    ToolTraceInfo,
    WorkflowTraceInfo,
)
from core.ops.tencent_trace.entities.tencent_semconv import (
    GEN_AI_COMPLETION,
    GEN_AI_FRAMEWORK,
    GEN_AI_IS_ENTRY,
    GEN_AI_MODEL_NAME,
    GEN_AI_PROMPT,
    GEN_AI_PROVIDER,
    GEN_AI_RESPONSE_FINISH_REASON,
    GEN_AI_SESSION_ID,
    GEN_AI_SPAN_KIND,
    GEN_AI_USAGE_INPUT_TOKENS,
    GEN_AI_USAGE_OUTPUT_TOKENS,
    GEN_AI_USAGE_TOTAL_TOKENS,
    GEN_AI_USER_ID,
    INPUT_VALUE,
    OUTPUT_VALUE,
    RETRIEVAL_DOCUMENT,
    RETRIEVAL_QUERY,
    TOOL_DESCRIPTION,
    TOOL_NAME,
    TOOL_PARAMETERS,
    GenAISpanKind,
)
from core.ops.tencent_trace.entities.tencent_trace_entity import SpanData
from core.ops.tencent_trace.utils import TencentTraceUtils
from core.rag.models.document import Document
from core.workflow.entities.workflow_node_execution import (
    WorkflowNodeExecution,
    WorkflowNodeExecutionMetadataKey,
    WorkflowNodeExecutionStatus,
)

logger = logging.getLogger(__name__)


class TencentSpanBuilder:
    """Builder class for constructing different types of spans"""

    @staticmethod
    def _get_time_nanoseconds(time_value: datetime | None) -> int:
        """Convert datetime to nanoseconds for span creation."""
        return TencentTraceUtils.convert_datetime_to_nanoseconds(time_value)

    @staticmethod
    def build_workflow_spans(
        trace_info: WorkflowTraceInfo, trace_id: int, user_id: str, links: list | None = None
    ) -> list[SpanData]:
        """Build workflow-related spans"""
        spans = []
        links = links or []

        message_span_id = None
        workflow_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "workflow")

        if hasattr(trace_info, "metadata") and trace_info.metadata.get("conversation_id"):
            message_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "message")

        status = Status(StatusCode.OK)
        if trace_info.error:
            status = Status(StatusCode.ERROR, trace_info.error)

        if message_span_id:
            message_span = TencentSpanBuilder._build_message_span(
                trace_info, trace_id, message_span_id, user_id, status, links
            )
            spans.append(message_span)

        workflow_span = TencentSpanBuilder._build_workflow_span(
            trace_info, trace_id, workflow_span_id, message_span_id, user_id, status, links
        )
        spans.append(workflow_span)

        return spans

    @staticmethod
    def _build_message_span(
        trace_info: WorkflowTraceInfo, trace_id: int, message_span_id: int, user_id: str, status: Status, links: list
    ) -> SpanData:
        """Build message span for chatflow"""
        return SpanData(
            trace_id=trace_id,
            parent_span_id=None,
            span_id=message_span_id,
            name="message",
            start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time),
            end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_USER_ID: str(user_id),
                GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value,
                GEN_AI_FRAMEWORK: "dify",
                GEN_AI_IS_ENTRY: "true",
                INPUT_VALUE: trace_info.workflow_run_inputs.get("sys.query", ""),
                OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False),
            },
            status=status,
            links=links,
        )

    @staticmethod
    def _build_workflow_span(
        trace_info: WorkflowTraceInfo,
        trace_id: int,
        workflow_span_id: int,
        message_span_id: int | None,
        user_id: str,
        status: Status,
        links: list,
    ) -> SpanData:
        """Build workflow span"""
        attributes = {
            GEN_AI_USER_ID: str(user_id),
            GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value,
            GEN_AI_FRAMEWORK: "dify",
            INPUT_VALUE: json.dumps(trace_info.workflow_run_inputs, ensure_ascii=False),
            OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False),
        }

        if message_span_id is None:
            attributes[GEN_AI_IS_ENTRY] = "true"

        return SpanData(
            trace_id=trace_id,
            parent_span_id=message_span_id,
            span_id=workflow_span_id,
            name="workflow",
            start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time),
            end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time),
            attributes=attributes,
            status=status,
            links=links,
        )

    @staticmethod
    def build_workflow_llm_span(
        trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
    ) -> SpanData:
        """Build LLM span for workflow nodes."""
        process_data = node_execution.process_data or {}
        outputs = node_execution.outputs or {}
        usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {})

        return SpanData(
            trace_id=trace_id,
            parent_span_id=workflow_span_id,
            span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"),
            name="GENERATION",
            start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at),
            end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_SPAN_KIND: GenAISpanKind.GENERATION.value,
                GEN_AI_FRAMEWORK: "dify",
                GEN_AI_MODEL_NAME: process_data.get("model_name", ""),
                GEN_AI_PROVIDER: process_data.get("model_provider", ""),
                GEN_AI_USAGE_INPUT_TOKENS: str(usage_data.get("prompt_tokens", 0)),
                GEN_AI_USAGE_OUTPUT_TOKENS: str(usage_data.get("completion_tokens", 0)),
                GEN_AI_USAGE_TOTAL_TOKENS: str(usage_data.get("total_tokens", 0)),
                GEN_AI_PROMPT: json.dumps(process_data.get("prompts", []), ensure_ascii=False),
                GEN_AI_COMPLETION: str(outputs.get("text", "")),
                GEN_AI_RESPONSE_FINISH_REASON: outputs.get("finish_reason", ""),
                INPUT_VALUE: json.dumps(process_data.get("prompts", []), ensure_ascii=False),
                OUTPUT_VALUE: str(outputs.get("text", "")),
            },
            status=TencentSpanBuilder._get_workflow_node_status(node_execution),
        )

    @staticmethod
    def build_message_span(
        trace_info: MessageTraceInfo, trace_id: int, user_id: str, links: list | None = None
    ) -> SpanData:
        """Build message span."""
        links = links or []
        status = Status(StatusCode.OK)
        if trace_info.error:
            status = Status(StatusCode.ERROR, trace_info.error)

        return SpanData(
            trace_id=trace_id,
            parent_span_id=None,
            span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message"),
            name="message",
            start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time),
            end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_USER_ID: str(user_id),
                GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value,
                GEN_AI_FRAMEWORK: "dify",
                GEN_AI_IS_ENTRY: "true",
                INPUT_VALUE: str(trace_info.inputs or ""),
                OUTPUT_VALUE: str(trace_info.outputs or ""),
            },
            status=status,
            links=links,
        )

    @staticmethod
    def build_tool_span(trace_info: ToolTraceInfo, trace_id: int, parent_span_id: int) -> SpanData:
        """Build tool span."""
        status = Status(StatusCode.OK)
        if trace_info.error:
            status = Status(StatusCode.ERROR, trace_info.error)

        return SpanData(
            trace_id=trace_id,
            parent_span_id=parent_span_id,
            span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "tool"),
            name=trace_info.tool_name,
            start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time),
            end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time),
            attributes={
                GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value,
                GEN_AI_FRAMEWORK: "dify",
                TOOL_NAME: trace_info.tool_name,
                TOOL_DESCRIPTION: "",
                TOOL_PARAMETERS: json.dumps(trace_info.tool_parameters, ensure_ascii=False),
                INPUT_VALUE: json.dumps(trace_info.tool_inputs, ensure_ascii=False),
                OUTPUT_VALUE: str(trace_info.tool_outputs),
            },
            status=status,
        )

    @staticmethod
    def build_retrieval_span(trace_info: DatasetRetrievalTraceInfo, trace_id: int, parent_span_id: int) -> SpanData:
        """Build dataset retrieval span."""
        status = Status(StatusCode.OK)
        if getattr(trace_info, "error", None):
            status = Status(StatusCode.ERROR, trace_info.error)  # type: ignore[arg-type]

        documents_data = TencentSpanBuilder._extract_retrieval_documents(trace_info.documents)

        return SpanData(
            trace_id=trace_id,
            parent_span_id=parent_span_id,
            span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "retrieval"),
            name="retrieval",
            start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time),
            end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time),
            attributes={
                GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value,
                GEN_AI_FRAMEWORK: "dify",
                RETRIEVAL_QUERY: str(trace_info.inputs or ""),
                RETRIEVAL_DOCUMENT: json.dumps(documents_data, ensure_ascii=False),
                INPUT_VALUE: str(trace_info.inputs or ""),
                OUTPUT_VALUE: json.dumps(documents_data, ensure_ascii=False),
            },
            status=status,
        )

    @staticmethod
    def _get_workflow_node_status(node_execution: WorkflowNodeExecution) -> Status:
        """Get workflow node execution status."""
        if node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED:
            return Status(StatusCode.OK)
        elif node_execution.status in [WorkflowNodeExecutionStatus.FAILED, WorkflowNodeExecutionStatus.EXCEPTION]:
            return Status(StatusCode.ERROR, str(node_execution.error))
        return Status(StatusCode.UNSET)

    @staticmethod
    def build_workflow_retrieval_span(
        trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
    ) -> SpanData:
        """Build knowledge retrieval span for workflow nodes."""
        input_value = ""
        if node_execution.inputs:
            input_value = str(node_execution.inputs.get("query", ""))
        output_value = ""
        if node_execution.outputs:
            output_value = json.dumps(node_execution.outputs.get("result", []), ensure_ascii=False)

        return SpanData(
            trace_id=trace_id,
            parent_span_id=workflow_span_id,
            span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"),
            name=node_execution.title,
            start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at),
            end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value,
                GEN_AI_FRAMEWORK: "dify",
                RETRIEVAL_QUERY: input_value,
                RETRIEVAL_DOCUMENT: output_value,
                INPUT_VALUE: input_value,
                OUTPUT_VALUE: output_value,
            },
            status=TencentSpanBuilder._get_workflow_node_status(node_execution),
        )

    @staticmethod
    def build_workflow_tool_span(
        trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
    ) -> SpanData:
        """Build tool span for workflow nodes."""
        tool_des = {}
        if node_execution.metadata:
            tool_des = node_execution.metadata.get(WorkflowNodeExecutionMetadataKey.TOOL_INFO, {})

        return SpanData(
            trace_id=trace_id,
            parent_span_id=workflow_span_id,
            span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"),
            name=node_execution.title,
            start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at),
            end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at),
            attributes={
                GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value,
                GEN_AI_FRAMEWORK: "dify",
                TOOL_NAME: node_execution.title,
                TOOL_DESCRIPTION: json.dumps(tool_des, ensure_ascii=False),
                TOOL_PARAMETERS: json.dumps(node_execution.inputs or {}, ensure_ascii=False),
                INPUT_VALUE: json.dumps(node_execution.inputs or {}, ensure_ascii=False),
                OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False),
            },
            status=TencentSpanBuilder._get_workflow_node_status(node_execution),
        )

    @staticmethod
    def build_workflow_task_span(
        trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
    ) -> SpanData:
        """Build generic task span for workflow nodes."""
        return SpanData(
            trace_id=trace_id,
            parent_span_id=workflow_span_id,
            span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"),
            name=node_execution.title,
            start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at),
            end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_SPAN_KIND: GenAISpanKind.TASK.value,
                GEN_AI_FRAMEWORK: "dify",
                INPUT_VALUE: json.dumps(node_execution.inputs, ensure_ascii=False),
                OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False),
            },
            status=TencentSpanBuilder._get_workflow_node_status(node_execution),
        )

    @staticmethod
    def _extract_retrieval_documents(documents: list[Document]):
        """Extract documents data for retrieval tracing."""
        documents_data = []
        for document in documents:
            document_data = {
                "content": document.page_content,
                "metadata": {
                    "dataset_id": document.metadata.get("dataset_id"),
                    "doc_id": document.metadata.get("doc_id"),
                    "document_id": document.metadata.get("document_id"),
                },
                "score": document.metadata.get("score"),
            }
            documents_data.append(document_data)
        return documents_data
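To make _extract_retrieval_documents concrete: given RAG Document objects, it keeps only the content, a fixed metadata subset, and the score. A sketch with a hand-built Document (assuming Document accepts page_content and metadata keyword arguments, as the fields used above suggest):

from core.rag.models.document import Document

doc = Document(
    page_content="Weaviate stores vectors.",
    metadata={"dataset_id": "ds-1", "doc_id": "d-1", "document_id": "doc-1", "score": 0.87},
)
print(TencentSpanBuilder._extract_retrieval_documents([doc]))
# [{'content': 'Weaviate stores vectors.',
#   'metadata': {'dataset_id': 'ds-1', 'doc_id': 'd-1', 'document_id': 'doc-1'},
#   'score': 0.87}]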
@@ -0,0 +1,317 @@
"""
Tencent APM tracing implementation with separated concerns
"""

import logging

from sqlalchemy import select
from sqlalchemy.orm import Session, sessionmaker

from core.ops.base_trace_instance import BaseTraceInstance
from core.ops.entities.config_entity import TencentConfig
from core.ops.entities.trace_entity import (
    BaseTraceInfo,
    DatasetRetrievalTraceInfo,
    GenerateNameTraceInfo,
    MessageTraceInfo,
    ModerationTraceInfo,
    SuggestedQuestionTraceInfo,
    ToolTraceInfo,
    WorkflowTraceInfo,
)
from core.ops.tencent_trace.client import TencentTraceClient
from core.ops.tencent_trace.entities.tencent_trace_entity import SpanData
from core.ops.tencent_trace.span_builder import TencentSpanBuilder
from core.ops.tencent_trace.utils import TencentTraceUtils
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.entities.workflow_node_execution import (
    WorkflowNodeExecution,
)
from core.workflow.nodes import NodeType
from extensions.ext_database import db
from models import Account, App, TenantAccountJoin, WorkflowNodeExecutionTriggeredFrom

logger = logging.getLogger(__name__)


class TencentDataTrace(BaseTraceInstance):
    """
    Tencent APM trace implementation with single responsibility principle.
    Acts as a coordinator that delegates specific tasks to specialized classes.
    """

    def __init__(self, tencent_config: TencentConfig):
        super().__init__(tencent_config)
        self.trace_client = TencentTraceClient(
            service_name=tencent_config.service_name,
            endpoint=tencent_config.endpoint,
            token=tencent_config.token,
            metrics_export_interval_sec=5,
        )

    def trace(self, trace_info: BaseTraceInfo) -> None:
        """Main tracing entry point - coordinates different trace types."""
        if isinstance(trace_info, WorkflowTraceInfo):
            self.workflow_trace(trace_info)
        elif isinstance(trace_info, MessageTraceInfo):
            self.message_trace(trace_info)
        elif isinstance(trace_info, ModerationTraceInfo):
            pass
        elif isinstance(trace_info, SuggestedQuestionTraceInfo):
            self.suggested_question_trace(trace_info)
        elif isinstance(trace_info, DatasetRetrievalTraceInfo):
            self.dataset_retrieval_trace(trace_info)
        elif isinstance(trace_info, ToolTraceInfo):
            self.tool_trace(trace_info)
        elif isinstance(trace_info, GenerateNameTraceInfo):
            pass

    def api_check(self) -> bool:
        return self.trace_client.api_check()

    def get_project_url(self) -> str:
        return self.trace_client.get_project_url()

    def workflow_trace(self, trace_info: WorkflowTraceInfo) -> None:
        """Handle workflow tracing by coordinating data retrieval and span construction."""
        try:
            trace_id = TencentTraceUtils.convert_to_trace_id(trace_info.workflow_run_id)

            links = []
            if trace_info.trace_id:
                links.append(TencentTraceUtils.create_link(trace_info.trace_id))

            user_id = self._get_user_id(trace_info)

            workflow_spans = TencentSpanBuilder.build_workflow_spans(trace_info, trace_id, str(user_id), links)

            for span in workflow_spans:
                self.trace_client.add_span(span)

            self._process_workflow_nodes(trace_info, trace_id)

        except Exception:
            logger.exception("[Tencent APM] Failed to process workflow trace")

    def message_trace(self, trace_info: MessageTraceInfo) -> None:
        """Handle message tracing."""
        try:
            trace_id = TencentTraceUtils.convert_to_trace_id(trace_info.message_id)
            user_id = self._get_user_id(trace_info)

            links = []
            if trace_info.trace_id:
                links.append(TencentTraceUtils.create_link(trace_info.trace_id))

            message_span = TencentSpanBuilder.build_message_span(trace_info, trace_id, str(user_id), links)

            self.trace_client.add_span(message_span)

        except Exception:
            logger.exception("[Tencent APM] Failed to process message trace")

    def tool_trace(self, trace_info: ToolTraceInfo) -> None:
        """Handle tool tracing."""
        try:
            parent_span_id = None
            trace_root_id = None

            if trace_info.message_id:
                parent_span_id = TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message")
                trace_root_id = trace_info.message_id

            if parent_span_id and trace_root_id:
                trace_id = TencentTraceUtils.convert_to_trace_id(trace_root_id)

                tool_span = TencentSpanBuilder.build_tool_span(trace_info, trace_id, parent_span_id)

                self.trace_client.add_span(tool_span)

        except Exception:
            logger.exception("[Tencent APM] Failed to process tool trace")

    def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo) -> None:
        """Handle dataset retrieval tracing."""
        try:
            parent_span_id = None
            trace_root_id = None

            if trace_info.message_id:
                parent_span_id = TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message")
                trace_root_id = trace_info.message_id

            if parent_span_id and trace_root_id:
                trace_id = TencentTraceUtils.convert_to_trace_id(trace_root_id)

                retrieval_span = TencentSpanBuilder.build_retrieval_span(trace_info, trace_id, parent_span_id)

                self.trace_client.add_span(retrieval_span)

        except Exception:
            logger.exception("[Tencent APM] Failed to process dataset retrieval trace")

    def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo) -> None:
        """Handle suggested question tracing"""
        try:
            logger.info("[Tencent APM] Processing suggested question trace")

        except Exception:
            logger.exception("[Tencent APM] Failed to process suggested question trace")

    def _process_workflow_nodes(self, trace_info: WorkflowTraceInfo, trace_id: int) -> None:
        """Process workflow node executions."""
        try:
            workflow_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "workflow")

            node_executions = self._get_workflow_node_executions(trace_info)

            for node_execution in node_executions:
                try:
                    node_span = self._build_workflow_node_span(node_execution, trace_id, trace_info, workflow_span_id)
                    if node_span:
                        self.trace_client.add_span(node_span)

                    if node_execution.node_type == NodeType.LLM:
                        self._record_llm_metrics(node_execution)
                except Exception:
                    logger.exception("[Tencent APM] Failed to process node execution: %s", node_execution.id)

        except Exception:
            logger.exception("[Tencent APM] Failed to process workflow nodes")

    def _build_workflow_node_span(
        self, node_execution: WorkflowNodeExecution, trace_id: int, trace_info: WorkflowTraceInfo, workflow_span_id: int
    ) -> SpanData | None:
        """Build span for different node types"""
        try:
            if node_execution.node_type == NodeType.LLM:
                return TencentSpanBuilder.build_workflow_llm_span(
                    trace_id, workflow_span_id, trace_info, node_execution
                )
            elif node_execution.node_type == NodeType.KNOWLEDGE_RETRIEVAL:
                return TencentSpanBuilder.build_workflow_retrieval_span(
                    trace_id, workflow_span_id, trace_info, node_execution
                )
            elif node_execution.node_type == NodeType.TOOL:
                return TencentSpanBuilder.build_workflow_tool_span(
                    trace_id, workflow_span_id, trace_info, node_execution
                )
            else:
                # Handle all other node types as generic tasks
                return TencentSpanBuilder.build_workflow_task_span(
                    trace_id, workflow_span_id, trace_info, node_execution
                )
        except Exception:
            logger.debug(
                "[Tencent APM] Error building span for node %s: %s",
                node_execution.id,
                node_execution.node_type,
                exc_info=True,
            )
            return None

    def _get_workflow_node_executions(self, trace_info: WorkflowTraceInfo) -> list[WorkflowNodeExecution]:
        """Retrieve workflow node executions from database."""
        try:
            session_maker = sessionmaker(bind=db.engine)

            with Session(db.engine, expire_on_commit=False) as session:
                app_id = trace_info.metadata.get("app_id")
                if not app_id:
                    raise ValueError("No app_id found in trace_info metadata")

                app_stmt = select(App).where(App.id == app_id)
                app = session.scalar(app_stmt)
                if not app:
                    raise ValueError(f"App with id {app_id} not found")

                if not app.created_by:
                    raise ValueError(f"App with id {app_id} has no creator")

                account_stmt = select(Account).where(Account.id == app.created_by)
                service_account = session.scalar(account_stmt)
                if not service_account:
                    raise ValueError(f"Creator account not found for app {app_id}")

                current_tenant = (
                    session.query(TenantAccountJoin).filter_by(account_id=service_account.id, current=True).first()
                )
                if not current_tenant:
                    raise ValueError(f"Current tenant not found for account {service_account.id}")

                service_account.set_tenant_id(current_tenant.tenant_id)

                repository = SQLAlchemyWorkflowNodeExecutionRepository(
                    session_factory=session_maker,
                    user=service_account,
                    app_id=trace_info.metadata.get("app_id"),
                    triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
                )

                executions = repository.get_by_workflow_run(workflow_run_id=trace_info.workflow_run_id)
                return list(executions)

        except Exception:
            logger.exception("[Tencent APM] Failed to get workflow node executions")
            return []

    def _get_user_id(self, trace_info: BaseTraceInfo) -> str:
        """Get user ID from trace info."""
        try:
            tenant_id = None
            user_id = None

            if isinstance(trace_info, (WorkflowTraceInfo, GenerateNameTraceInfo)):
                tenant_id = trace_info.tenant_id

            if hasattr(trace_info, "metadata") and trace_info.metadata:
                user_id = trace_info.metadata.get("user_id")

            if user_id and tenant_id:
                stmt = (
                    select(Account.name)
                    .join(TenantAccountJoin, Account.id == TenantAccountJoin.account_id)
                    .where(Account.id == user_id, TenantAccountJoin.tenant_id == tenant_id)
                )

                session_maker = sessionmaker(bind=db.engine)
                with session_maker() as session:
                    account_name = session.scalar(stmt)
                    return account_name or str(user_id)
            elif user_id:
                return str(user_id)

            return "anonymous"

        except Exception:
            logger.exception("[Tencent APM] Failed to get user ID")
            return "unknown"

    def _record_llm_metrics(self, node_execution: WorkflowNodeExecution) -> None:
        """Record LLM performance metrics"""
        try:
            if not hasattr(self.trace_client, "record_llm_duration"):
                return

            process_data = node_execution.process_data or {}
            usage = process_data.get("usage", {})
            latency_s = float(usage.get("latency", 0.0))

            if latency_s > 0:
                attributes = {
                    "provider": process_data.get("model_provider", ""),
                    "model": process_data.get("model_name", ""),
                    "span_kind": "GENERATION",
                }
                self.trace_client.record_llm_duration(latency_s, attributes)

        except Exception:
            logger.debug("[Tencent APM] Failed to record LLM metrics")

    def __del__(self):
        """Ensure proper cleanup on garbage collection."""
        try:
            if hasattr(self, "trace_client"):
                self.trace_client.shutdown()
        except Exception:
            pass
@@ -0,0 +1,65 @@
"""
Utility functions for Tencent APM tracing
"""

import hashlib
import random
import uuid
from datetime import datetime
from typing import cast

from opentelemetry.trace import Link, SpanContext, TraceFlags


class TencentTraceUtils:
    """Utility class for common tracing operations."""

    INVALID_SPAN_ID = 0x0000000000000000
    INVALID_TRACE_ID = 0x00000000000000000000000000000000

    @staticmethod
    def convert_to_trace_id(uuid_v4: str | None) -> int:
        try:
            uuid_obj = uuid.UUID(uuid_v4) if uuid_v4 else uuid.uuid4()
        except Exception as e:
            raise ValueError(f"Invalid UUID input: {e}")
        return cast(int, uuid_obj.int)

    @staticmethod
    def convert_to_span_id(uuid_v4: str | None, span_type: str) -> int:
        try:
            uuid_obj = uuid.UUID(uuid_v4) if uuid_v4 else uuid.uuid4()
        except Exception as e:
            raise ValueError(f"Invalid UUID input: {e}")
        combined_key = f"{uuid_obj.hex}-{span_type}"
        hash_bytes = hashlib.sha256(combined_key.encode("utf-8")).digest()
        return int.from_bytes(hash_bytes[:8], byteorder="big", signed=False)

    @staticmethod
    def generate_span_id() -> int:
        span_id = random.getrandbits(64)
        while span_id == TencentTraceUtils.INVALID_SPAN_ID:
            span_id = random.getrandbits(64)
        return span_id

    @staticmethod
    def convert_datetime_to_nanoseconds(start_time: datetime | None) -> int:
        if start_time is None:
            start_time = datetime.now()
        timestamp_in_seconds = start_time.timestamp()
        return int(timestamp_in_seconds * 1e9)

    @staticmethod
    def create_link(trace_id_str: str) -> Link:
        try:
            trace_id = int(trace_id_str, 16) if len(trace_id_str) == 32 else cast(int, uuid.UUID(trace_id_str).int)
        except (ValueError, TypeError):
            trace_id = cast(int, uuid.uuid4().int)

        span_context = SpanContext(
            trace_id=trace_id,
            span_id=TencentTraceUtils.INVALID_SPAN_ID,
            is_remote=False,
            trace_flags=TraceFlags(TraceFlags.SAMPLED),
        )
        return Link(span_context)
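The span-ID derivation above is deterministic per (run UUID, span type) pair, which is what lets the tool and retrieval traces reattach to the "message" parent later without any shared state. A quick self-contained check, re-stating the same hashing outside the class:

import hashlib
import uuid

def convert_to_span_id(uuid_v4: str, span_type: str) -> int:
    # same scheme as TencentTraceUtils: SHA-256 of "<hex>-<type>", first 8 bytes
    combined_key = f"{uuid.UUID(uuid_v4).hex}-{span_type}"
    digest = hashlib.sha256(combined_key.encode("utf-8")).digest()
    return int.from_bytes(digest[:8], byteorder="big", signed=False)

run_id = str(uuid.uuid4())
assert convert_to_span_id(run_id, "message") == convert_to_span_id(run_id, "message")
assert convert_to_span_id(run_id, "message") != convert_to_span_id(run_id, "tool")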
@@ -1,9 +1,24 @@
"""
Weaviate vector database implementation for Dify's RAG system.

This module provides integration with Weaviate vector database for storing and retrieving
document embeddings used in retrieval-augmented generation workflows.
"""

import datetime
import json
import logging
import uuid as _uuid
from typing import Any
from urllib.parse import urlparse

-import weaviate  # type: ignore
+import weaviate
import weaviate.classes.config as wc
from pydantic import BaseModel, model_validator
from weaviate.classes.data import DataObject
from weaviate.classes.init import Auth
from weaviate.classes.query import Filter, MetadataQuery
from weaviate.exceptions import UnexpectedStatusCodeError

from configs import dify_config
from core.rag.datasource.vdb.field import Field
@ -15,265 +30,394 @@ from core.rag.models.document import Document
|
|||
from extensions.ext_redis import redis_client
|
||||
from models.dataset import Dataset
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WeaviateConfig(BaseModel):
|
||||
"""
|
||||
Configuration model for Weaviate connection settings.
|
||||
|
||||
Attributes:
|
||||
endpoint: Weaviate server endpoint URL
|
||||
api_key: Optional API key for authentication
|
||||
batch_size: Number of objects to batch per insert operation
|
||||
"""
|
||||
|
||||
endpoint: str
|
||||
api_key: str | None = None
|
||||
batch_size: int = 100
|
||||
|
||||
@model_validator(mode="before")
|
||||
@classmethod
|
||||
def validate_config(cls, values: dict):
|
||||
def validate_config(cls, values: dict) -> dict:
|
||||
"""Validates that required configuration values are present."""
|
||||
if not values["endpoint"]:
|
||||
raise ValueError("config WEAVIATE_ENDPOINT is required")
|
||||
return values
|
||||
|
||||
|
||||
class WeaviateVector(BaseVector):
|
||||
"""
|
||||
Weaviate vector database implementation for document storage and retrieval.
|
||||
|
||||
Handles creation, insertion, deletion, and querying of document embeddings
|
||||
in a Weaviate collection.
|
||||
"""
|
||||
|
||||
def __init__(self, collection_name: str, config: WeaviateConfig, attributes: list):
|
||||
"""
|
||||
Initializes the Weaviate vector store.
|
||||
|
||||
Args:
|
||||
collection_name: Name of the Weaviate collection
|
||||
config: Weaviate configuration settings
|
||||
attributes: List of metadata attributes to store
|
||||
"""
|
||||
super().__init__(collection_name)
|
||||
self._client = self._init_client(config)
|
||||
self._attributes = attributes
|
||||
|
||||
def _init_client(self, config: WeaviateConfig) -> weaviate.Client:
|
||||
auth_config = weaviate.AuthApiKey(api_key=config.api_key or "")
|
||||
def _init_client(self, config: WeaviateConfig) -> weaviate.WeaviateClient:
|
||||
"""
|
||||
Initializes and returns a connected Weaviate client.
|
||||
|
||||
weaviate.connect.connection.has_grpc = False # ty: ignore [unresolved-attribute]
|
||||
Configures both HTTP and gRPC connections with proper authentication.
|
||||
"""
|
||||
p = urlparse(config.endpoint)
|
||||
host = p.hostname or config.endpoint.replace("https://", "").replace("http://", "")
|
||||
http_secure = p.scheme == "https"
|
||||
http_port = p.port or (443 if http_secure else 80)
|
||||
|
||||
try:
|
||||
client = weaviate.Client(
|
||||
url=config.endpoint, auth_client_secret=auth_config, timeout_config=(5, 60), startup_period=None
|
||||
)
|
||||
except Exception as exc:
|
||||
raise ConnectionError("Vector database connection error") from exc
|
||||
grpc_host = host
|
||||
grpc_secure = http_secure
|
||||
grpc_port = 443 if grpc_secure else 50051
|
||||
|
||||
client.batch.configure(
|
||||
# `batch_size` takes an `int` value to enable auto-batching
|
||||
# (`None` is used for manual batching)
|
||||
batch_size=config.batch_size,
|
||||
# dynamically update the `batch_size` based on import speed
|
||||
dynamic=True,
|
||||
# `timeout_retries` takes an `int` value to retry on time outs
|
||||
timeout_retries=3,
|
||||
client = weaviate.connect_to_custom(
|
||||
http_host=host,
|
||||
http_port=http_port,
|
||||
http_secure=http_secure,
|
||||
grpc_host=grpc_host,
|
||||
grpc_port=grpc_port,
|
||||
grpc_secure=grpc_secure,
|
||||
auth_credentials=Auth.api_key(config.api_key) if config.api_key else None,
|
||||
)
|
||||
|
||||
if not client.is_ready():
|
||||
raise ConnectionError("Vector database is not ready")
|
||||
|
||||
return client
|

    def get_type(self) -> str:
        """Returns the vector database type identifier."""
        return VectorType.WEAVIATE

    def get_collection_name(self, dataset: Dataset) -> str:
        """
        Retrieves or generates the collection name for a dataset.

        Uses existing index structure if available, otherwise generates from dataset ID.
        """
        if dataset.index_struct_dict:
            class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
            if not class_prefix.endswith("_Node"):
                # original class_prefix
                class_prefix += "_Node"

            return class_prefix

        dataset_id = dataset.id
        return Dataset.gen_collection_name_by_id(dataset_id)

    def to_index_struct(self):
    def to_index_struct(self) -> dict:
        """Returns the index structure dictionary for persistence."""
        return {"type": self.get_type(), "vector_store": {"class_prefix": self._collection_name}}

    def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
        # create collection
        """
        Creates a new collection and adds initial documents with embeddings.
        """
        self._create_collection()
        # create vector
        self.add_texts(texts, embeddings)

    def _create_collection(self):
        """
        Creates the Weaviate collection with required schema if it doesn't exist.

        Uses Redis locking to prevent concurrent creation attempts.
        """
        lock_name = f"vector_indexing_lock_{self._collection_name}"
        with redis_client.lock(lock_name, timeout=20):
            collection_exist_cache_key = f"vector_indexing_{self._collection_name}"
            if redis_client.get(collection_exist_cache_key):
            cache_key = f"vector_indexing_{self._collection_name}"
            if redis_client.get(cache_key):
                return
            schema = self._default_schema(self._collection_name)
            if not self._client.schema.contains(schema):
                # create collection
                self._client.schema.create_class(schema)
            redis_client.set(collection_exist_cache_key, 1, ex=3600)

            try:
                if not self._client.collections.exists(self._collection_name):
                    self._client.collections.create(
                        name=self._collection_name,
                        properties=[
                            wc.Property(
                                name=Field.TEXT_KEY.value,
                                data_type=wc.DataType.TEXT,
                                tokenization=wc.Tokenization.WORD,
                            ),
                            wc.Property(name="document_id", data_type=wc.DataType.TEXT),
                            wc.Property(name="doc_id", data_type=wc.DataType.TEXT),
                            wc.Property(name="chunk_index", data_type=wc.DataType.INT),
                        ],
                        vector_config=wc.Configure.Vectors.self_provided(),
                    )

                self._ensure_properties()
                redis_client.set(cache_key, 1, ex=3600)
            except Exception as e:
                logger.exception("Error creating collection %s", self._collection_name)
                raise

    def _ensure_properties(self) -> None:
        """
        Ensures all required properties exist in the collection schema.

        Adds missing properties if the collection exists but lacks them.
        """
        if not self._client.collections.exists(self._collection_name):
            return

        col = self._client.collections.use(self._collection_name)
        cfg = col.config.get()
        existing = {p.name for p in (cfg.properties or [])}

        to_add = []
        if "document_id" not in existing:
            to_add.append(wc.Property(name="document_id", data_type=wc.DataType.TEXT))
        if "doc_id" not in existing:
            to_add.append(wc.Property(name="doc_id", data_type=wc.DataType.TEXT))
        if "chunk_index" not in existing:
            to_add.append(wc.Property(name="chunk_index", data_type=wc.DataType.INT))

        for prop in to_add:
            try:
                col.config.add_property(prop)
            except Exception as e:
                logger.warning("Could not add property %s: %s", prop.name, e)

    def _get_uuids(self, documents: list[Document]) -> list[str]:
        """
        Generates deterministic UUIDs for documents based on their content.

        Uses UUID5 with URL namespace to ensure consistent IDs for identical content.
        """
        URL_NAMESPACE = _uuid.UUID("6ba7b811-9dad-11d1-80b4-00c04fd430c8")

        uuids = []
        for doc in documents:
            uuid_val = _uuid.uuid5(URL_NAMESPACE, doc.page_content)
            uuids.append(str(uuid_val))

        return uuids
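A quick standard-library illustration of the determinism the docstring above relies on:

import uuid

ns = uuid.UUID("6ba7b811-9dad-11d1-80b4-00c04fd430c8")  # same URL namespace as above
a = uuid.uuid5(ns, "identical chunk text")
b = uuid.uuid5(ns, "identical chunk text")
assert a == b  # identical content always yields the same object ID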

    def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
        """
        Adds documents with their embeddings to the collection.

        Batches insertions for efficiency and returns the list of inserted object IDs.
        """
        uuids = self._get_uuids(documents)
        texts = [d.page_content for d in documents]
        metadatas = [d.metadata for d in documents]

        ids = []
        col = self._client.collections.use(self._collection_name)
        objs: list[DataObject] = []
        ids_out: list[str] = []

        with self._client.batch as batch:
            for i, text in enumerate(texts):
                data_properties = {Field.TEXT_KEY: text}
                if metadatas is not None:
                    # metadata maybe None
                    for key, val in (metadatas[i] or {}).items():
                        data_properties[key] = self._json_serializable(val)
        for i, text in enumerate(texts):
            props: dict[str, Any] = {Field.TEXT_KEY.value: text}
            meta = metadatas[i] or {}
            for k, v in meta.items():
                props[k] = self._json_serializable(v)

                batch.add_data_object(
                    data_object=data_properties,
                    class_name=self._collection_name,
                    uuid=uuids[i],
                    vector=embeddings[i] if embeddings else None,
            candidate = uuids[i] if uuids else None
            uid = candidate if (candidate and self._is_uuid(candidate)) else str(_uuid.uuid4())
            ids_out.append(uid)

            vec_payload = None
            if embeddings and i < len(embeddings) and embeddings[i]:
                vec_payload = {"default": embeddings[i]}

            objs.append(
                DataObject(
                    uuid=uid,
                    properties=props,  # type: ignore[arg-type] # mypy incorrectly infers DataObject signature
                    vector=vec_payload,
                )
                ids.append(uuids[i])
        return ids
            )

    def delete_by_metadata_field(self, key: str, value: str):
        # check whether the index already exists
        schema = self._default_schema(self._collection_name)
        if self._client.schema.contains(schema):
            where_filter = {"operator": "Equal", "path": [key], "valueText": value}
        batch_size = max(1, int(dify_config.WEAVIATE_BATCH_SIZE or 100))
        with col.batch.dynamic() as batch:
            for obj in objs:
                batch.add_object(properties=obj.properties, uuid=obj.uuid, vector=obj.vector)

            self._client.batch.delete_objects(class_name=self._collection_name, where=where_filter, output="minimal")
        return ids_out
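As a standalone reference for the v4 batch path the new code lands on, a minimal sketch — the local instance, collection name, and named "default" vector are assumptions; `col.batch.dynamic()` sizes and flushes batches automatically:

import weaviate

client = weaviate.connect_to_local()   # assumes a local instance
col = client.collections.use("Docs")   # assumes this collection already exists
with col.batch.dynamic() as batch:
    batch.add_object(
        properties={"text": "hello world", "doc_id": "abc"},
        vector={"default": [0.1, 0.2, 0.3]},  # named vector matching a self-provided config
    )
client.close()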

    def _is_uuid(self, val: str) -> bool:
        """Validates whether a string is a valid UUID format."""
        try:
            _uuid.UUID(str(val))
            return True
        except Exception:
            return False

    def delete_by_metadata_field(self, key: str, value: str) -> None:
        """Deletes all objects matching a specific metadata field value."""
        if not self._client.collections.exists(self._collection_name):
            return

        col = self._client.collections.use(self._collection_name)
        col.data.delete_many(where=Filter.by_property(key).equal(value))
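The v4 filter-delete idiom in isolation, as a sketch (connection, collection, and property values are illustrative):

import weaviate
from weaviate.classes.query import Filter

client = weaviate.connect_to_local()  # assumes a local instance
col = client.collections.use("Docs")
# remove every chunk that belongs to one source document
col.data.delete_many(where=Filter.by_property("document_id").equal("doc-123"))
client.close()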

    def delete(self):
        # check whether the index already exists
        schema = self._default_schema(self._collection_name)
        if self._client.schema.contains(schema):
            self._client.schema.delete_class(self._collection_name)
        """Deletes the entire collection from Weaviate."""
        if self._client.collections.exists(self._collection_name):
            self._client.collections.delete(self._collection_name)

    def text_exists(self, id: str) -> bool:
        collection_name = self._collection_name
        schema = self._default_schema(self._collection_name)

        # check whether the index already exists
        if not self._client.schema.contains(schema):
        """Checks if a document with the given doc_id exists in the collection."""
        if not self._client.collections.exists(self._collection_name):
            return False
        result = (
            self._client.query.get(collection_name)
            .with_additional(["id"])
            .with_where(
                {
                    "path": ["doc_id"],
                    "operator": "Equal",
                    "valueText": id,
                }
            )
            .with_limit(1)
            .do()

        col = self._client.collections.use(self._collection_name)
        res = col.query.fetch_objects(
            filters=Filter.by_property("doc_id").equal(id),
            limit=1,
            return_properties=["doc_id"],
        )

        if "errors" in result:
            raise ValueError(f"Error during query: {result['errors']}")
        return len(res.objects) > 0

        entries = result["data"]["Get"][collection_name]
        if len(entries) == 0:
            return False

    def delete_by_ids(self, ids: list[str]) -> None:
        """
        Deletes objects by their UUID identifiers.

        return True
        Silently ignores 404 errors for non-existent IDs.
        """
        if not self._client.collections.exists(self._collection_name):
            return

    def delete_by_ids(self, ids: list[str]):
        # check whether the index already exists
        schema = self._default_schema(self._collection_name)
        if self._client.schema.contains(schema):
            for uuid in ids:
                try:
                    self._client.data_object.delete(
                        class_name=self._collection_name,
                        uuid=uuid,
                    )
                except weaviate.UnexpectedStatusCodeException as e:
                    # tolerate not found error
                    if e.status_code != 404:
                        raise e
        col = self._client.collections.use(self._collection_name)

        for uid in ids:
            try:
                col.data.delete_by_id(uid)
            except UnexpectedStatusCodeError as e:
                if getattr(e, "status_code", None) != 404:
                    raise

    def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
        """Look up similar documents by embedding vector in Weaviate."""
        collection_name = self._collection_name
        properties = self._attributes
        properties.append(Field.TEXT_KEY)
        query_obj = self._client.query.get(collection_name, properties)
        """
        Performs vector similarity search using the provided query vector.

        vector = {"vector": query_vector}
        document_ids_filter = kwargs.get("document_ids_filter")
        if document_ids_filter:
            operands = []
            for document_id_filter in document_ids_filter:
                operands.append({"path": ["document_id"], "operator": "Equal", "valueText": document_id_filter})
            where_filter = {"operator": "Or", "operands": operands}
            query_obj = query_obj.with_where(where_filter)
        result = (
            query_obj.with_near_vector(vector)
            .with_limit(kwargs.get("top_k", 4))
            .with_additional(["vector", "distance"])
            .do()
        Filters by document IDs if provided and applies score threshold.
        Returns documents sorted by relevance score.
        """
        if not self._client.collections.exists(self._collection_name):
            return []

        col = self._client.collections.use(self._collection_name)
        props = list({*self._attributes, "document_id", Field.TEXT_KEY.value})

        where = None
        doc_ids = kwargs.get("document_ids_filter") or []
        if doc_ids:
            ors = [Filter.by_property("document_id").equal(x) for x in doc_ids]
            where = ors[0]
            for f in ors[1:]:
                where = where | f

        top_k = int(kwargs.get("top_k", 4))
        score_threshold = float(kwargs.get("score_threshold") or 0.0)

        res = col.query.near_vector(
            near_vector=query_vector,
            limit=top_k,
            return_properties=props,
            return_metadata=MetadataQuery(distance=True),
            include_vector=False,
            filters=where,
            target_vector="default",
        )
        if "errors" in result:
            raise ValueError(f"Error during query: {result['errors']}")

        docs_and_scores = []
        for res in result["data"]["Get"][collection_name]:
            text = res.pop(Field.TEXT_KEY)
            score = 1 - res["_additional"]["distance"]
            docs_and_scores.append((Document(page_content=text, metadata=res), score))
        docs: list[Document] = []
        for obj in res.objects:
            properties = dict(obj.properties or {})
            text = properties.pop(Field.TEXT_KEY.value, "")
            distance = (obj.metadata.distance if obj.metadata else None) or 1.0
            score = 1.0 - distance

        docs = []
        for doc, score in docs_and_scores:
            score_threshold = float(kwargs.get("score_threshold") or 0.0)
            # check score threshold
            if score >= score_threshold:
                if doc.metadata is not None:
                    doc.metadata["score"] = score
                docs.append(doc)
        # Sort the documents by score in descending order
        docs = sorted(docs, key=lambda x: x.metadata.get("score", 0) if x.metadata else 0, reverse=True)
            if score > score_threshold:
                properties["score"] = score
                docs.append(Document(page_content=text, metadata=properties))

        docs.sort(key=lambda d: d.metadata.get("score", 0.0), reverse=True)
        return docs
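The scoring and filter-folding above, reduced to a standalone sketch (values are illustrative; Weaviate reports cosine distance, so similarity is one minus distance):

from functools import reduce
from weaviate.classes.query import Filter

doc_ids = ["doc-1", "doc-2"]
# fold per-document equality filters into a single OR filter
where = reduce(lambda a, b: a | b, [Filter.by_property("document_id").equal(x) for x in doc_ids])

distance = 0.18          # distance returned in the query metadata
score = 1.0 - distance   # 0.82
keep = score > 0.5       # the score_threshold cut applied above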

    def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
        """Return docs using BM25F.

        Args:
            query: Text to look up documents similar to.

        Returns:
            List of Documents most similar to the query.
        """
        collection_name = self._collection_name
        content: dict[str, Any] = {"concepts": [query]}
        properties = self._attributes
        properties.append(Field.TEXT_KEY)
        if kwargs.get("search_distance"):
            content["certainty"] = kwargs.get("search_distance")
        query_obj = self._client.query.get(collection_name, properties)
        document_ids_filter = kwargs.get("document_ids_filter")
        if document_ids_filter:
            operands = []
            for document_id_filter in document_ids_filter:
                operands.append({"path": ["document_id"], "operator": "Equal", "valueText": document_id_filter})
            where_filter = {"operator": "Or", "operands": operands}
            query_obj = query_obj.with_where(where_filter)
        query_obj = query_obj.with_additional(["vector"])
        properties = ["text"]
        result = query_obj.with_bm25(query=query, properties=properties).with_limit(kwargs.get("top_k", 4)).do()
        if "errors" in result:
            raise ValueError(f"Error during query: {result['errors']}")
        docs = []
        for res in result["data"]["Get"][collection_name]:
            text = res.pop(Field.TEXT_KEY)
            additional = res.pop("_additional")
            docs.append(Document(page_content=text, vector=additional["vector"], metadata=res))
        Performs BM25 full-text search on document content.

        Filters by document IDs if provided and returns matching documents with vectors.
        """
        if not self._client.collections.exists(self._collection_name):
            return []

        col = self._client.collections.use(self._collection_name)
        props = list({*self._attributes, Field.TEXT_KEY.value})

        where = None
        doc_ids = kwargs.get("document_ids_filter") or []
        if doc_ids:
            ors = [Filter.by_property("document_id").equal(x) for x in doc_ids]
            where = ors[0]
            for f in ors[1:]:
                where = where | f

        top_k = int(kwargs.get("top_k", 4))

        res = col.query.bm25(
            query=query,
            query_properties=[Field.TEXT_KEY.value],
            limit=top_k,
            return_properties=props,
            include_vector=True,
            filters=where,
        )

        docs: list[Document] = []
        for obj in res.objects:
            properties = dict(obj.properties or {})
            text = properties.pop(Field.TEXT_KEY.value, "")

            vec = obj.vector
            if isinstance(vec, dict):
                vec = vec.get("default") or next(iter(vec.values()), None)

            docs.append(Document(page_content=text, vector=vec, metadata=properties))
        return docs
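And the v4 BM25 call in isolation, as a sketch (connection, collection, and property names assumed):

import weaviate
from weaviate.classes.query import Filter

client = weaviate.connect_to_local()  # assumes a local instance
col = client.collections.use("Docs")
res = col.query.bm25(
    query="how to configure tracing",
    query_properties=["text"],  # rank on the chunk text only
    limit=4,
    include_vector=True,        # also return stored vectors
    filters=Filter.by_property("document_id").equal("doc-123"),
)
for obj in res.objects:
    print(obj.properties["text"])
client.close()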

    def _default_schema(self, index_name: str):
        return {
            "class": index_name,
            "properties": [
                {
                    "name": "text",
                    "dataType": ["text"],
                }
            ],
        }

    def _json_serializable(self, value: Any):
    def _json_serializable(self, value: Any) -> Any:
        """Converts values to JSON-serializable format, handling datetime objects."""
        if isinstance(value, datetime.datetime):
            return value.isoformat()
        return value


class WeaviateVectorFactory(AbstractVectorFactory):
    """Factory class for creating WeaviateVector instances."""

    def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> WeaviateVector:
        """
        Initializes a WeaviateVector instance for the given dataset.

        Uses existing collection name from dataset index structure or generates a new one.
        Updates dataset index structure if not already set.
        """
        if dataset.index_struct_dict:
            class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
            collection_name = class_prefix

@@ -281,7 +425,6 @@ class WeaviateVectorFactory(AbstractVectorFactory):
            dataset_id = dataset.id
            collection_name = Dataset.gen_collection_name_by_id(dataset_id)
            dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.WEAVIATE, collection_name))

        return WeaviateVector(
            collection_name=collection_name,
            config=WeaviateConfig(
@@ -1,10 +1,11 @@
import logging
import time as time_module
from datetime import datetime
from typing import Any
from typing import Any, cast

from pydantic import BaseModel
from sqlalchemy import update
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import Session

from configs import dify_config

@@ -267,7 +268,7 @@ def _execute_provider_updates(updates_to_perform: list[_ProviderUpdateOperation]

    # Build and execute the update statement
    stmt = update(Provider).where(*where_conditions).values(**update_values)
    result = session.execute(stmt)
    result = cast(CursorResult, session.execute(stmt))
    rows_affected = result.rowcount

    logger.debug(
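For context on the `cast(CursorResult, ...)` change repeated throughout this commit: SQLAlchemy 2.0 types `Session.execute()` as a generic `Result`, which does not declare `rowcount`, so the cast narrows the type for the checker only. A runnable sketch against a throwaway in-memory SQLite database:

from typing import cast

from sqlalchemy import create_engine, text
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import Session

engine = create_engine("sqlite://")  # scratch in-memory DB
with Session(engine) as session:
    session.execute(text("CREATE TABLE t (id INTEGER)"))
    session.execute(text("INSERT INTO t VALUES (1), (2)"))
    result = cast(CursorResult, session.execute(text("DELETE FROM t")))
    print(result.rowcount)  # 2 -- the cast changes nothing at runtime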

@@ -64,7 +64,10 @@ def build_from_mapping(
    config: FileUploadConfig | None = None,
    strict_type_validation: bool = False,
) -> File:
    transfer_method = FileTransferMethod.value_of(mapping.get("transfer_method"))
    transfer_method_value = mapping.get("transfer_method")
    if not transfer_method_value:
        raise ValueError("transfer_method is required in file mapping")
    transfer_method = FileTransferMethod.value_of(transfer_method_value)

    build_functions: dict[FileTransferMethod, Callable] = {
        FileTransferMethod.LOCAL_FILE: _build_from_local_file,

@@ -104,6 +107,8 @@ def build_from_mappings(
) -> Sequence[File]:
    # TODO(QuantumGhost): Performance concern - each mapping triggers a separate database query.
    # Implement batch processing to reduce database load when handling multiple files.
    # Filter out None/empty mappings to avoid errors
    valid_mappings = [m for m in mappings if m and m.get("transfer_method")]
    files = [
        build_from_mapping(
            mapping=mapping,

@@ -111,7 +116,7 @@ def build_from_mappings(
            config=config,
            strict_type_validation=strict_type_validation,
        )
        for mapping in mappings
        for mapping in valid_mappings
    ]

    if (
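The new guard and filter, shown on toy data (the mapping contents are hypothetical):

mappings = [
    {"transfer_method": "local_file", "upload_file_id": "f-1"},
    {},    # missing transfer_method -> filtered out
    None,  # empty entry -> filtered out
]
valid_mappings = [m for m in mappings if m and m.get("transfer_method")]
assert valid_mappings == [{"transfer_method": "local_file", "upload_file_id": "f-1"}]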

@@ -13,7 +13,7 @@ dependencies = [
    "celery~=5.5.2",
    "chardet~=5.1.0",
    "flask~=3.1.2",
    "flask-compress~=1.17",
    "flask-compress>=1.17,<1.18",
    "flask-cors~=6.0.0",
    "flask-login~=0.6.3",
    "flask-migrate~=4.0.7",

@@ -86,6 +86,7 @@ dependencies = [
    "sendgrid~=6.12.3",
    "flask-restx~=1.3.0",
    "packaging~=23.2",
    "weaviate-client==4.17.0",
]
# Before adding new dependency, consider place it in
# alphabet order (a-z) and suitable group.

@@ -214,7 +215,7 @@ vdb = [
    "tidb-vector==0.0.9",
    "upstash-vector==0.6.0",
    "volcengine-compat~=1.0.0",
    "weaviate-client~=3.24.0",
    "weaviate-client>=4.0.0,<5.0.0",
    "xinference-client~=1.2.2",
    "mo-vector~=0.1.13",
    "mysql-connector-python>=9.3.0",
@@ -7,8 +7,10 @@ using SQLAlchemy 2.0 style queries for WorkflowNodeExecutionModel operations.

from collections.abc import Sequence
from datetime import datetime
from typing import cast

from sqlalchemy import asc, delete, desc, select
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import Session, sessionmaker

from models.workflow import WorkflowNodeExecutionModel

@@ -181,7 +183,7 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut

        # Delete the batch
        delete_stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids))
        result = session.execute(delete_stmt)
        result = cast(CursorResult, session.execute(delete_stmt))
        session.commit()
        total_deleted += result.rowcount

@@ -228,7 +230,7 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut

        # Delete the batch
        delete_stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids))
        result = session.execute(delete_stmt)
        result = cast(CursorResult, session.execute(delete_stmt))
        session.commit()
        total_deleted += result.rowcount

@@ -285,6 +287,6 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut

        with self._session_maker() as session:
            stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids))
            result = session.execute(stmt)
            result = cast(CursorResult, session.execute(stmt))
            session.commit()
            return result.rowcount
@@ -22,8 +22,10 @@ Implementation Notes:
import logging
from collections.abc import Sequence
from datetime import datetime
from typing import cast

from sqlalchemy import delete, select
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import Session, sessionmaker

from libs.infinite_scroll_pagination import InfiniteScrollPagination

@@ -150,7 +152,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):

        with self._session_maker() as session:
            stmt = delete(WorkflowRun).where(WorkflowRun.id.in_(run_ids))
            result = session.execute(stmt)
            result = cast(CursorResult, session.execute(stmt))
            session.commit()

            deleted_count = result.rowcount

@@ -186,7 +188,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):

        # Delete the batch
        delete_stmt = delete(WorkflowRun).where(WorkflowRun.id.in_(run_ids))
        result = session.execute(delete_stmt)
        result = cast(CursorResult, session.execute(delete_stmt))
        session.commit()

        batch_deleted = result.rowcount
@@ -102,6 +102,15 @@ class OpsService:
            except Exception:
                new_decrypt_tracing_config.update({"project_url": "https://arms.console.aliyun.com/"})

        if tracing_provider == "tencent" and (
            "project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url")
        ):
            try:
                project_url = OpsTraceManager.get_trace_config_project_url(decrypt_tracing_config, tracing_provider)
                new_decrypt_tracing_config.update({"project_url": project_url})
            except Exception:
                new_decrypt_tracing_config.update({"project_url": "https://console.cloud.tencent.com/apm"})

        trace_config_data.tracing_config = new_decrypt_tracing_config
        return trace_config_data.to_dict()

@@ -144,7 +153,7 @@ class OpsService:
                project_url = f"{tracing_config.get('host')}/project/{project_key}"
            except Exception:
                project_url = None
        elif tracing_provider in ("langsmith", "opik"):
        elif tracing_provider in ("langsmith", "opik", "tencent"):
            try:
                project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider)
            except Exception:
@@ -86,12 +86,16 @@ class WorkflowAppService:
            ),
        )
        if created_by_account:
            account = session.scalar(select(Account).where(Account.email == created_by_account))
            if not account:
                raise ValueError(f"Account not found: {created_by_account}")

            stmt = stmt.join(
                Account,
                and_(
                    WorkflowAppLog.created_by == Account.id,
                    WorkflowAppLog.created_by_role == CreatorUserRole.ACCOUNT,
                    Account.email == created_by_account,
                    Account.id == account.id,
                ),
            )

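The rationale behind this hunk, demonstrated on a toy schema (the two-table model below is hypothetical; the real service joins WorkflowAppLog to Account the same way): resolve the account once by email, then join on the immutable ID, so logs written before an email change are still found.

from sqlalchemy import ForeignKey, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class Account(Base):
    __tablename__ = "accounts"
    id: Mapped[int] = mapped_column(primary_key=True)
    email: Mapped[str]

class Log(Base):
    __tablename__ = "logs"
    id: Mapped[int] = mapped_column(primary_key=True)
    created_by: Mapped[int] = mapped_column(ForeignKey("accounts.id"))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    acc = Account(id=1, email="old@example.com")
    session.add_all([acc, Log(id=1, created_by=1)])
    session.flush()
    acc.email = "new@example.com"  # the email changes; the ID does not
    found = session.scalar(select(Account).where(Account.email == "new@example.com"))
    logs = session.scalars(
        select(Log).join(Account, Log.created_by == Account.id).where(Account.id == found.id)
    ).all()
    assert len(logs) == 1  # old logs still match after the email change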

@@ -789,6 +789,31 @@ class TestWorkflowAppService:
        assert result_account_filter["total"] == 3
        assert all(log.created_by_role == CreatorUserRole.ACCOUNT for log in result_account_filter["data"])

        # Test filtering by changed account email
        original_email = account.email
        new_email = "changed@example.com"
        account.email = new_email
        db_session_with_containers.commit()

        assert account.email == new_email

        # Results for the new email are expected to match those for the original email
        result_with_new_email = service.get_paginate_workflow_app_logs(
            session=db_session_with_containers, app_model=app, created_by_account=new_email, page=1, limit=20
        )
        assert result_with_new_email["total"] == 3
        assert all(log.created_by_role == CreatorUserRole.ACCOUNT for log in result_with_new_email["data"])

        # The old email is now unbound; as unexpected input it should raise ValueError
        with pytest.raises(ValueError) as exc_info:
            service.get_paginate_workflow_app_logs(
                session=db_session_with_containers, app_model=app, created_by_account=original_email, page=1, limit=20
            )
        assert "Account not found" in str(exc_info.value)

        account.email = original_email
        db_session_with_containers.commit()

        # Test filtering by non-existent session ID
        result_no_session = service.get_paginate_workflow_app_logs(
            session=db_session_with_containers,

@@ -799,15 +824,16 @@ class TestWorkflowAppService:
        )
        assert result_no_session["total"] == 0

        # Test filtering by non-existent account email
        result_no_account = service.get_paginate_workflow_app_logs(
            session=db_session_with_containers,
            app_model=app,
            created_by_account="nonexistent@example.com",
            page=1,
            limit=20,
        )
        assert result_no_account["total"] == 0
        # Test filtering by a non-existent account email; as unexpected input it should raise ValueError
        with pytest.raises(ValueError) as exc_info:
            service.get_paginate_workflow_app_logs(
                session=db_session_with_containers,
                app_model=app,
                created_by_account="nonexistent@example.com",
                page=1,
                limit=20,
            )
        assert "Account not found" in str(exc_info.value)

    def test_get_paginate_workflow_app_logs_with_uuid_keyword_search(
        self, db_session_with_containers, mock_external_service_dependencies

@@ -1057,15 +1083,15 @@ class TestWorkflowAppService:
        assert len(result_no_session["data"]) == 0

        # Test with account email that doesn't exist
        result_no_account = service.get_paginate_workflow_app_logs(
            session=db_session_with_containers,
            app_model=app,
            created_by_account="nonexistent@example.com",
            page=1,
            limit=20,
        )
        assert result_no_account["total"] == 0
        assert len(result_no_account["data"]) == 0
        with pytest.raises(ValueError) as exc_info:
            service.get_paginate_workflow_app_logs(
                session=db_session_with_containers,
                app_model=app,
                created_by_account="nonexistent@example.com",
                page=1,
                limit=20,
            )
        assert "Account not found" in str(exc_info.value)

    def test_get_paginate_workflow_app_logs_with_complex_query_combinations(
        self, db_session_with_containers, mock_external_service_dependencies

@@ -0,0 +1,401 @@
"""
TestContainers-based integration tests for mail_owner_transfer_task.

This module provides comprehensive integration tests for the mail owner transfer tasks
using TestContainers to ensure real email service integration and proper functionality
testing with actual database and service dependencies.
"""

import logging
from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from libs.email_i18n import EmailType
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from tasks.mail_owner_transfer_task import (
    send_new_owner_transfer_notify_email_task,
    send_old_owner_transfer_notify_email_task,
    send_owner_transfer_confirm_task,
)

logger = logging.getLogger(__name__)


class TestMailOwnerTransferTask:
    """Integration tests for mail owner transfer tasks using testcontainers."""

    @pytest.fixture
    def mock_mail_dependencies(self):
        """Mock setup for mail service dependencies."""
        with (
            patch("tasks.mail_owner_transfer_task.mail") as mock_mail,
            patch("tasks.mail_owner_transfer_task.get_email_i18n_service") as mock_get_email_service,
        ):
            # Setup mock mail service
            mock_mail.is_inited.return_value = True

            # Setup mock email service
            mock_email_service = MagicMock()
            mock_get_email_service.return_value = mock_email_service

            yield {
                "mail": mock_mail,
                "email_service": mock_email_service,
                "get_email_service": mock_get_email_service,
            }
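Worth noting about this fixture: `patch()` must target a name where it is looked up, not where it is defined. The tests patch "tasks.mail_owner_transfer_task.mail", which implies the task module binds `mail` in its own namespace (presumably via an import at module load). A minimal illustration of the rule, using the same module path as the tests:

from unittest.mock import patch

# Patching the original defining module would not affect the copy already
# bound inside the task module, so the test patches that binding instead:
with patch("tasks.mail_owner_transfer_task.mail") as mock_mail:
    mock_mail.is_inited.return_value = False  # simulates an uninitialized mail service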

    def _create_test_account_and_tenant(self, db_session_with_containers):
        """
        Helper method to create test account and tenant for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure

        Returns:
            tuple: (account, tenant) - Created account and tenant instances
        """
        fake = Faker()

        # Create account
        account = Account(
            email=fake.email(),
            name=fake.name(),
            interface_language="en-US",
            status="active",
        )
        db_session_with_containers.add(account)
        db_session_with_containers.commit()

        # Create tenant
        tenant = Tenant(
            name=fake.company(),
            status="normal",
        )
        db_session_with_containers.add(tenant)
        db_session_with_containers.commit()

        # Create tenant-account join
        join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=TenantAccountRole.OWNER.value,
            current=True,
        )
        db_session_with_containers.add(join)
        db_session_with_containers.commit()

        return account, tenant

    def test_send_owner_transfer_confirm_task_success(self, db_session_with_containers, mock_mail_dependencies):
        """
        Test successful owner transfer confirmation email sending.

        This test verifies:
        - Proper email service initialization check
        - Correct email service method calls with right parameters
        - Email template context is properly constructed
        """
        # Arrange: Create test data
        account, tenant = self._create_test_account_and_tenant(db_session_with_containers)

        test_language = "en-US"
        test_email = account.email
        test_code = "123456"
        test_workspace = tenant.name

        # Act: Execute the task
        send_owner_transfer_confirm_task(
            language=test_language,
            to=test_email,
            code=test_code,
            workspace=test_workspace,
        )

        # Assert: Verify the expected outcomes
        mock_mail_dependencies["mail"].is_inited.assert_called_once()
        mock_mail_dependencies["get_email_service"].assert_called_once()

        # Verify email service was called with correct parameters
        mock_mail_dependencies["email_service"].send_email.assert_called_once()
        call_args = mock_mail_dependencies["email_service"].send_email.call_args

        assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_CONFIRM
        assert call_args[1]["language_code"] == test_language
        assert call_args[1]["to"] == test_email
        assert call_args[1]["template_context"]["to"] == test_email
        assert call_args[1]["template_context"]["code"] == test_code
        assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace

    def test_send_owner_transfer_confirm_task_mail_not_initialized(
        self, db_session_with_containers, mock_mail_dependencies
    ):
        """
        Test owner transfer confirmation email when mail service is not initialized.

        This test verifies:
        - Early return when mail service is not initialized
        - No email service calls are made
        - No exceptions are raised
        """
        # Arrange: Set mail service as not initialized
        mock_mail_dependencies["mail"].is_inited.return_value = False

        test_language = "en-US"
        test_email = "test@example.com"
        test_code = "123456"
        test_workspace = "Test Workspace"

        # Act: Execute the task
        send_owner_transfer_confirm_task(
            language=test_language,
            to=test_email,
            code=test_code,
            workspace=test_workspace,
        )

        # Assert: Verify no email service calls were made
        mock_mail_dependencies["get_email_service"].assert_not_called()
        mock_mail_dependencies["email_service"].send_email.assert_not_called()

    def test_send_owner_transfer_confirm_task_exception_handling(
        self, db_session_with_containers, mock_mail_dependencies
    ):
        """
        Test exception handling in owner transfer confirmation email.

        This test verifies:
        - Exceptions are properly caught and logged
        - No exceptions are propagated to caller
        - Email service calls are attempted
        - Error logging works correctly
        """
        # Arrange: Setup email service to raise exception
        mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error")

        test_language = "en-US"
        test_email = "test@example.com"
        test_code = "123456"
        test_workspace = "Test Workspace"

        # Act & Assert: Verify no exception is raised
        try:
            send_owner_transfer_confirm_task(
                language=test_language,
                to=test_email,
                code=test_code,
                workspace=test_workspace,
            )
        except Exception as e:
            pytest.fail(f"Task should not raise exceptions, but raised: {e}")

        # Verify email service was called despite the exception
        mock_mail_dependencies["email_service"].send_email.assert_called_once()

    def test_send_old_owner_transfer_notify_email_task_success(
        self, db_session_with_containers, mock_mail_dependencies
    ):
        """
        Test successful old owner transfer notification email sending.

        This test verifies:
        - Proper email service initialization check
        - Correct email service method calls with right parameters
        - Email template context includes new owner email
        """
        # Arrange: Create test data
        account, tenant = self._create_test_account_and_tenant(db_session_with_containers)

        test_language = "en-US"
        test_email = account.email
        test_workspace = tenant.name
        test_new_owner_email = "newowner@example.com"

        # Act: Execute the task
        send_old_owner_transfer_notify_email_task(
            language=test_language,
            to=test_email,
            workspace=test_workspace,
            new_owner_email=test_new_owner_email,
        )

        # Assert: Verify the expected outcomes
        mock_mail_dependencies["mail"].is_inited.assert_called_once()
        mock_mail_dependencies["get_email_service"].assert_called_once()

        # Verify email service was called with correct parameters
        mock_mail_dependencies["email_service"].send_email.assert_called_once()
        call_args = mock_mail_dependencies["email_service"].send_email.call_args

        assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_OLD_NOTIFY
        assert call_args[1]["language_code"] == test_language
        assert call_args[1]["to"] == test_email
        assert call_args[1]["template_context"]["to"] == test_email
        assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace
        assert call_args[1]["template_context"]["NewOwnerEmail"] == test_new_owner_email

    def test_send_old_owner_transfer_notify_email_task_mail_not_initialized(
        self, db_session_with_containers, mock_mail_dependencies
    ):
        """
        Test old owner transfer notification email when mail service is not initialized.

        This test verifies:
        - Early return when mail service is not initialized
        - No email service calls are made
        - No exceptions are raised
        """
        # Arrange: Set mail service as not initialized
        mock_mail_dependencies["mail"].is_inited.return_value = False

        test_language = "en-US"
        test_email = "test@example.com"
        test_workspace = "Test Workspace"
        test_new_owner_email = "newowner@example.com"

        # Act: Execute the task
        send_old_owner_transfer_notify_email_task(
            language=test_language,
            to=test_email,
            workspace=test_workspace,
            new_owner_email=test_new_owner_email,
        )

        # Assert: Verify no email service calls were made
        mock_mail_dependencies["get_email_service"].assert_not_called()
        mock_mail_dependencies["email_service"].send_email.assert_not_called()

    def test_send_old_owner_transfer_notify_email_task_exception_handling(
        self, db_session_with_containers, mock_mail_dependencies
    ):
        """
        Test exception handling in old owner transfer notification email.

        This test verifies:
        - Exceptions are properly caught and logged
        - No exceptions are propagated to caller
        - Email service calls are attempted
        - Error logging works correctly
        """
        # Arrange: Setup email service to raise exception
        mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error")

        test_language = "en-US"
        test_email = "test@example.com"
        test_workspace = "Test Workspace"
        test_new_owner_email = "newowner@example.com"

        # Act & Assert: Verify no exception is raised
        try:
            send_old_owner_transfer_notify_email_task(
                language=test_language,
                to=test_email,
                workspace=test_workspace,
                new_owner_email=test_new_owner_email,
            )
        except Exception as e:
            pytest.fail(f"Task should not raise exceptions, but raised: {e}")

        # Verify email service was called despite the exception
        mock_mail_dependencies["email_service"].send_email.assert_called_once()

    def test_send_new_owner_transfer_notify_email_task_success(
        self, db_session_with_containers, mock_mail_dependencies
    ):
        """
        Test successful new owner transfer notification email sending.

        This test verifies:
        - Proper email service initialization check
        - Correct email service method calls with right parameters
        - Email template context is properly constructed
        """
        # Arrange: Create test data
        account, tenant = self._create_test_account_and_tenant(db_session_with_containers)

        test_language = "en-US"
        test_email = account.email
        test_workspace = tenant.name

        # Act: Execute the task
        send_new_owner_transfer_notify_email_task(
            language=test_language,
            to=test_email,
            workspace=test_workspace,
        )

        # Assert: Verify the expected outcomes
        mock_mail_dependencies["mail"].is_inited.assert_called_once()
        mock_mail_dependencies["get_email_service"].assert_called_once()

        # Verify email service was called with correct parameters
        mock_mail_dependencies["email_service"].send_email.assert_called_once()
        call_args = mock_mail_dependencies["email_service"].send_email.call_args

        assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_NEW_NOTIFY
        assert call_args[1]["language_code"] == test_language
        assert call_args[1]["to"] == test_email
        assert call_args[1]["template_context"]["to"] == test_email
        assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace

    def test_send_new_owner_transfer_notify_email_task_mail_not_initialized(
        self, db_session_with_containers, mock_mail_dependencies
    ):
        """
        Test new owner transfer notification email when mail service is not initialized.

        This test verifies:
        - Early return when mail service is not initialized
        - No email service calls are made
        - No exceptions are raised
        """
        # Arrange: Set mail service as not initialized
        mock_mail_dependencies["mail"].is_inited.return_value = False

        test_language = "en-US"
        test_email = "test@example.com"
        test_workspace = "Test Workspace"

        # Act: Execute the task
        send_new_owner_transfer_notify_email_task(
            language=test_language,
            to=test_email,
            workspace=test_workspace,
        )

        # Assert: Verify no email service calls were made
        mock_mail_dependencies["get_email_service"].assert_not_called()
        mock_mail_dependencies["email_service"].send_email.assert_not_called()

    def test_send_new_owner_transfer_notify_email_task_exception_handling(
        self, db_session_with_containers, mock_mail_dependencies
    ):
        """
        Test exception handling in new owner transfer notification email.

        This test verifies:
        - Exceptions are properly caught and logged
        - No exceptions are propagated to caller
        - Email service calls are attempted
        - Error logging works correctly
        """
        # Arrange: Setup email service to raise exception
        mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error")

        test_language = "en-US"
        test_email = "test@example.com"
        test_workspace = "Test Workspace"

        # Act & Assert: Verify no exception is raised
        try:
            send_new_owner_transfer_notify_email_task(
                language=test_language,
                to=test_email,
                workspace=test_workspace,
            )
        except Exception as e:
            pytest.fail(f"Task should not raise exceptions, but raised: {e}")

        # Verify email service was called despite the exception
        mock_mail_dependencies["email_service"].send_email.assert_called_once()
api/uv.lock: 1981 changes (file diff suppressed because it is too large)

@@ -329,7 +329,7 @@ services:

  # The Weaviate vector store.
  weaviate:
    image: semitechnologies/weaviate:1.19.0
    image: semitechnologies/weaviate:1.27.0
    profiles:
      - ""
      - weaviate

@@ -181,7 +181,7 @@ services:

  # The Weaviate vector store.
  weaviate:
    image: semitechnologies/weaviate:1.19.0
    image: semitechnologies/weaviate:1.27.0
    profiles:
      - ""
      - weaviate

@@ -206,6 +206,7 @@ services:
      AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai}
    ports:
      - "${EXPOSE_WEAVIATE_PORT:-8080}:8080"
      - "${EXPOSE_WEAVIATE_GRPC_PORT:-50051}:50051"

networks:
  # create a network between sandbox, api and ssrf_proxy, and can not access outside.

@@ -0,0 +1,9 @@
services:
  api:
    volumes:
      - ../api/core/rag/datasource/vdb/weaviate/weaviate_vector.py:/app/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py:ro
    command: >
      sh -c "
      pip install --no-cache-dir 'weaviate>=4.0.0' &&
      /bin/bash /entrypoint.sh
      "

@@ -936,7 +936,7 @@ services:

  # The Weaviate vector store.
  weaviate:
    image: semitechnologies/weaviate:1.19.0
    image: semitechnologies/weaviate:1.27.0
    profiles:
      - ""
      - weaviate
@@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next'
import { useBoolean } from 'ahooks'
import TracingIcon from './tracing-icon'
import ProviderPanel from './provider-panel'
import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type'
import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type'
import { TracingProvider } from './type'
import ProviderConfigModal from './provider-config-modal'
import Indicator from '@/app/components/header/indicator'

@@ -30,7 +30,8 @@ export type PopupProps = {
  opikConfig: OpikConfig | null
  weaveConfig: WeaveConfig | null
  aliyunConfig: AliyunConfig | null
  onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => void
  tencentConfig: TencentConfig | null
  onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => void
  onConfigRemoved: (provider: TracingProvider) => void
}

@@ -48,6 +49,7 @@ const ConfigPopup: FC<PopupProps> = ({
  opikConfig,
  weaveConfig,
  aliyunConfig,
  tencentConfig,
  onConfigUpdated,
  onConfigRemoved,
}) => {

@@ -81,8 +83,8 @@ const ConfigPopup: FC<PopupProps> = ({
    hideConfigModal()
  }, [currentProvider, hideConfigModal, onConfigRemoved])

  const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig && aliyunConfig
  const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig && !aliyunConfig
  const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig && aliyunConfig && tencentConfig
  const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig && !aliyunConfig && !tencentConfig

  const switchContent = (
    <Switch

@@ -182,6 +184,19 @@ const ConfigPopup: FC<PopupProps> = ({
      key="aliyun-provider-panel"
    />
  )

  const tencentPanel = (
    <ProviderPanel
      type={TracingProvider.tencent}
      readOnly={readOnly}
      config={tencentConfig}
      hasConfigured={!!tencentConfig}
      onConfig={handleOnConfig(TracingProvider.tencent)}
      isChosen={chosenProvider === TracingProvider.tencent}
      onChoose={handleOnChoose(TracingProvider.tencent)}
      key="tencent-provider-panel"
    />
  )
  const configuredProviderPanel = () => {
    const configuredPanels: JSX.Element[] = []

@@ -206,6 +221,9 @@ const ConfigPopup: FC<PopupProps> = ({
    if (aliyunConfig)
      configuredPanels.push(aliyunPanel)

    if (tencentConfig)
      configuredPanels.push(tencentPanel)

    return configuredPanels
  }

@@ -233,6 +251,9 @@ const ConfigPopup: FC<PopupProps> = ({
    if (!aliyunConfig)
      notConfiguredPanels.push(aliyunPanel)

    if (!tencentConfig)
      notConfiguredPanels.push(tencentPanel)

    return notConfiguredPanels
  }

@@ -249,6 +270,8 @@ const ConfigPopup: FC<PopupProps> = ({
      return opikConfig
    if (currentProvider === TracingProvider.aliyun)
      return aliyunConfig
    if (currentProvider === TracingProvider.tencent)
      return tencentConfig
    return weaveConfig
  }

@@ -297,6 +320,7 @@ const ConfigPopup: FC<PopupProps> = ({
          {arizePanel}
          {phoenixPanel}
          {aliyunPanel}
          {tencentPanel}
        </div>
      </>
    )

@@ -8,4 +8,5 @@ export const docURL = {
  [TracingProvider.opik]: 'https://www.comet.com/docs/opik/tracing/integrations/dify#setup-instructions',
  [TracingProvider.weave]: 'https://weave-docs.wandb.ai/',
  [TracingProvider.aliyun]: 'https://help.aliyun.com/zh/arms/tracing-analysis/untitled-document-1750672984680',
  [TracingProvider.tencent]: 'https://cloud.tencent.com/document/product/248/116531',
}

@@ -8,12 +8,12 @@ import {
import { useTranslation } from 'react-i18next'
import { usePathname } from 'next/navigation'
import { useBoolean } from 'ahooks'
import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type'
import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type'
import { TracingProvider } from './type'
import TracingIcon from './tracing-icon'
import ConfigButton from './config-button'
import cn from '@/utils/classnames'
import { AliyunIcon, ArizeIcon, LangfuseIcon, LangsmithIcon, OpikIcon, PhoenixIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing'
import { AliyunIcon, ArizeIcon, LangfuseIcon, LangsmithIcon, OpikIcon, PhoenixIcon, TencentIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing'
import Indicator from '@/app/components/header/indicator'
import { fetchTracingConfig as doFetchTracingConfig, fetchTracingStatus, updateTracingStatus } from '@/service/apps'
import type { TracingStatus } from '@/models/app'

@@ -71,6 +71,7 @@ const Panel: FC = () => {
    [TracingProvider.opik]: OpikIcon,
    [TracingProvider.weave]: WeaveIcon,
    [TracingProvider.aliyun]: AliyunIcon,
    [TracingProvider.tencent]: TencentIcon,
  }
  const InUseProviderIcon = inUseTracingProvider ? providerIconMap[inUseTracingProvider] : undefined

@@ -81,7 +82,8 @@ const Panel: FC = () => {
  const [opikConfig, setOpikConfig] = useState<OpikConfig | null>(null)
  const [weaveConfig, setWeaveConfig] = useState<WeaveConfig | null>(null)
  const [aliyunConfig, setAliyunConfig] = useState<AliyunConfig | null>(null)
  const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig)
  const [tencentConfig, setTencentConfig] = useState<TencentConfig | null>(null)
  const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig || tencentConfig)

  const fetchTracingConfig = async () => {
    const getArizeConfig = async () => {

@@ -119,6 +121,11 @@ const Panel: FC = () => {
      if (!aliyunHasNotConfig)
        setAliyunConfig(aliyunConfig as AliyunConfig)
    }
    const getTencentConfig = async () => {
      const { tracing_config: tencentConfig, has_not_configured: tencentHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.tencent })
      if (!tencentHasNotConfig)
        setTencentConfig(tencentConfig as TencentConfig)
    }
    Promise.all([
      getArizeConfig(),
      getPhoenixConfig(),

@@ -127,6 +134,7 @@ const Panel: FC = () => {
      getOpikConfig(),
      getWeaveConfig(),
      getAliyunConfig(),
      getTencentConfig(),
    ])
  }

@@ -147,6 +155,8 @@ const Panel: FC = () => {
      setWeaveConfig(tracing_config as WeaveConfig)
    else if (provider === TracingProvider.aliyun)
      setAliyunConfig(tracing_config as AliyunConfig)
    else if (provider === TracingProvider.tencent)
      setTencentConfig(tracing_config as TencentConfig)
  }

  const handleTracingConfigRemoved = (provider: TracingProvider) => {

@@ -164,6 +174,8 @@ const Panel: FC = () => {
      setWeaveConfig(null)
    else if (provider === TracingProvider.aliyun)
      setAliyunConfig(null)
    else if (provider === TracingProvider.tencent)
      setTencentConfig(null)
    if (provider === inUseTracingProvider) {
      handleTracingStatusChange({
        enabled: false,

@@ -209,6 +221,7 @@ const Panel: FC = () => {
        opikConfig={opikConfig}
        weaveConfig={weaveConfig}
        aliyunConfig={aliyunConfig}
        tencentConfig={tencentConfig}
        onConfigUpdated={handleTracingConfigUpdated}
        onConfigRemoved={handleTracingConfigRemoved}
      >

@@ -245,6 +258,7 @@ const Panel: FC = () => {
        opikConfig={opikConfig}
        weaveConfig={weaveConfig}
        aliyunConfig={aliyunConfig}
        tencentConfig={tencentConfig}
        onConfigUpdated={handleTracingConfigUpdated}
        onConfigRemoved={handleTracingConfigRemoved}
      >
@ -4,7 +4,7 @@ import React, { useCallback, useState } from 'react'
|
|||
import { useTranslation } from 'react-i18next'
|
||||
import { useBoolean } from 'ahooks'
|
||||
import Field from './field'
|
||||
import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type'
|
||||
import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type'
|
||||
import { TracingProvider } from './type'
|
||||
import { docURL } from './config'
|
||||
import {
|
||||
|
|
@ -22,10 +22,10 @@ import Divider from '@/app/components/base/divider'
|
|||
type Props = {
|
||||
appId: string
|
||||
type: TracingProvider
|
||||
payload?: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | null
|
||||
payload?: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig | null
|
||||
onRemoved: () => void
|
||||
onCancel: () => void
|
||||
onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => void
|
||||
onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => void
|
||||
onChosen: (provider: TracingProvider) => void
|
||||
}
|
||||
|
||||
|
|
@ -77,6 +77,12 @@ const aliyunConfigTemplate = {
|
|||
endpoint: '',
|
||||
}
|
||||
|
||||
const tencentConfigTemplate = {
|
||||
token: '',
|
||||
endpoint: '',
|
||||
service_name: '',
|
||||
}
|
||||
|
||||
const ProviderConfigModal: FC<Props> = ({
|
||||
appId,
|
||||
type,
|
||||
|
|
@ -90,7 +96,7 @@ const ProviderConfigModal: FC<Props> = ({
|
|||
const isEdit = !!payload
|
||||
const isAdd = !isEdit
|
||||
const [isSaving, setIsSaving] = useState(false)
|
||||
const [config, setConfig] = useState<ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig>((() => {
|
||||
const [config, setConfig] = useState<ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig>((() => {
|
||||
if (isEdit)
|
||||
return payload
|
||||
|
||||
|
|
@ -112,6 +118,9 @@ const ProviderConfigModal: FC<Props> = ({
|
|||
else if (type === TracingProvider.aliyun)
|
||||
return aliyunConfigTemplate
|
||||
|
||||
else if (type === TracingProvider.tencent)
|
||||
return tencentConfigTemplate
|
||||
|
||||
return weaveConfigTemplate
|
||||
})())
|
||||
const [isShowRemoveConfirm, {
|
||||
|
|
@ -202,6 +211,16 @@ const ProviderConfigModal: FC<Props> = ({
|
|||
errorMessage = t('common.errorMsg.fieldRequired', { field: 'Endpoint' })
|
||||
}
|
||||
|
||||
if (type === TracingProvider.tencent) {
|
||||
const postData = config as TencentConfig
|
||||
if (!errorMessage && !postData.token)
|
||||
errorMessage = t('common.errorMsg.fieldRequired', { field: 'Token' })
|
||||
if (!errorMessage && !postData.endpoint)
|
||||
errorMessage = t('common.errorMsg.fieldRequired', { field: 'Endpoint' })
|
||||
if (!errorMessage && !postData.service_name)
|
||||
errorMessage = t('common.errorMsg.fieldRequired', { field: 'Service Name' })
|
||||
}
|
||||
|
||||
return errorMessage
|
||||
}, [config, t, type])
|
||||
const handleSave = useCallback(async () => {
|
||||
|
|
@@ -338,6 +357,34 @@ const ProviderConfigModal: FC<Props> = ({
             />
           </>
         )}
+        {type === TracingProvider.tencent && (
+          <>
+            <Field
+              label='Token'
+              labelClassName='!text-sm'
+              isRequired
+              value={(config as TencentConfig).token}
+              onChange={handleConfigChange('token')}
+              placeholder={t(`${I18N_PREFIX}.placeholder`, { key: 'Token' })!}
+            />
+            <Field
+              label='Endpoint'
+              labelClassName='!text-sm'
+              isRequired
+              value={(config as TencentConfig).endpoint}
+              onChange={handleConfigChange('endpoint')}
+              placeholder='https://your-region.cls.tencentcs.com'
+            />
+            <Field
+              label='Service Name'
+              labelClassName='!text-sm'
+              isRequired
+              value={(config as TencentConfig).service_name}
+              onChange={handleConfigChange('service_name')}
+              placeholder='dify_app'
+            />
+          </>
+        )}
         {type === TracingProvider.weave && (
           <>
             <Field
@@ -7,7 +7,7 @@ import {
 import { useTranslation } from 'react-i18next'
 import { TracingProvider } from './type'
 import cn from '@/utils/classnames'
-import { AliyunIconBig, ArizeIconBig, LangfuseIconBig, LangsmithIconBig, OpikIconBig, PhoenixIconBig, WeaveIconBig } from '@/app/components/base/icons/src/public/tracing'
+import { AliyunIconBig, ArizeIconBig, LangfuseIconBig, LangsmithIconBig, OpikIconBig, PhoenixIconBig, TencentIconBig, WeaveIconBig } from '@/app/components/base/icons/src/public/tracing'
 import { Eye as View } from '@/app/components/base/icons/src/vender/solid/general'
 
 const I18N_PREFIX = 'app.tracing'
@@ -31,6 +31,7 @@ const getIcon = (type: TracingProvider) => {
     [TracingProvider.opik]: OpikIconBig,
     [TracingProvider.weave]: WeaveIconBig,
     [TracingProvider.aliyun]: AliyunIconBig,
+    [TracingProvider.tencent]: TencentIconBig,
   })[type]
 }
 
@@ -6,6 +6,7 @@ export enum TracingProvider {
   opik = 'opik',
   weave = 'weave',
   aliyun = 'aliyun',
+  tencent = 'tencent',
 }
 
 export type ArizeConfig = {
@@ -53,3 +54,9 @@ export type AliyunConfig = {
   license_key: string
   endpoint: string
 }
+
+export type TencentConfig = {
+  token: string
+  endpoint: string
+  service_name: string
+}
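For reference, a payload of the new type looks like the sketch below. The values are illustrative only, reusing the placeholders the config modal shows (`https://your-region.cls.tencentcs.com` for the endpoint, `dify_app` for the service name):

```ts
import type { TencentConfig } from './type'

// Illustrative values only; real deployments supply their own token and endpoint.
const tencentPayload: TencentConfig = {
  token: 'your-apm-token',
  endpoint: 'https://your-region.cls.tencentcs.com',
  service_name: 'dify_app',
}

export default tencentPayload
```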
@@ -36,6 +36,7 @@ const Header: FC<IHeaderProps> = ({
   appData,
   currentConversationId,
   inputsForms,
+  allInputsHidden,
 } = useEmbeddedChatbotContext()
 
 const isClient = typeof window !== 'undefined'
@@ -124,7 +125,7 @@ const Header: FC<IHeaderProps> = ({
         </ActionButton>
       </Tooltip>
     )}
-    {currentConversationId && inputsForms.length > 0 && (
+    {currentConversationId && inputsForms.length > 0 && !allInputsHidden && (
       <ViewFormDropdown />
     )}
   </div>
@@ -171,7 +172,7 @@ const Header: FC<IHeaderProps> = ({
         </ActionButton>
       </Tooltip>
     )}
-    {currentConversationId && inputsForms.length > 0 && (
+    {currentConversationId && inputsForms.length > 0 && !allInputsHidden && (
      <ViewFormDropdown iconColor={theme?.colorPathOnHeader} />
     )}
   </div>
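Both render paths now also require `!allInputsHidden` before showing the view-form dropdown, so a conversation whose inputs are all hidden no longer offers an empty form. The flag comes from the embedded-chatbot context; a minimal sketch of how such a flag can be derived, assuming each input form carries a `hide` marker (the real derivation lives in the context hook):

```ts
// Hypothetical shape; the actual input-form type lives in the chatbot context.
type InputForm = { variable: string; hide?: boolean }

const allInputsHidden = (inputsForms: InputForm[]): boolean =>
  inputsForms.length > 0 && inputsForms.every(form => !!form.hide)

console.log(allInputsHidden([{ variable: 'topic', hide: true }])) // true
```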
@@ -1,13 +0,0 @@
-export { default as Chunk } from './Chunk'
-export { default as Collapse } from './Collapse'
-export { default as Divider } from './Divider'
-export { default as File } from './File'
-export { default as GeneralType } from './GeneralType'
-export { default as LayoutRight2LineMod } from './LayoutRight2LineMod'
-export { default as OptionCardEffectBlueLight } from './OptionCardEffectBlueLight'
-export { default as OptionCardEffectBlue } from './OptionCardEffectBlue'
-export { default as OptionCardEffectOrange } from './OptionCardEffectOrange'
-export { default as OptionCardEffectPurple } from './OptionCardEffectPurple'
-export { default as ParentChildType } from './ParentChildType'
-export { default as SelectionMod } from './SelectionMod'
-export { default as Watercrawl } from './Watercrawl'

@@ -1,5 +0,0 @@
-.wrapper {
-  display: inline-flex;
-  background: url(~@/app/components/base/icons/assets/image/llm/baichuan-text-cn.png) center center no-repeat;
-  background-size: contain;
-}

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import cn from '@/utils/classnames'
-import s from './BaichuanTextCn.module.css'
-
-const Icon = (
-  {
-    ref,
-    className,
-    ...restProps
-  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
-    ref?: React.RefObject<HTMLSpanElement>;
-  },
-) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />
-
-Icon.displayName = 'BaichuanTextCn'
-
-export default Icon

@@ -1,5 +0,0 @@
-.wrapper {
-  display: inline-flex;
-  background: url(~@/app/components/base/icons/assets/image/llm/minimax.png) center center no-repeat;
-  background-size: contain;
-}

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import cn from '@/utils/classnames'
-import s from './Minimax.module.css'
-
-const Icon = (
-  {
-    ref,
-    className,
-    ...restProps
-  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
-    ref?: React.RefObject<HTMLSpanElement>;
-  },
-) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />
-
-Icon.displayName = 'Minimax'
-
-export default Icon

@@ -1,5 +0,0 @@
-.wrapper {
-  display: inline-flex;
-  background: url(~@/app/components/base/icons/assets/image/llm/minimax-text.png) center center no-repeat;
-  background-size: contain;
-}

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import cn from '@/utils/classnames'
-import s from './MinimaxText.module.css'
-
-const Icon = (
-  {
-    ref,
-    className,
-    ...restProps
-  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
-    ref?: React.RefObject<HTMLSpanElement>;
-  },
-) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />
-
-Icon.displayName = 'MinimaxText'
-
-export default Icon

@@ -1,5 +0,0 @@
-.wrapper {
-  display: inline-flex;
-  background: url(~@/app/components/base/icons/assets/image/llm/tongyi.png) center center no-repeat;
-  background-size: contain;
-}

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import cn from '@/utils/classnames'
-import s from './Tongyi.module.css'
-
-const Icon = (
-  {
-    ref,
-    className,
-    ...restProps
-  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
-    ref?: React.RefObject<HTMLSpanElement>;
-  },
-) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />
-
-Icon.displayName = 'Tongyi'
-
-export default Icon

@@ -1,5 +0,0 @@
-.wrapper {
-  display: inline-flex;
-  background: url(~@/app/components/base/icons/assets/image/llm/tongyi-text.png) center center no-repeat;
-  background-size: contain;
-}

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import cn from '@/utils/classnames'
-import s from './TongyiText.module.css'
-
-const Icon = (
-  {
-    ref,
-    className,
-    ...restProps
-  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
-    ref?: React.RefObject<HTMLSpanElement>;
-  },
-) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />
-
-Icon.displayName = 'TongyiText'
-
-export default Icon

@@ -1,5 +0,0 @@
-.wrapper {
-  display: inline-flex;
-  background: url(~@/app/components/base/icons/assets/image/llm/tongyi-text-cn.png) center center no-repeat;
-  background-size: contain;
-}

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import cn from '@/utils/classnames'
-import s from './TongyiTextCn.module.css'
-
-const Icon = (
-  {
-    ref,
-    className,
-    ...restProps
-  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
-    ref?: React.RefObject<HTMLSpanElement>;
-  },
-) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />
-
-Icon.displayName = 'TongyiTextCn'
-
-export default Icon

@@ -1,5 +0,0 @@
-.wrapper {
-  display: inline-flex;
-  background: url(~@/app/components/base/icons/assets/image/llm/wxyy.png) center center no-repeat;
-  background-size: contain;
-}

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import cn from '@/utils/classnames'
-import s from './Wxyy.module.css'
-
-const Icon = (
-  {
-    ref,
-    className,
-    ...restProps
-  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
-    ref?: React.RefObject<HTMLSpanElement>;
-  },
-) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />
-
-Icon.displayName = 'Wxyy'
-
-export default Icon

@@ -1,5 +0,0 @@
-.wrapper {
-  display: inline-flex;
-  background: url(~@/app/components/base/icons/assets/image/llm/wxyy-text.png) center center no-repeat;
-  background-size: contain;
-}

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import cn from '@/utils/classnames'
-import s from './WxyyText.module.css'
-
-const Icon = (
-  {
-    ref,
-    className,
-    ...restProps
-  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
-    ref?: React.RefObject<HTMLSpanElement>;
-  },
-) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />
-
-Icon.displayName = 'WxyyText'
-
-export default Icon

@@ -1,5 +0,0 @@
-.wrapper {
-  display: inline-flex;
-  background: url(~@/app/components/base/icons/assets/image/llm/wxyy-text-cn.png) center center no-repeat;
-  background-size: contain;
-}

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import cn from '@/utils/classnames'
-import s from './WxyyTextCn.module.css'
-
-const Icon = (
-  {
-    ref,
-    className,
-    ...restProps
-  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
-    ref?: React.RefObject<HTMLSpanElement>;
-  },
-) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />
-
-Icon.displayName = 'WxyyTextCn'
-
-export default Icon

@@ -1,9 +0,0 @@
-export { default as BaichuanTextCn } from './BaichuanTextCn'
-export { default as MinimaxText } from './MinimaxText'
-export { default as Minimax } from './Minimax'
-export { default as TongyiTextCn } from './TongyiTextCn'
-export { default as TongyiText } from './TongyiText'
-export { default as Tongyi } from './Tongyi'
-export { default as WxyyTextCn } from './WxyyTextCn'
-export { default as WxyyText } from './WxyyText'
-export { default as Wxyy } from './Wxyy'

@@ -1 +0,0 @@
-export { default as Checked } from './Checked'
@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './WebReader.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'WebReader'
-
-export default Icon

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './Wikipedia.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'Wikipedia'
-
-export default Icon

@@ -1,7 +0,0 @@
-export { default as Google } from './Google'
-export { default as PartnerDark } from './PartnerDark'
-export { default as PartnerLight } from './PartnerLight'
-export { default as VerifiedDark } from './VerifiedDark'
-export { default as VerifiedLight } from './VerifiedLight'
-export { default as WebReader } from './WebReader'
-export { default as Wikipedia } from './Wikipedia'

@@ -18,3 +18,4 @@ const Icon = (
 Icon.displayName = 'DataSet'
 
 export default Icon
+
@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './Loading.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'Loading'
-
-export default Icon

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './Search.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'Search'
-
-export default Icon

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './ThoughtList.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'ThoughtList'
-
-export default Icon

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './WebReader.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'WebReader'
-
-export default Icon

@@ -1,5 +1,2 @@
 export { default as DataSet } from './DataSet'
-export { default as Loading } from './Loading'
-export { default as Search } from './Search'
-export { default as ThoughtList } from './ThoughtList'
 export { default as WebReader } from './WebReader'
@@ -0,0 +1,170 @@
+{
+  "icon": {
+    "type": "element",
+    "name": "svg",
+    "attributes": {
+      "width": "80px",
+      "height": "18px",
+      "viewBox": "0 0 80 18",
+      "version": "1.1"
+    },
+    "isRootNode": true,
+    "children": [
+      {
+        "type": "element",
+        "name": "title",
+        "attributes": {},
+        "children": []
+      },
+      {
+        "type": "element",
+        "name": "g",
+        "attributes": {
+          "id": "页面-1",
+          "stroke": "none",
+          "stroke-width": "1",
+          "fill": "none",
+          "fill-rule": "evenodd"
+        },
+        "children": [
+          {
+            "type": "element",
+            "name": "g",
+            "attributes": {
+              "id": "logo",
+              "fill-rule": "nonzero"
+            },
+            "children": [
+              {
+                "type": "element",
+                "name": "g",
+                "attributes": {
+                  "id": "XMLID_25_",
+                  "transform": "translate(30.592488, 1.100000)",
+                  "fill": "#253554"
+                },
+                "children": [
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M30.8788968,0.6 L21.8088578,0.6 L21.8088578,1.9 L24.5604427,1.9 L24.5604427,6.7 L21.2993051,6.7 L21.2993051,8 L24.5604427,8 L24.5604427,15.9 L26.089101,15.9 L26.089101,8 L29.5540597,8 L29.5540597,15.6 L32.3056445,15.6 L32.3056445,14.3 L31.0827179,14.3 L31.0827179,0.6 L30.8788968,0.6 Z M25.9871904,6.5 L25.9871904,1.9 L29.5540597,1.9 L29.5540597,6.7 L26.089101,6.7 L26.089101,6.5 L25.9871904,6.5 Z",
+                      "id": "XMLID_38_"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "polygon",
+                    "attributes": {
+                      "id": "XMLID_14_",
+                      "points": "5.60508028 12.2 12.8407294 12.2 12.8407294 13.5 5.60508028 13.5"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M0.611463304,9.8 C0.611463304,12.1 0.509552753,14 0,15.5 C0,15.6 0,15.6 0.101910551,15.6 C0.101910551,15.6 1.22292661,15.6 1.42674771,15.6 C1.93630046,13.4 1.93630046,11.6 1.93630046,10.3 L3.77069037,10.3 L3.77069037,14.3 L2.54776377,14.3 C2.44585321,14.3 2.44585321,14.3 2.44585321,14.4 L2.85349542,15.6 L5.19743808,15.6 L5.19743808,0.6 L0.713373854,0.6 L0.611463304,9.8 L0.611463304,9.8 Z M2.03821101,9.2 L2.03821101,6.2 L3.87260092,6.2 L3.87260092,9.4 L2.03821101,9.4 L2.03821101,9.2 Z M3.87260092,1.9 L3.87260092,5 L2.03821101,5 L2.03821101,1.9 L3.87260092,1.9 Z",
+                      "id": "XMLID_33_"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M13.3502821,5.9 L15.0827615,5.9 L15.0827615,4.7 L9.88532341,4.7 C9.98723396,4.3 10.0891445,3.8 10.3948762,3.5 L14.8789404,3.5 L14.8789404,2.3 L13.6560138,2.3 C13.7579243,1.6 14.1655665,0.7 14.1655665,0.7 C14.1655665,0.6 14.1655665,0.6 14.063656,0.6 L12.9426399,0.6 L12.4330872,2.3 L10.8025184,2.3 C10.9044289,1.6 11.0063395,0.8 11.2101606,0.1 C11.2101606,0 11.2101606,0 11.10825,0 C11.0063395,0 10.1910551,0 9.88532341,0 C9.78341286,0.9 9.68150231,1.7 9.37577066,2.4 L8.4585757,2.4 L7.94902295,0.7 L6.82800689,0.7 C6.72609634,0.7 6.72609634,0.7 6.72609634,0.8 C6.72609634,0.9 6.92991744,1.7 7.23564909,2.4 L6.01272249,2.4 L6.01272249,3.6 L8.8662179,3.6 C8.76430735,4 8.6623968,4.5 8.35666515,4.8 L5.60508028,4.8 L5.60508028,6 L7.74520185,6 C6.82800689,7.2 6.01272249,7.7 5.60508028,8 C5.60508028,8.1 5.60508028,9.3 5.60508028,9.3 C5.60508028,9.4 5.70699083,9.4 5.80890138,9.3 C6.21654359,9.2 6.72609634,8.8 7.03182799,8.4 L12.025445,8.4 L12.025445,10.2 L8.15284405,10.2 L8.2547546,9.1 C8.2547546,9 8.2547546,9 8.15284405,9 C8.0509335,9 6.92991744,9 6.92991744,9 L6.82800689,11.2 C6.82800689,11.3 6.82800689,11.3 6.92991744,11.3 C7.03182799,11.3 13.6560138,11.3 13.6560138,11.3 L13.6560138,14.5 L10.7006078,14.5 C10.5986973,14.5 10.5986973,14.5 10.5986973,14.6 L11.0063395,15.8 L15.2865826,15.8 L15.2865826,10.2 L13.6560138,10.2 L13.6560138,7.8 C14.2674771,8.3 14.8789404,8.8 15.4904037,9 C15.5923142,9.1 15.6942248,9.1 15.6942248,9 C15.6942248,9 15.6942248,7.8 15.6942248,7.7 C15.0827615,7.5 14.1655665,7 13.3502821,5.9 Z M11.7197133,5.9 C11.9235344,6.4 12.3311766,6.9 12.7388188,7.2 L8.35666515,7.2 C8.76430735,6.8 8.96812845,6.3 9.37577066,5.9 L11.7197133,5.9 L11.7197133,5.9 Z",
+                      "id": "XMLID_30_"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M22.6241422,11.3 C22.6241422,11.3 21.4012156,12.2 20.178289,13.1 L20.178289,4.7 L16.9171514,4.7 L16.9171514,6.2 L18.7515413,6.2 L18.7515413,14.3 C18.2419886,14.7 17.8343464,14.8 17.8343464,14.8 L18.7515413,15.9 L22.7260528,13 L22.6241422,11.3 C22.9298739,11.3 22.8279633,11.2 22.6241422,11.3 Z",
+                      "id": "XMLID_8_"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M18.9553624,3.4 L20.3821101,3.4 C20.5859312,3.4 20.5859312,3.3 20.5859312,3.3 L18.5477202,0.2 L17.019062,0.2 L16.9171514,0.3 C17.019062,0.4 18.9553624,3.4 18.9553624,3.4 Z",
+                      "id": "XMLID_7_"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "rect",
+                    "attributes": {
+                      "id": "XMLID_6_",
+                      "x": "35.2610505",
+                      "y": "0.9",
+                      "width": "11.4139817",
+                      "height": "1.5"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M39.4393831,7.8 L48.4075115,7.8 L48.4075115,6.3 L33.6304817,6.3 L33.6304817,7.8 L37.7069037,7.8 C36.7897088,10 34.8534083,15.4 34.7514978,15.5 C34.7514978,15.6 34.7514978,15.6 34.8534083,15.6 L47.5922271,15.6 C47.6941377,15.6 47.6941377,15.5 47.6941377,15.5 L45.8597478,10.6 L44.3310895,10.6 C44.229179,10.6 44.229179,10.7 44.229179,10.7 C44.229179,10.8 45.5540161,14.2 45.5540161,14.2 L37.197351,14.2 L39.4393831,7.8 Z",
+                      "id": "XMLID_5_"
+                    },
+                    "children": []
+                  }
+                ]
+              },
+              {
+                "type": "element",
+                "name": "g",
+                "attributes": {
+                  "id": "XMLID_19_"
+                },
+                "children": [
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M22.5,14.7 C22.1,15.1 21.3,15.7 19.9,15.7 C19.3,15.7 18.6,15.7 18.3,15.7 C17.9,15.7 14.9,15.7 11.3,15.7 C13.9,13.2 16.1,11.1 16.3,10.9 C16.5,10.7 17,10.2 17.5,9.8 C18.5,8.9 19.3,8.8 20,8.8 C21,8.8 21.8,9.2 22.5,9.8 C23.9,11.1 23.9,13.4 22.5,14.7 M24.2,8.2 C23.2,7.1 21.7,6.4 20.1,6.4 C18.7,6.4 17.5,6.9 16.4,7.7 C16,8.1 15.4,8.5 14.9,9.1 C14.5,9.5 5.9,17.9 5.9,17.9 C6.4,18 7,18 7.5,18 C8,18 18,18 18.4,18 C19.2,18 19.8,18 20.4,17.9 C21.7,17.8 23,17.3 24.1,16.3 C26.4,14.1 26.4,10.4 24.2,8.2 Z",
+                      "id": "XMLID_22_",
+                      "fill": "#00A3FF"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M10.2,7.6 C9.1,6.8 8,6.4 6.7,6.4 C5.1,6.4 3.6,7.1 2.6,8.2 C0.4,10.5 0.4,14.1 2.7,16.4 C3.7,17.3 4.7,17.8 5.9,17.9 L8.2,15.7 C7.8,15.7 7.3,15.7 6.9,15.7 C5.6,15.6 4.8,15.2 4.3,14.7 C2.9,13.3 2.9,11.1 4.2,9.7 C4.9,9 5.7,8.7 6.7,8.7 C7.3,8.7 8.2,8.8 9.1,9.7 C9.5,10.1 10.6,10.9 11,11.3 L11.1,11.3 L12.6,9.8 L12.6,9.7 C11.9,9 10.8,8.1 10.2,7.6",
+                      "id": "XMLID_2_",
+                      "fill": "#00C8DC"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M20.7,5.1 C19.6,2.1 16.7,0 13.4,0 C9.5,0 6.4,2.9 5.8,6.5 C6.1,6.5 6.4,6.4 6.8,6.4 C7.2,6.4 7.7,6.5 8.1,6.5 L8.1,6.5 C8.6,4 10.8,2.2 13.4,2.2 C15.6,2.2 17.5,3.5 18.4,5.4 C18.4,5.4 18.5,5.5 18.5,5.4 C19.2,5.3 20,5.1 20.7,5.1 C20.7,5.2 20.7,5.2 20.7,5.1",
+                      "id": "XMLID_1_",
+                      "fill": "#006EFF"
+                    },
+                    "children": []
+                  }
+                ]
+              }
+            ]
+          }
+        ]
+      }
+    ]
+  },
+  "name": "TencentIcon"
+}
@@ -2,7 +2,7 @@
 // DON NOT EDIT IT MANUALLY
 
 import * as React from 'react'
-import data from './Checked.json'
+import data from './TencentIcon.json'
 import IconBase from '@/app/components/base/icons/IconBase'
 import type { IconData } from '@/app/components/base/icons/IconBase'
 
@@ -15,6 +15,6 @@ const Icon = (
   },
 ) => <IconBase {...props} ref={ref} data={data as IconData} />
 
-Icon.displayName = 'Checked'
+Icon.displayName = 'TencentIcon'
 
 export default Icon
@@ -0,0 +1,170 @@
+{
+  "icon": {
+    "type": "element",
+    "name": "svg",
+    "attributes": {
+      "width": "80px",
+      "height": "18px",
+      "viewBox": "0 0 80 18",
+      "version": "1.1"
+    },
+    "isRootNode": true,
+    "children": [
+      {
+        "type": "element",
+        "name": "title",
+        "attributes": {},
+        "children": []
+      },
+      {
+        "type": "element",
+        "name": "g",
+        "attributes": {
+          "id": "页面-1",
+          "stroke": "none",
+          "stroke-width": "1",
+          "fill": "none",
+          "fill-rule": "evenodd"
+        },
+        "children": [
+          {
+            "type": "element",
+            "name": "g",
+            "attributes": {
+              "id": "logo",
+              "fill-rule": "nonzero"
+            },
+            "children": [
+              {
+                "type": "element",
+                "name": "g",
+                "attributes": {
+                  "id": "XMLID_25_",
+                  "transform": "translate(30.592488, 1.100000)",
+                  "fill": "#253554"
+                },
+                "children": [
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M30.8788968,0.6 L21.8088578,0.6 L21.8088578,1.9 L24.5604427,1.9 L24.5604427,6.7 L21.2993051,6.7 L21.2993051,8 L24.5604427,8 L24.5604427,15.9 L26.089101,15.9 L26.089101,8 L29.5540597,8 L29.5540597,15.6 L32.3056445,15.6 L32.3056445,14.3 L31.0827179,14.3 L31.0827179,0.6 L30.8788968,0.6 Z M25.9871904,6.5 L25.9871904,1.9 L29.5540597,1.9 L29.5540597,6.7 L26.089101,6.7 L26.089101,6.5 L25.9871904,6.5 Z",
+                      "id": "XMLID_38_"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "polygon",
+                    "attributes": {
+                      "id": "XMLID_14_",
+                      "points": "5.60508028 12.2 12.8407294 12.2 12.8407294 13.5 5.60508028 13.5"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M0.611463304,9.8 C0.611463304,12.1 0.509552753,14 0,15.5 C0,15.6 0,15.6 0.101910551,15.6 C0.101910551,15.6 1.22292661,15.6 1.42674771,15.6 C1.93630046,13.4 1.93630046,11.6 1.93630046,10.3 L3.77069037,10.3 L3.77069037,14.3 L2.54776377,14.3 C2.44585321,14.3 2.44585321,14.3 2.44585321,14.4 L2.85349542,15.6 L5.19743808,15.6 L5.19743808,0.6 L0.713373854,0.6 L0.611463304,9.8 L0.611463304,9.8 Z M2.03821101,9.2 L2.03821101,6.2 L3.87260092,6.2 L3.87260092,9.4 L2.03821101,9.4 L2.03821101,9.2 Z M3.87260092,1.9 L3.87260092,5 L2.03821101,5 L2.03821101,1.9 L3.87260092,1.9 Z",
+                      "id": "XMLID_33_"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M13.3502821,5.9 L15.0827615,5.9 L15.0827615,4.7 L9.88532341,4.7 C9.98723396,4.3 10.0891445,3.8 10.3948762,3.5 L14.8789404,3.5 L14.8789404,2.3 L13.6560138,2.3 C13.7579243,1.6 14.1655665,0.7 14.1655665,0.7 C14.1655665,0.6 14.1655665,0.6 14.063656,0.6 L12.9426399,0.6 L12.4330872,2.3 L10.8025184,2.3 C10.9044289,1.6 11.0063395,0.8 11.2101606,0.1 C11.2101606,0 11.2101606,0 11.10825,0 C11.0063395,0 10.1910551,0 9.88532341,0 C9.78341286,0.9 9.68150231,1.7 9.37577066,2.4 L8.4585757,2.4 L7.94902295,0.7 L6.82800689,0.7 C6.72609634,0.7 6.72609634,0.7 6.72609634,0.8 C6.72609634,0.9 6.92991744,1.7 7.23564909,2.4 L6.01272249,2.4 L6.01272249,3.6 L8.8662179,3.6 C8.76430735,4 8.6623968,4.5 8.35666515,4.8 L5.60508028,4.8 L5.60508028,6 L7.74520185,6 C6.82800689,7.2 6.01272249,7.7 5.60508028,8 C5.60508028,8.1 5.60508028,9.3 5.60508028,9.3 C5.60508028,9.4 5.70699083,9.4 5.80890138,9.3 C6.21654359,9.2 6.72609634,8.8 7.03182799,8.4 L12.025445,8.4 L12.025445,10.2 L8.15284405,10.2 L8.2547546,9.1 C8.2547546,9 8.2547546,9 8.15284405,9 C8.0509335,9 6.92991744,9 6.92991744,9 L6.82800689,11.2 C6.82800689,11.3 6.82800689,11.3 6.92991744,11.3 C7.03182799,11.3 13.6560138,11.3 13.6560138,11.3 L13.6560138,14.5 L10.7006078,14.5 C10.5986973,14.5 10.5986973,14.5 10.5986973,14.6 L11.0063395,15.8 L15.2865826,15.8 L15.2865826,10.2 L13.6560138,10.2 L13.6560138,7.8 C14.2674771,8.3 14.8789404,8.8 15.4904037,9 C15.5923142,9.1 15.6942248,9.1 15.6942248,9 C15.6942248,9 15.6942248,7.8 15.6942248,7.7 C15.0827615,7.5 14.1655665,7 13.3502821,5.9 Z M11.7197133,5.9 C11.9235344,6.4 12.3311766,6.9 12.7388188,7.2 L8.35666515,7.2 C8.76430735,6.8 8.96812845,6.3 9.37577066,5.9 L11.7197133,5.9 L11.7197133,5.9 Z",
+                      "id": "XMLID_30_"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M22.6241422,11.3 C22.6241422,11.3 21.4012156,12.2 20.178289,13.1 L20.178289,4.7 L16.9171514,4.7 L16.9171514,6.2 L18.7515413,6.2 L18.7515413,14.3 C18.2419886,14.7 17.8343464,14.8 17.8343464,14.8 L18.7515413,15.9 L22.7260528,13 L22.6241422,11.3 C22.9298739,11.3 22.8279633,11.2 22.6241422,11.3 Z",
+                      "id": "XMLID_8_"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M18.9553624,3.4 L20.3821101,3.4 C20.5859312,3.4 20.5859312,3.3 20.5859312,3.3 L18.5477202,0.2 L17.019062,0.2 L16.9171514,0.3 C17.019062,0.4 18.9553624,3.4 18.9553624,3.4 Z",
+                      "id": "XMLID_7_"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "rect",
+                    "attributes": {
+                      "id": "XMLID_6_",
+                      "x": "35.2610505",
+                      "y": "0.9",
+                      "width": "11.4139817",
+                      "height": "1.5"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M39.4393831,7.8 L48.4075115,7.8 L48.4075115,6.3 L33.6304817,6.3 L33.6304817,7.8 L37.7069037,7.8 C36.7897088,10 34.8534083,15.4 34.7514978,15.5 C34.7514978,15.6 34.7514978,15.6 34.8534083,15.6 L47.5922271,15.6 C47.6941377,15.6 47.6941377,15.5 47.6941377,15.5 L45.8597478,10.6 L44.3310895,10.6 C44.229179,10.6 44.229179,10.7 44.229179,10.7 C44.229179,10.8 45.5540161,14.2 45.5540161,14.2 L37.197351,14.2 L39.4393831,7.8 Z",
+                      "id": "XMLID_5_"
+                    },
+                    "children": []
+                  }
+                ]
+              },
+              {
+                "type": "element",
+                "name": "g",
+                "attributes": {
+                  "id": "XMLID_19_"
+                },
+                "children": [
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M22.5,14.7 C22.1,15.1 21.3,15.7 19.9,15.7 C19.3,15.7 18.6,15.7 18.3,15.7 C17.9,15.7 14.9,15.7 11.3,15.7 C13.9,13.2 16.1,11.1 16.3,10.9 C16.5,10.7 17,10.2 17.5,9.8 C18.5,8.9 19.3,8.8 20,8.8 C21,8.8 21.8,9.2 22.5,9.8 C23.9,11.1 23.9,13.4 22.5,14.7 M24.2,8.2 C23.2,7.1 21.7,6.4 20.1,6.4 C18.7,6.4 17.5,6.9 16.4,7.7 C16,8.1 15.4,8.5 14.9,9.1 C14.5,9.5 5.9,17.9 5.9,17.9 C6.4,18 7,18 7.5,18 C8,18 18,18 18.4,18 C19.2,18 19.8,18 20.4,17.9 C21.7,17.8 23,17.3 24.1,16.3 C26.4,14.1 26.4,10.4 24.2,8.2 Z",
+                      "id": "XMLID_22_",
+                      "fill": "#00A3FF"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M10.2,7.6 C9.1,6.8 8,6.4 6.7,6.4 C5.1,6.4 3.6,7.1 2.6,8.2 C0.4,10.5 0.4,14.1 2.7,16.4 C3.7,17.3 4.7,17.8 5.9,17.9 L8.2,15.7 C7.8,15.7 7.3,15.7 6.9,15.7 C5.6,15.6 4.8,15.2 4.3,14.7 C2.9,13.3 2.9,11.1 4.2,9.7 C4.9,9 5.7,8.7 6.7,8.7 C7.3,8.7 8.2,8.8 9.1,9.7 C9.5,10.1 10.6,10.9 11,11.3 L11.1,11.3 L12.6,9.8 L12.6,9.7 C11.9,9 10.8,8.1 10.2,7.6",
+                      "id": "XMLID_2_",
+                      "fill": "#00C8DC"
+                    },
+                    "children": []
+                  },
+                  {
+                    "type": "element",
+                    "name": "path",
+                    "attributes": {
+                      "d": "M20.7,5.1 C19.6,2.1 16.7,0 13.4,0 C9.5,0 6.4,2.9 5.8,6.5 C6.1,6.5 6.4,6.4 6.8,6.4 C7.2,6.5 7.7,6.5 8.1,6.5 L8.1,6.5 C8.6,4 10.8,2.2 13.4,2.2 C15.6,2.2 17.5,3.5 18.4,5.4 C18.4,5.4 18.5,5.5 18.5,5.4 C19.2,5.3 20,5.1 20.7,5.1 C20.7,5.2 20.7,5.2 20.7,5.1",
+                      "id": "XMLID_1_",
+                      "fill": "#006EFF"
+                    },
+                    "children": []
+                  }
+                ]
+              }
+            ]
+          }
+        ]
+      }
+    ]
+  },
+  "name": "TencentIconBig"
+}
@@ -2,7 +2,7 @@
 // DON NOT EDIT IT MANUALLY
 
 import * as React from 'react'
-import data from './Google.json'
+import data from './TencentIconBig.json'
 import IconBase from '@/app/components/base/icons/IconBase'
 import type { IconData } from '@/app/components/base/icons/IconBase'
 
@@ -15,6 +15,6 @@ const Icon = (
   },
 ) => <IconBase {...props} ref={ref} data={data as IconData} />
 
-Icon.displayName = 'Google'
+Icon.displayName = 'TencentIconBig'
 
 export default Icon
@@ -10,6 +10,8 @@ export { default as OpikIconBig } from './OpikIconBig'
 export { default as OpikIcon } from './OpikIcon'
 export { default as PhoenixIconBig } from './PhoenixIconBig'
 export { default as PhoenixIcon } from './PhoenixIcon'
+export { default as TencentIconBig } from './TencentIconBig'
+export { default as TencentIcon } from './TencentIcon'
 export { default as TracingIcon } from './TracingIcon'
 export { default as WeaveIconBig } from './WeaveIconBig'
 export { default as WeaveIcon } from './WeaveIcon'
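With the JSON data in place and the generated wrappers re-exported here, the new icons are consumed like any other icon in the tracing set. A usage sketch (the component name `TencentBadges` is illustrative):

```tsx
import { TencentIcon, TencentIconBig } from '@/app/components/base/icons/src/public/tracing'

// Both components accept ordinary SVG props via IconBase.
const TencentBadges = () => (
  <div>
    <TencentIcon className='h-4 w-4' />
    <TencentIconBig />
  </div>
)

export default TencentBadges
```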
@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './AlignLeft01.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'AlignLeft01'
-
-export default Icon

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './AlignRight01.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'AlignRight01'
-
-export default Icon

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './Grid01.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'Grid01'
-
-export default Icon

@@ -1,4 +1 @@
-export { default as AlignLeft01 } from './AlignLeft01'
-export { default as AlignRight01 } from './AlignRight01'
-export { default as Grid01 } from './Grid01'
 export { default as LayoutGrid02 } from './LayoutGrid02'
@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './Route.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'Route'
-
-export default Icon

@@ -1 +0,0 @@
-export { default as Route } from './Route'

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './User01.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'User01'
-
-export default Icon

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './Users01.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'Users01'
-
-export default Icon

@@ -1,2 +0,0 @@
-export { default as User01 } from './User01'
-export { default as Users01 } from './Users01'
@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './Stars02.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'Stars02'
-
-export default Icon

@@ -1 +0,0 @@
-export { default as Stars02 } from './Stars02'
@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './ChevronDown.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'ChevronDown'
-
-export default Icon

@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './HighPriority.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'HighPriority'
-
-export default Icon

@@ -1,2 +0,0 @@
-export { default as ChevronDown } from './ChevronDown'
-export { default as HighPriority } from './HighPriority'
@@ -1,20 +0,0 @@
-// GENERATE BY script
-// DON NOT EDIT IT MANUALLY
-
-import * as React from 'react'
-import data from './Grid01.json'
-import IconBase from '@/app/components/base/icons/IconBase'
-import type { IconData } from '@/app/components/base/icons/IconBase'
-
-const Icon = (
-  {
-    ref,
-    ...props
-  }: React.SVGProps<SVGSVGElement> & {
-    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
-  },
-) => <IconBase {...props} ref={ref} data={data as IconData} />
-
-Icon.displayName = 'Grid01'
-
-export default Icon

@@ -1 +0,0 @@
-export { default as Grid01 } from './Grid01'
@@ -199,7 +199,7 @@ Chat applications support session persistence, allowing previous chat history to
     --header 'Authorization: Bearer {api_key}' \\
     --header 'Content-Type: application/json' \\
     --data-raw '{
-      "inputs": ${JSON.stringify(props.inputs)},
+      "inputs": ${JSON.stringify(props.inputs)},
       "query": "What are the specs of the iPhone 13 Pro Max?",
       "response_mode": "streaming",
       "conversation_id": "",
@@ -1182,7 +1182,7 @@ Chat applications support session persistence, allowing previous chat history to
     --header 'Content-Type: application/json' \\
     --data-raw '{
       "value": "Updated Value",
-      "user": "abc-123"
+      "user": "abc-123"
     }'`}
 />
 
@@ -1188,7 +1188,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
     --header 'Content-Type: application/json' \\
     --data-raw '{
       "value": "Updated Value",
-      "user": "abc-123"
+      "user": "abc-123"
     }'`}
 />
 
@@ -182,7 +182,7 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
     --header 'Authorization: Bearer {api_key}' \\
     --header 'Content-Type: application/json' \\
     --data-raw '{
-      "inputs": ${JSON.stringify(props.inputs)},
+      "inputs": ${JSON.stringify(props.inputs)},
       "response_mode": "streaming",
      "user": "abc-123"
    }'`}
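In all three documentation templates the removed and added lines carry the same text once leading whitespace is dropped, so the rendered request appears unchanged. For reference, a sketch of the body the chat template produces, assuming `props.inputs = { topic: 'phones' }` (an illustrative value; `props.inputs` is whatever the app's form collected):

```ts
const props = { inputs: { topic: 'phones' } }

// Fields mirror the hunk above; values other than props.inputs are verbatim.
const body = JSON.stringify({
  inputs: props.inputs,
  query: 'What are the specs of the iPhone 13 Pro Max?',
  response_mode: 'streaming',
  conversation_id: '',
}, null, 2)

console.log(body)
```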
@@ -20,7 +20,9 @@ import type { SiteInfo } from '@/models/share'
 import { TEXT_GENERATION_TIMEOUT_MS } from '@/config'
 import {
   getFilesInLogs,
+  getProcessedFiles,
 } from '@/app/components/base/file-uploader/utils'
+import type { FileEntity } from '@/app/components/base/file-uploader/types'
 import { formatBooleanInputs } from '@/utils/model-config'
 
 export type IResultProps = {
@@ -160,8 +162,22 @@ const Result: FC<IResultProps> = ({
     if (!checkCanSend())
       return
 
+    // Process inputs: convert file entities to API format
+    const processedInputs = { ...formatBooleanInputs(promptConfig?.prompt_variables, inputs) }
+    promptConfig?.prompt_variables.forEach((variable) => {
+      const value = processedInputs[variable.key]
+      if (variable.type === 'file' && value && typeof value === 'object' && !Array.isArray(value)) {
+        // Convert single file entity to API format
+        processedInputs[variable.key] = getProcessedFiles([value as FileEntity])[0]
+      }
+      else if (variable.type === 'file-list' && Array.isArray(value) && value.length > 0) {
+        // Convert file entity array to API format
+        processedInputs[variable.key] = getProcessedFiles(value as FileEntity[])
+      }
+    })
+
     const data: Record<string, any> = {
-      inputs: formatBooleanInputs(promptConfig?.prompt_variables, inputs),
+      inputs: processedInputs,
     }
     if (visionConfig.enabled && completionFiles && completionFiles?.length > 0) {
       data.files = completionFiles.map((item) => {
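The hunk moves file conversion to submit time: raw `FileEntity` objects stay in form state and are mapped to the API shape only when the request payload is built. The actual mapping is `getProcessedFiles`; a sketch of the idea with assumed field names (the real entity and API types live in the file-uploader module):

```ts
// Assumed shapes for illustration; see file-uploader/types for the real ones.
type FileEntity = {
  name: string
  type: string
  transferMethod: 'local_file' | 'remote_url'
  uploadedId?: string
  url?: string
}

type ApiFileInput = {
  type: string
  transfer_method: 'local_file' | 'remote_url'
  upload_file_id?: string
  url?: string
}

const toApiFileInput = (file: FileEntity): ApiFileInput => ({
  type: file.type,
  transfer_method: file.transferMethod,
  upload_file_id: file.transferMethod === 'local_file' ? file.uploadedId : undefined,
  url: file.transferMethod === 'remote_url' ? file.url : undefined,
})

// Single file -> single API object; a file list maps element-wise, as in the hunk above.
console.log(toApiFileInput({ name: 'a.pdf', type: 'document', transferMethod: 'local_file', uploadedId: 'f-1' }))
```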
@@ -15,7 +15,6 @@ import { DEFAULT_VALUE_MAX_LEN } from '@/config'
 import TextGenerationImageUploader from '@/app/components/base/image-uploader/text-generation-image-uploader'
 import type { VisionFile, VisionSettings } from '@/types/app'
 import { FileUploaderInAttachmentWrapper } from '@/app/components/base/file-uploader'
-import { getProcessedFiles } from '@/app/components/base/file-uploader/utils'
 import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
 import cn from '@/utils/classnames'
 import BoolInput from '@/app/components/workflow/nodes/_base/components/before-run-form/bool-input'
@@ -82,9 +81,9 @@ const RunOnce: FC<IRunOnceProps> = ({
       else if (item.type === 'checkbox')
         newInputs[item.key] = item.default || false
       else if (item.type === 'file')
-        newInputs[item.key] = item.default
+        newInputs[item.key] = undefined
       else if (item.type === 'file-list')
-        newInputs[item.key] = item.default || []
+        newInputs[item.key] = []
       else
         newInputs[item.key] = undefined
     })
@@ -148,8 +147,8 @@ const RunOnce: FC<IRunOnceProps> = ({
       )}
       {item.type === 'file' && (
         <FileUploaderInAttachmentWrapper
-          value={inputs[item.key] ? [inputs[item.key]] : []}
-          onChange={(files) => { handleInputsChange({ ...inputsRef.current, [item.key]: getProcessedFiles(files)[0] }) }}
+          value={(inputs[item.key] && typeof inputs[item.key] === 'object') ? [inputs[item.key]] : []}
+          onChange={(files) => { handleInputsChange({ ...inputsRef.current, [item.key]: files[0] }) }}
           fileConfig={{
             ...item.config,
             fileUploadConfig: (visionConfig as any).fileUploadConfig,
@@ -158,8 +157,8 @@ const RunOnce: FC<IRunOnceProps> = ({
       )}
       {item.type === 'file-list' && (
         <FileUploaderInAttachmentWrapper
-          value={inputs[item.key]}
-          onChange={(files) => { handleInputsChange({ ...inputsRef.current, [item.key]: getProcessedFiles(files) }) }}
+          value={Array.isArray(inputs[item.key]) ? inputs[item.key] : []}
+          onChange={(files) => { handleInputsChange({ ...inputsRef.current, [item.key]: files }) }}
           fileConfig={{
             ...item.config,
             fileUploadConfig: (visionConfig as any).fileUploadConfig,
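This is the counterpart of the Result change: RunOnce state now holds the raw entities (`files[0]` instead of `getProcessedFiles(files)[0]`), so the uploader can re-render what the user picked, while the `typeof`/`Array.isArray` guards keep stale API-shaped values out of the widget. Conversion then happens exactly once, in the submit path shown earlier. A compact sketch of the two-phase flow, with the same assumed shapes as above:

```ts
type FileEntity = { type: string; transferMethod: string; uploadedId?: string }

const picked: FileEntity = { type: 'document', transferMethod: 'local_file', uploadedId: 'f-1' }

// onChange: keep the entity as-is in form state so the uploader can re-render it.
const formState: Record<string, FileEntity | undefined> = { report: picked }

// onSubmit: convert once, at the request boundary.
const payload = formState.report && {
  type: formState.report.type,
  transfer_method: formState.report.transferMethod,
  upload_file_id: formState.report.uploadedId,
}
console.log(payload)
```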
@@ -160,6 +160,10 @@ const translation = {
       title: 'Cloud-Monitor',
       description: 'Die vollständig verwaltete und wartungsfreie Observability-Plattform von Alibaba Cloud ermöglicht eine sofortige Überwachung, Verfolgung und Bewertung von Dify-Anwendungen.',
     },
+    tencent: {
+      title: 'Tencent APM',
+      description: 'Tencent Application Performance Monitoring bietet umfassendes Tracing und multidimensionale Analyse für LLM-Anwendungen.',
+    },
   },
   answerIcon: {
     descriptionInExplore: 'Gibt an, ob das web app Symbol zum Ersetzen 🤖 in Explore verwendet werden soll',

@@ -183,6 +183,10 @@ const translation = {
       title: 'Cloud Monitor',
       description: 'The fully-managed and maintenance-free observability platform provided by Alibaba Cloud, enables out-of-the-box monitoring, tracing, and evaluation of Dify applications.',
     },
+    tencent: {
+      title: 'Tencent APM',
+      description: 'Tencent Application Performance Monitoring provides comprehensive tracing and multi-dimensional analysis for LLM applications.',
+    },
   inUse: 'In use',
   configProvider: {
     title: 'Config ',

@@ -163,6 +163,10 @@ const translation = {
       title: 'Monitor de Nubes',
       description: 'La plataforma de observabilidad totalmente gestionada y sin mantenimiento proporcionada por Alibaba Cloud, permite la monitorización, trazado y evaluación de aplicaciones Dify de manera inmediata.',
     },
+    tencent: {
+      title: 'Tencent APM',
+      description: 'Tencent Application Performance Monitoring proporciona rastreo integral y análisis multidimensional para aplicaciones LLM.',
+    },
   },
   answerIcon: {
     title: 'Usar el icono de la aplicación web para reemplazar 🤖',

@@ -171,6 +171,10 @@ const translation = {
       title: 'نظارت بر ابر',
      description: 'پلتفرم مشاهده‌پذیری کاملاً مدیریت‌شده و بدون نیاز به نگهداری که توسط Alibaba Cloud ارائه شده، امکان نظارت، ردیابی و ارزیابی برنامه‌های Dify را به‌صورت آماده و با تنظیمات اولیه فراهم می‌کند.',
     },
+    tencent: {
+      title: 'تنست ای‌پی‌ام',
+      description: 'نظارت بر عملکرد برنامه‌های Tencent تحلیل‌های جامع و ردیابی چندبعدی برای برنامه‌های LLM ارائه می‌دهد.',
+    },
   },
   answerIcon: {
     descriptionInExplore: 'آیا از نماد web app برای جایگزینی 🤖 در Explore استفاده کنیم یا خیر',

@@ -163,6 +163,10 @@ const translation = {
       title: 'Surveillance Cloud',
       description: 'La plateforme d\'observabilité entièrement gérée et sans maintenance fournie par Alibaba Cloud permet une surveillance, un traçage et une évaluation prêts à l\'emploi des applications Dify.',
     },
+    tencent: {
+      title: 'Tencent APM',
+      description: 'Tencent Application Performance Monitoring fournit une traçabilité complète et une analyse multidimensionnelle pour les applications LLM.',
+    },
   },
   answerIcon: {
     description: 'S’il faut utiliser l’icône web app pour remplacer 🤖 dans l’application partagée',

@@ -163,6 +163,10 @@ const translation = {
       title: 'क्लाउड मॉनिटर',
       description: 'अलीबाबा क्लाउड द्वारा प्रदान की गई पूरी तरह से प्रबंधित और रखरखाव-मुक्त अवलोकन प्लेटफ़ॉर्म, Dify अनुप्रयोगों की स्वचालित निगरानी, ट्रेसिंग और मूल्यांकन का सक्षम बनाता है।',
     },
+    tencent: {
+      title: 'टेनसेंट एपीएम',
+      description: 'Tencent एप्लिकेशन परफॉर्मेंस मॉनिटरिंग LLM एप्लिकेशन के लिए व्यापक ट्रेसिंग और बहु-आयामी विश्लेषण प्रदान करता है।',
+    },
   },
   answerIcon: {
     title: 'बदलने 🤖 के लिए web app चिह्न का उपयोग करें',

@@ -155,6 +155,10 @@ const translation = {
     description: 'Mengonfigurasi penyedia LLMOps Pihak Ketiga dan melacak performa aplikasi.',
     inUse: 'Sedang digunakan',
     tracingDescription: 'Tangkap konteks lengkap eksekusi aplikasi, termasuk panggilan LLM, konteks, perintah, permintaan HTTP, dan lainnya, ke platform pelacakan pihak ketiga.',
+    tencent: {
+      title: 'Tencent APM',
+      description: 'Tencent Application Performance Monitoring menyediakan pelacakan komprehensif dan analisis multi-dimensi untuk aplikasi LLM.',
+    },
   },
   appSelector: {
     placeholder: 'Pilih aplikasi...',

@@ -169,6 +169,10 @@ const translation = {
       title: 'Monitoraggio Cloud',
       description: 'La piattaforma di osservabilità completamente gestita e senza manutenzione fornita da Alibaba Cloud consente il monitoraggio, il tracciamento e la valutazione delle applicazioni Dify fin da subito.',
     },
+    tencent: {
+      title: 'Tencent APM',
+      description: 'Tencent Application Performance Monitoring fornisce tracciamento completo e analisi multidimensionale per le applicazioni LLM.',
+    },
   },
   answerIcon: {
     description: 'Se utilizzare l\'icona web app per la sostituzione 🤖 nell\'applicazione condivisa',