mirror of https://github.com/langgenius/dify.git
Merge branch 'main' into feat/memory-orchestration-fed
commit 8a348615bf
@ -1,6 +1,7 @@
#!/bin/bash

yq eval '.services.weaviate.ports += ["8080:8080"]' -i docker/docker-compose.yaml
yq eval '.services.weaviate.ports += ["50051:50051"]' -i docker/docker-compose.yaml
yq eval '.services.qdrant.ports += ["6333:6333"]' -i docker/docker-compose.yaml
yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml
yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml
@ -1,5 +1,4 @@
import flask_restx
from flask import Response
from flask_restx import Resource, fields, marshal_with
from flask_restx._http import HTTPStatus
from sqlalchemy import select
@ -156,11 +155,6 @@ class AppApiKeyListResource(BaseApiKeyListResource):
        """Create a new API key for an app"""
        return super().post(resource_id)

    def after_request(self, resp: Response):
        resp.headers["Access-Control-Allow-Origin"] = "*"
        resp.headers["Access-Control-Allow-Credentials"] = "true"
        return resp

    resource_type = "app"
    resource_model = App
    resource_id_field = "app_id"

@ -177,11 +171,6 @@ class AppApiKeyResource(BaseApiKeyResource):
        """Delete an API key for an app"""
        return super().delete(resource_id, api_key_id)

    def after_request(self, resp):
        resp.headers["Access-Control-Allow-Origin"] = "*"
        resp.headers["Access-Control-Allow-Credentials"] = "true"
        return resp

    resource_type = "app"
    resource_model = App
    resource_id_field = "app_id"

@ -206,11 +195,6 @@ class DatasetApiKeyListResource(BaseApiKeyListResource):
        """Create a new API key for a dataset"""
        return super().post(resource_id)

    def after_request(self, resp: Response):
        resp.headers["Access-Control-Allow-Origin"] = "*"
        resp.headers["Access-Control-Allow-Credentials"] = "true"
        return resp

    resource_type = "dataset"
    resource_model = Dataset
    resource_id_field = "dataset_id"

@ -227,11 +211,6 @@ class DatasetApiKeyResource(BaseApiKeyResource):
        """Delete an API key for a dataset"""
        return super().delete(resource_id, api_key_id)

    def after_request(self, resp: Response):
        resp.headers["Access-Control-Allow-Origin"] = "*"
        resp.headers["Access-Control-Allow-Credentials"] = "true"
        return resp

    resource_type = "dataset"
    resource_model = Dataset
    resource_id_field = "dataset_id"
@ -472,6 +472,9 @@ class ProviderConfiguration(BaseModel):
                provider_model_credentials_cache.delete()

                self.switch_preferred_provider_type(provider_type=ProviderType.CUSTOM, session=session)
            else:
                # some historical data may have a provider record but not be set as valid
                provider_record.is_valid = True

            session.commit()
        except Exception:
@ -1,9 +1,24 @@
"""
Weaviate vector database implementation for Dify's RAG system.

This module provides integration with the Weaviate vector database for storing and retrieving
document embeddings used in retrieval-augmented generation workflows.
"""

import datetime
import json
import logging
import uuid as _uuid
from typing import Any
from urllib.parse import urlparse

import weaviate  # type: ignore
import weaviate
import weaviate.classes.config as wc
from pydantic import BaseModel, model_validator
from weaviate.classes.data import DataObject
from weaviate.classes.init import Auth
from weaviate.classes.query import Filter, MetadataQuery
from weaviate.exceptions import UnexpectedStatusCodeError

from configs import dify_config
from core.rag.datasource.vdb.field import Field
@ -15,265 +30,394 @@ from core.rag.models.document import Document
from extensions.ext_redis import redis_client
from models.dataset import Dataset

logger = logging.getLogger(__name__)


class WeaviateConfig(BaseModel):
    """
    Configuration model for Weaviate connection settings.

    Attributes:
        endpoint: Weaviate server endpoint URL
        api_key: Optional API key for authentication
        batch_size: Number of objects to batch per insert operation
    """

    endpoint: str
    api_key: str | None = None
    batch_size: int = 100

    @model_validator(mode="before")
    @classmethod
    def validate_config(cls, values: dict):
    def validate_config(cls, values: dict) -> dict:
        """Validates that required configuration values are present."""
        if not values["endpoint"]:
            raise ValueError("config WEAVIATE_ENDPOINT is required")
        return values
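Note: for orientation, a minimal sketch of constructing this config and the vector store defined below; the endpoint, collection name, and attribute list are illustrative placeholders, not values from this commit, and the constructor connects eagerly, so it needs a reachable Weaviate instance:

    # Hedged sketch: endpoint/collection/attributes are illustrative.
    config = WeaviateConfig(endpoint="http://localhost:8080", api_key=None, batch_size=100)
    vector = WeaviateVector(collection_name="Vector_index_example_Node", config=config, attributes=["document_id"])
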
class WeaviateVector(BaseVector):
    """
    Weaviate vector database implementation for document storage and retrieval.

    Handles creation, insertion, deletion, and querying of document embeddings
    in a Weaviate collection.
    """

    def __init__(self, collection_name: str, config: WeaviateConfig, attributes: list):
        """
        Initializes the Weaviate vector store.

        Args:
            collection_name: Name of the Weaviate collection
            config: Weaviate configuration settings
            attributes: List of metadata attributes to store
        """
        super().__init__(collection_name)
        self._client = self._init_client(config)
        self._attributes = attributes

    def _init_client(self, config: WeaviateConfig) -> weaviate.Client:
        auth_config = weaviate.AuthApiKey(api_key=config.api_key or "")
    def _init_client(self, config: WeaviateConfig) -> weaviate.WeaviateClient:
        """
        Initializes and returns a connected Weaviate client.

        weaviate.connect.connection.has_grpc = False  # ty: ignore [unresolved-attribute]
        Configures both HTTP and gRPC connections with proper authentication.
        """
        p = urlparse(config.endpoint)
        host = p.hostname or config.endpoint.replace("https://", "").replace("http://", "")
        http_secure = p.scheme == "https"
        http_port = p.port or (443 if http_secure else 80)

        try:
            client = weaviate.Client(
                url=config.endpoint, auth_client_secret=auth_config, timeout_config=(5, 60), startup_period=None
            )
        except Exception as exc:
            raise ConnectionError("Vector database connection error") from exc
        grpc_host = host
        grpc_secure = http_secure
        grpc_port = 443 if grpc_secure else 50051

        client.batch.configure(
            # `batch_size` takes an `int` value to enable auto-batching
            # (`None` is used for manual batching)
            batch_size=config.batch_size,
            # dynamically update the `batch_size` based on import speed
            dynamic=True,
            # `timeout_retries` takes an `int` value to retry on timeouts
            timeout_retries=3,
        client = weaviate.connect_to_custom(
            http_host=host,
            http_port=http_port,
            http_secure=http_secure,
            grpc_host=grpc_host,
            grpc_port=grpc_port,
            grpc_secure=grpc_secure,
            auth_credentials=Auth.api_key(config.api_key) if config.api_key else None,
        )

        if not client.is_ready():
            raise ConnectionError("Vector database is not ready")

        return client
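Note: the v4 connection flow above as a self-contained sketch; hosts, ports, and the key are illustrative, and closing the client is the caller's job in real code:

    import weaviate
    from weaviate.classes.init import Auth

    client = weaviate.connect_to_custom(
        http_host="localhost", http_port=8080, http_secure=False,
        grpc_host="localhost", grpc_port=50051, grpc_secure=False,
        auth_credentials=Auth.api_key("example-key"),  # illustrative credential
    )
    try:
        assert client.is_ready()  # verifies both the HTTP and gRPC channels
    finally:
        client.close()  # v4 clients hold gRPC connections and should be closed explicitly
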
    def get_type(self) -> str:
        """Returns the vector database type identifier."""
        return VectorType.WEAVIATE

    def get_collection_name(self, dataset: Dataset) -> str:
        """
        Retrieves or generates the collection name for a dataset.

        Uses existing index structure if available, otherwise generates from dataset ID.
        """
        if dataset.index_struct_dict:
            class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
            if not class_prefix.endswith("_Node"):
                # original class_prefix
                class_prefix += "_Node"

            return class_prefix

        dataset_id = dataset.id
        return Dataset.gen_collection_name_by_id(dataset_id)

    def to_index_struct(self):
    def to_index_struct(self) -> dict:
        """Returns the index structure dictionary for persistence."""
        return {"type": self.get_type(), "vector_store": {"class_prefix": self._collection_name}}
    def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
        # create collection
        """
        Creates a new collection and adds initial documents with embeddings.
        """
        self._create_collection()
        # create vector
        self.add_texts(texts, embeddings)

    def _create_collection(self):
        """
        Creates the Weaviate collection with required schema if it doesn't exist.

        Uses Redis locking to prevent concurrent creation attempts.
        """
        lock_name = f"vector_indexing_lock_{self._collection_name}"
        with redis_client.lock(lock_name, timeout=20):
            collection_exist_cache_key = f"vector_indexing_{self._collection_name}"
            if redis_client.get(collection_exist_cache_key):
            cache_key = f"vector_indexing_{self._collection_name}"
            if redis_client.get(cache_key):
                return
            schema = self._default_schema(self._collection_name)
            if not self._client.schema.contains(schema):
                # create collection
                self._client.schema.create_class(schema)
            redis_client.set(collection_exist_cache_key, 1, ex=3600)

            try:
                if not self._client.collections.exists(self._collection_name):
                    self._client.collections.create(
                        name=self._collection_name,
                        properties=[
                            wc.Property(
                                name=Field.TEXT_KEY.value,
                                data_type=wc.DataType.TEXT,
                                tokenization=wc.Tokenization.WORD,
                            ),
                            wc.Property(name="document_id", data_type=wc.DataType.TEXT),
                            wc.Property(name="doc_id", data_type=wc.DataType.TEXT),
                            wc.Property(name="chunk_index", data_type=wc.DataType.INT),
                        ],
                        vector_config=wc.Configure.Vectors.self_provided(),
                    )

                self._ensure_properties()
                redis_client.set(cache_key, 1, ex=3600)
            except Exception as e:
                logger.exception("Error creating collection %s", self._collection_name)
                raise
    def _ensure_properties(self) -> None:
        """
        Ensures all required properties exist in the collection schema.

        Adds missing properties if the collection exists but lacks them.
        """
        if not self._client.collections.exists(self._collection_name):
            return

        col = self._client.collections.use(self._collection_name)
        cfg = col.config.get()
        existing = {p.name for p in (cfg.properties or [])}

        to_add = []
        if "document_id" not in existing:
            to_add.append(wc.Property(name="document_id", data_type=wc.DataType.TEXT))
        if "doc_id" not in existing:
            to_add.append(wc.Property(name="doc_id", data_type=wc.DataType.TEXT))
        if "chunk_index" not in existing:
            to_add.append(wc.Property(name="chunk_index", data_type=wc.DataType.INT))

        for prop in to_add:
            try:
                col.config.add_property(prop)
            except Exception as e:
                logger.warning("Could not add property %s: %s", prop.name, e)
    def _get_uuids(self, documents: list[Document]) -> list[str]:
        """
        Generates deterministic UUIDs for documents based on their content.

        Uses UUID5 with URL namespace to ensure consistent IDs for identical content.
        """
        URL_NAMESPACE = _uuid.UUID("6ba7b811-9dad-11d1-80b4-00c04fd430c8")

        uuids = []
        for doc in documents:
            uuid_val = _uuid.uuid5(URL_NAMESPACE, doc.page_content)
            uuids.append(str(uuid_val))

        return uuids
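Note: the UUID5 scheme above is deterministic, which is what makes re-inserting identical content idempotent; a quick standalone illustration using only the standard library:

    import uuid

    ns = uuid.UUID("6ba7b811-9dad-11d1-80b4-00c04fd430c8")  # the same URL namespace as above
    # identical content always yields the identical ID; different content does not
    assert uuid.uuid5(ns, "same text") == uuid.uuid5(ns, "same text")
    assert uuid.uuid5(ns, "same text") != uuid.uuid5(ns, "other text")
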
    def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
        """
        Adds documents with their embeddings to the collection.

        Batches insertions for efficiency and returns the list of inserted object IDs.
        """
        uuids = self._get_uuids(documents)
        texts = [d.page_content for d in documents]
        metadatas = [d.metadata for d in documents]

        ids = []
        col = self._client.collections.use(self._collection_name)
        objs: list[DataObject] = []
        ids_out: list[str] = []

        with self._client.batch as batch:
            for i, text in enumerate(texts):
                data_properties = {Field.TEXT_KEY: text}
                if metadatas is not None:
                    # metadata may be None
                    for key, val in (metadatas[i] or {}).items():
                        data_properties[key] = self._json_serializable(val)
        for i, text in enumerate(texts):
            props: dict[str, Any] = {Field.TEXT_KEY.value: text}
            meta = metadatas[i] or {}
            for k, v in meta.items():
                props[k] = self._json_serializable(v)

                batch.add_data_object(
                    data_object=data_properties,
                    class_name=self._collection_name,
                    uuid=uuids[i],
                    vector=embeddings[i] if embeddings else None,
            candidate = uuids[i] if uuids else None
            uid = candidate if (candidate and self._is_uuid(candidate)) else str(_uuid.uuid4())
            ids_out.append(uid)

            vec_payload = None
            if embeddings and i < len(embeddings) and embeddings[i]:
                vec_payload = {"default": embeddings[i]}

            objs.append(
                DataObject(
                    uuid=uid,
                    properties=props,  # type: ignore[arg-type] # mypy incorrectly infers DataObject signature
                    vector=vec_payload,
                )
                ids.append(uuids[i])
        return ids
            )

    def delete_by_metadata_field(self, key: str, value: str):
        # check whether the index already exists
        schema = self._default_schema(self._collection_name)
        if self._client.schema.contains(schema):
            where_filter = {"operator": "Equal", "path": [key], "valueText": value}
        batch_size = max(1, int(dify_config.WEAVIATE_BATCH_SIZE or 100))
        with col.batch.dynamic() as batch:
            for obj in objs:
                batch.add_object(properties=obj.properties, uuid=obj.uuid, vector=obj.vector)

            self._client.batch.delete_objects(class_name=self._collection_name, where=where_filter, output="minimal")
        return ids_out
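Note: for contrast with the removed v3 `client.batch` API, the v4 dynamic-batching idiom used above as a standalone sketch; the collection name and payload are illustrative:

    col = client.collections.use("ExampleCollection")  # hypothetical collection
    with col.batch.dynamic() as batch:  # sizes and flushes batches automatically
        batch.add_object(
            properties={"text": "hello world"},
            vector={"default": [0.1, 0.2, 0.3]},  # named-vector payload, matching the code above
        )
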
    def _is_uuid(self, val: str) -> bool:
        """Validates whether a string is a valid UUID format."""
        try:
            _uuid.UUID(str(val))
            return True
        except Exception:
            return False

    def delete_by_metadata_field(self, key: str, value: str) -> None:
        """Deletes all objects matching a specific metadata field value."""
        if not self._client.collections.exists(self._collection_name):
            return

        col = self._client.collections.use(self._collection_name)
        col.data.delete_many(where=Filter.by_property(key).equal(value))

    def delete(self):
        # check whether the index already exists
        schema = self._default_schema(self._collection_name)
        if self._client.schema.contains(schema):
            self._client.schema.delete_class(self._collection_name)
        """Deletes the entire collection from Weaviate."""
        if self._client.collections.exists(self._collection_name):
            self._client.collections.delete(self._collection_name)
    def text_exists(self, id: str) -> bool:
        collection_name = self._collection_name
        schema = self._default_schema(self._collection_name)

        # check whether the index already exists
        if not self._client.schema.contains(schema):
        """Checks if a document with the given doc_id exists in the collection."""
        if not self._client.collections.exists(self._collection_name):
            return False
        result = (
            self._client.query.get(collection_name)
            .with_additional(["id"])
            .with_where(
                {
                    "path": ["doc_id"],
                    "operator": "Equal",
                    "valueText": id,
                }
            )
            .with_limit(1)
            .do()

        col = self._client.collections.use(self._collection_name)
        res = col.query.fetch_objects(
            filters=Filter.by_property("doc_id").equal(id),
            limit=1,
            return_properties=["doc_id"],
        )

        if "errors" in result:
            raise ValueError(f"Error during query: {result['errors']}")
        return len(res.objects) > 0

        entries = result["data"]["Get"][collection_name]
        if len(entries) == 0:
            return False
    def delete_by_ids(self, ids: list[str]) -> None:
        """
        Deletes objects by their UUID identifiers.

        return True
        Silently ignores 404 errors for non-existent IDs.
        """
        if not self._client.collections.exists(self._collection_name):
            return

    def delete_by_ids(self, ids: list[str]):
        # check whether the index already exists
        schema = self._default_schema(self._collection_name)
        if self._client.schema.contains(schema):
            for uuid in ids:
                try:
                    self._client.data_object.delete(
                        class_name=self._collection_name,
                        uuid=uuid,
                    )
                except weaviate.UnexpectedStatusCodeException as e:
                    # tolerate not found error
                    if e.status_code != 404:
                        raise e
        col = self._client.collections.use(self._collection_name)

        for uid in ids:
            try:
                col.data.delete_by_id(uid)
            except UnexpectedStatusCodeError as e:
                if getattr(e, "status_code", None) != 404:
                    raise
    def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
        """Look up similar documents by embedding vector in Weaviate."""
        collection_name = self._collection_name
        properties = self._attributes
        properties.append(Field.TEXT_KEY)
        query_obj = self._client.query.get(collection_name, properties)
        """
        Performs vector similarity search using the provided query vector.

        vector = {"vector": query_vector}
        document_ids_filter = kwargs.get("document_ids_filter")
        if document_ids_filter:
            operands = []
            for document_id_filter in document_ids_filter:
                operands.append({"path": ["document_id"], "operator": "Equal", "valueText": document_id_filter})
            where_filter = {"operator": "Or", "operands": operands}
            query_obj = query_obj.with_where(where_filter)
        result = (
            query_obj.with_near_vector(vector)
            .with_limit(kwargs.get("top_k", 4))
            .with_additional(["vector", "distance"])
            .do()
        Filters by document IDs if provided and applies score threshold.
        Returns documents sorted by relevance score.
        """
        if not self._client.collections.exists(self._collection_name):
            return []

        col = self._client.collections.use(self._collection_name)
        props = list({*self._attributes, "document_id", Field.TEXT_KEY.value})

        where = None
        doc_ids = kwargs.get("document_ids_filter") or []
        if doc_ids:
            ors = [Filter.by_property("document_id").equal(x) for x in doc_ids]
            where = ors[0]
            for f in ors[1:]:
                where = where | f

        top_k = int(kwargs.get("top_k", 4))
        score_threshold = float(kwargs.get("score_threshold") or 0.0)

        res = col.query.near_vector(
            near_vector=query_vector,
            limit=top_k,
            return_properties=props,
            return_metadata=MetadataQuery(distance=True),
            include_vector=False,
            filters=where,
            target_vector="default",
        )
        if "errors" in result:
            raise ValueError(f"Error during query: {result['errors']}")

        docs_and_scores = []
        for res in result["data"]["Get"][collection_name]:
            text = res.pop(Field.TEXT_KEY)
            score = 1 - res["_additional"]["distance"]
            docs_and_scores.append((Document(page_content=text, metadata=res), score))
        docs: list[Document] = []
        for obj in res.objects:
            properties = dict(obj.properties or {})
            text = properties.pop(Field.TEXT_KEY.value, "")
            distance = (obj.metadata.distance if obj.metadata else None) or 1.0
            score = 1.0 - distance

        docs = []
        for doc, score in docs_and_scores:
            score_threshold = float(kwargs.get("score_threshold") or 0.0)
            # check score threshold
            if score >= score_threshold:
                if doc.metadata is not None:
                    doc.metadata["score"] = score
                    docs.append(doc)
        # Sort the documents by score in descending order
        docs = sorted(docs, key=lambda x: x.metadata.get("score", 0) if x.metadata else 0, reverse=True)
            if score > score_threshold:
                properties["score"] = score
                docs.append(Document(page_content=text, metadata=properties))

        docs.sort(key=lambda d: d.metadata.get("score", 0.0), reverse=True)
        return docs
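Note: a hedged sketch of how a caller might exercise the new search path; the embedding and document ID are illustrative placeholders:

    docs = vector.search_by_vector(
        query_vector=[0.1, 0.2, 0.3],      # illustrative embedding
        top_k=4,
        score_threshold=0.5,               # score = 1 - distance; results at or below 0.5 are dropped
        document_ids_filter=["doc-123"],   # hypothetical document ID
    )
    for doc in docs:
        print(doc.metadata.get("score"), doc.page_content[:80])
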
    def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
        """Return docs using BM25F.

        Args:
            query: Text to look up documents similar to.

        Returns:
            List of Documents most similar to the query.
        """
        collection_name = self._collection_name
        content: dict[str, Any] = {"concepts": [query]}
        properties = self._attributes
        properties.append(Field.TEXT_KEY)
        if kwargs.get("search_distance"):
            content["certainty"] = kwargs.get("search_distance")
        query_obj = self._client.query.get(collection_name, properties)
        document_ids_filter = kwargs.get("document_ids_filter")
        if document_ids_filter:
            operands = []
            for document_id_filter in document_ids_filter:
                operands.append({"path": ["document_id"], "operator": "Equal", "valueText": document_id_filter})
            where_filter = {"operator": "Or", "operands": operands}
            query_obj = query_obj.with_where(where_filter)
        query_obj = query_obj.with_additional(["vector"])
        properties = ["text"]
        result = query_obj.with_bm25(query=query, properties=properties).with_limit(kwargs.get("top_k", 4)).do()
        if "errors" in result:
            raise ValueError(f"Error during query: {result['errors']}")
        docs = []
        for res in result["data"]["Get"][collection_name]:
            text = res.pop(Field.TEXT_KEY)
            additional = res.pop("_additional")
            docs.append(Document(page_content=text, vector=additional["vector"], metadata=res))
        Performs BM25 full-text search on document content.

        Filters by document IDs if provided and returns matching documents with vectors.
        """
        if not self._client.collections.exists(self._collection_name):
            return []

        col = self._client.collections.use(self._collection_name)
        props = list({*self._attributes, Field.TEXT_KEY.value})

        where = None
        doc_ids = kwargs.get("document_ids_filter") or []
        if doc_ids:
            ors = [Filter.by_property("document_id").equal(x) for x in doc_ids]
            where = ors[0]
            for f in ors[1:]:
                where = where | f

        top_k = int(kwargs.get("top_k", 4))

        res = col.query.bm25(
            query=query,
            query_properties=[Field.TEXT_KEY.value],
            limit=top_k,
            return_properties=props,
            include_vector=True,
            filters=where,
        )

        docs: list[Document] = []
        for obj in res.objects:
            properties = dict(obj.properties or {})
            text = properties.pop(Field.TEXT_KEY.value, "")

            vec = obj.vector
            if isinstance(vec, dict):
                vec = vec.get("default") or next(iter(vec.values()), None)

            docs.append(Document(page_content=text, vector=vec, metadata=properties))
        return docs
    def _default_schema(self, index_name: str):
        return {
            "class": index_name,
            "properties": [
                {
                    "name": "text",
                    "dataType": ["text"],
                }
            ],
        }

    def _json_serializable(self, value: Any):
    def _json_serializable(self, value: Any) -> Any:
        """Converts values to JSON-serializable format, handling datetime objects."""
        if isinstance(value, datetime.datetime):
            return value.isoformat()
        return value
class WeaviateVectorFactory(AbstractVectorFactory):
    """Factory class for creating WeaviateVector instances."""

    def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> WeaviateVector:
        """
        Initializes a WeaviateVector instance for the given dataset.

        Uses existing collection name from dataset index structure or generates a new one.
        Updates dataset index structure if not already set.
        """
        if dataset.index_struct_dict:
            class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
            collection_name = class_prefix

@ -281,7 +425,6 @@ class WeaviateVectorFactory(AbstractVectorFactory):
            dataset_id = dataset.id
            collection_name = Dataset.gen_collection_name_by_id(dataset_id)
            dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.WEAVIATE, collection_name))

        return WeaviateVector(
            collection_name=collection_name,
            config=WeaviateConfig(
@ -25,7 +25,7 @@ class FirecrawlApp:
        }
        if params:
            json_data.update(params)
        response = self._post_request(f"{self.base_url}/v1/scrape", json_data, headers)
        response = self._post_request(f"{self.base_url}/v2/scrape", json_data, headers)
        if response.status_code == 200:
            response_data = response.json()
            data = response_data["data"]

@ -42,7 +42,7 @@ class FirecrawlApp:
        json_data = {"url": url}
        if params:
            json_data.update(params)
        response = self._post_request(f"{self.base_url}/v1/crawl", json_data, headers)
        response = self._post_request(f"{self.base_url}/v2/crawl", json_data, headers)
        if response.status_code == 200:
            # There are also two other fields in the response: "success" (bool) and "url" (str)
            job_id = response.json().get("id")
@ -51,9 +51,25 @@ class FirecrawlApp:
            self._handle_error(response, "start crawl job")
            return ""  # unreachable

    def map(self, url: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
        # Documentation: https://docs.firecrawl.dev/api-reference/endpoint/map
        headers = self._prepare_headers()
        json_data: dict[str, Any] = {"url": url, "integration": "dify"}
        if params:
            # Pass through provided params, including optional "sitemap": "only" | "include" | "skip"
            json_data.update(params)
        response = self._post_request(f"{self.base_url}/v2/map", json_data, headers)
        if response.status_code == 200:
            return cast(dict[str, Any], response.json())
        elif response.status_code in {402, 409, 500, 429, 408}:
            self._handle_error(response, "start map job")
            return {}
        else:
            raise Exception(f"Failed to start map job. Status code: {response.status_code}")

    def check_crawl_status(self, job_id) -> dict[str, Any]:
        headers = self._prepare_headers()
        response = self._get_request(f"{self.base_url}/v1/crawl/{job_id}", headers)
        response = self._get_request(f"{self.base_url}/v2/crawl/{job_id}", headers)
        if response.status_code == 200:
            crawl_status_response = response.json()
            if crawl_status_response.get("status") == "completed":
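Note: a hedged usage sketch for the new map endpoint; the API key, base URL, and response field are illustrative assumptions based on the linked documentation, not values from this commit:

    app = FirecrawlApp(api_key="fc-example", base_url="https://api.firecrawl.dev")  # illustrative
    result = app.map("https://example.com", params={"sitemap": "include"})
    links = result.get("links", [])  # assumed response field; see the Firecrawl map docs
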
@ -135,12 +151,16 @@ class FirecrawlApp:
            "lang": "en",
            "country": "us",
            "timeout": 60000,
            "ignoreInvalidURLs": False,
            "ignoreInvalidURLs": True,
            "scrapeOptions": {},
            "sources": [
                {"type": "web"},
            ],
            "integration": "dify",
        }
        if params:
            json_data.update(params)
        response = self._post_request(f"{self.base_url}/v1/search", json_data, headers)
        response = self._post_request(f"{self.base_url}/v2/search", json_data, headers)
        if response.status_code == 200:
            response_data = response.json()
            if not response_data.get("success"):
@ -189,6 +189,11 @@ class ToolInvokeMessage(BaseModel):
        data: Mapping[str, Any] = Field(..., description="Detailed log data")
        metadata: Mapping[str, Any] = Field(default_factory=dict, description="The metadata of the log")

        @field_validator("metadata", mode="before")
        @classmethod
        def _normalize_metadata(cls, value: Mapping[str, Any] | None) -> Mapping[str, Any]:
            return value or {}

    class RetrieverResourceMessage(BaseModel):
        retriever_resources: list[RetrievalSourceMetadata] = Field(..., description="retriever resources")
        context: str = Field(..., description="context")

@ -376,6 +381,11 @@ class ToolEntity(BaseModel):
    def set_parameters(cls, v, validation_info: ValidationInfo) -> list[ToolParameter]:
        return v or []

    @field_validator("output_schema", mode="before")
    @classmethod
    def _normalize_output_schema(cls, value: Mapping[str, object] | None) -> Mapping[str, object]:
        return value or {}


class OAuthSchema(BaseModel):
    client_schema: list[ProviderConfig] = Field(
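Note: the `mode="before"` validators added above are standard Pydantic v2; a self-contained sketch of the None-to-empty-mapping normalization they perform:

    from collections.abc import Mapping
    from typing import Any

    from pydantic import BaseModel, field_validator

    class Example(BaseModel):
        metadata: Mapping[str, Any] = {}

        @field_validator("metadata", mode="before")
        @classmethod
        def _normalize(cls, value: Mapping[str, Any] | None) -> Mapping[str, Any]:
            return value or {}  # an explicit None becomes an empty mapping

    assert Example(metadata=None).metadata == {}
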
@ -7,6 +7,7 @@ from collections.abc import Mapping
from functools import singledispatchmethod
from typing import TYPE_CHECKING, final

from core.model_runtime.entities.llm_entities import LLMUsage
from core.workflow.entities import GraphRuntimeState
from core.workflow.enums import ErrorStrategy, NodeExecutionType
from core.workflow.graph import Graph
@ -125,6 +126,7 @@ class EventHandler:
        node_execution = self._graph_execution.get_or_create_node_execution(event.node_id)
        is_initial_attempt = node_execution.retry_count == 0
        node_execution.mark_started(event.id)
        self._graph_runtime_state.increment_node_run_steps()

        # Track in response coordinator for stream ordering
        self._response_coordinator.track_node_execution(event.node_id, event.id)

@ -163,6 +165,8 @@ class EventHandler:
        node_execution = self._graph_execution.get_or_create_node_execution(event.node_id)
        node_execution.mark_taken()

        self._accumulate_node_usage(event.node_run_result.llm_usage)

        # Store outputs in variable pool
        self._store_node_outputs(event.node_id, event.node_run_result.outputs)

@ -212,6 +216,8 @@ class EventHandler:
        node_execution.mark_failed(event.error)
        self._graph_execution.record_node_failure()

        self._accumulate_node_usage(event.node_run_result.llm_usage)

        result = self._error_handler.handle_node_failure(event)

        if result:

@ -235,6 +241,8 @@ class EventHandler:
        node_execution = self._graph_execution.get_or_create_node_execution(event.node_id)
        node_execution.mark_taken()

        self._accumulate_node_usage(event.node_run_result.llm_usage)

        # Persist outputs produced by the exception strategy (e.g. default values)
        self._store_node_outputs(event.node_id, event.node_run_result.outputs)

@ -286,6 +294,19 @@ class EventHandler:
        self._state_manager.enqueue_node(event.node_id)
        self._state_manager.start_execution(event.node_id)

    def _accumulate_node_usage(self, usage: LLMUsage) -> None:
        """Accumulate token usage into the shared runtime state."""
        if usage.total_tokens <= 0:
            return

        self._graph_runtime_state.add_tokens(usage.total_tokens)

        current_usage = self._graph_runtime_state.llm_usage
        if current_usage.total_tokens == 0:
            self._graph_runtime_state.llm_usage = usage
        else:
            self._graph_runtime_state.llm_usage = current_usage.plus(usage)

    def _store_node_outputs(self, node_id: str, outputs: Mapping[str, object]) -> None:
        """
        Store node outputs in the variable pool.
@ -214,7 +214,7 @@ vdb = [
    "tidb-vector==0.0.9",
    "upstash-vector==0.6.0",
    "volcengine-compat~=1.0.0",
    "weaviate-client~=3.24.0",
    "weaviate-client>=4.0.0,<5.0.0",
    "xinference-client~=1.2.2",
    "mo-vector~=0.1.13",
    "mysql-connector-python>=9.3.0",
@ -1,8 +1,11 @@
import datetime
import logging
import time
from collections.abc import Sequence

import click
from sqlalchemy import select
from sqlalchemy.orm import Session, sessionmaker

import app
from configs import dify_config
@ -35,50 +38,53 @@ def clean_workflow_runlogs_precise():

    retention_days = dify_config.WORKFLOW_LOG_RETENTION_DAYS
    cutoff_date = datetime.datetime.now() - datetime.timedelta(days=retention_days)
    session_factory = sessionmaker(db.engine, expire_on_commit=False)

    try:
        total_workflow_runs = db.session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count()
        if total_workflow_runs == 0:
            logger.info("No expired workflow run logs found")
            return
        logger.info("Found %s expired workflow run logs to clean", total_workflow_runs)
        with session_factory.begin() as session:
            total_workflow_runs = session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count()
            if total_workflow_runs == 0:
                logger.info("No expired workflow run logs found")
                return
            logger.info("Found %s expired workflow run logs to clean", total_workflow_runs)

        total_deleted = 0
        failed_batches = 0
        batch_count = 0

        while True:
            workflow_runs = (
                db.session.query(WorkflowRun.id).where(WorkflowRun.created_at < cutoff_date).limit(BATCH_SIZE).all()
            )
            with session_factory.begin() as session:
                workflow_run_ids = session.scalars(
                    select(WorkflowRun.id)
                    .where(WorkflowRun.created_at < cutoff_date)
                    .order_by(WorkflowRun.created_at, WorkflowRun.id)
                    .limit(BATCH_SIZE)
                ).all()

            if not workflow_runs:
                break

            workflow_run_ids = [run.id for run in workflow_runs]
            batch_count += 1

            success = _delete_batch_with_retry(workflow_run_ids, failed_batches)

            if success:
                total_deleted += len(workflow_run_ids)
                failed_batches = 0
            else:
                failed_batches += 1
                if failed_batches >= MAX_RETRIES:
                    logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES)
                if not workflow_run_ids:
                    break

                batch_count += 1

                success = _delete_batch(session, workflow_run_ids, failed_batches)

                if success:
                    total_deleted += len(workflow_run_ids)
                    failed_batches = 0
                else:
                    # Calculate incremental delay times: 5, 10, 15 minutes
                    retry_delay_minutes = failed_batches * 5
                    logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes)
                    time.sleep(retry_delay_minutes * 60)
                    continue
                    failed_batches += 1
                    if failed_batches >= MAX_RETRIES:
                        logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES)
                        break
                    else:
                        # Calculate incremental delay times: 5, 10, 15 minutes
                        retry_delay_minutes = failed_batches * 5
                        logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes)
                        time.sleep(retry_delay_minutes * 60)
                        continue

        logger.info("Cleanup completed: %s expired workflow run logs deleted", total_deleted)

    except Exception:
        db.session.rollback()
        logger.exception("Unexpected error in workflow log cleanup")
        raise
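Note: the retry pacing above is a simple linear backoff; as a standalone sketch of the schedule it produces:

    MAX_RETRIES = 3
    delays_minutes = [attempt * 5 for attempt in range(1, MAX_RETRIES + 1)]
    assert delays_minutes == [5, 10, 15]  # matches the comment in the loop above
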
@ -87,69 +93,56 @@ def clean_workflow_runlogs_precise():
    click.echo(click.style(f"Cleaned workflow run logs from db success latency: {execution_time:.2f}s", fg="green"))


def _delete_batch_with_retry(workflow_run_ids: list[str], attempt_count: int) -> bool:
    """Delete a single batch with a retry mechanism and complete cascading deletion"""
def _delete_batch(session: Session, workflow_run_ids: Sequence[str], attempt_count: int) -> bool:
    """Delete a single batch of workflow runs and all related data within a nested transaction."""
    try:
        with db.session.begin_nested():
        with session.begin_nested():
            message_data = (
                db.session.query(Message.id, Message.conversation_id)
                session.query(Message.id, Message.conversation_id)
                .where(Message.workflow_run_id.in_(workflow_run_ids))
                .all()
            )
            message_id_list = [msg.id for msg in message_data]
            conversation_id_list = list({msg.conversation_id for msg in message_data if msg.conversation_id})
            if message_id_list:
                db.session.query(AppAnnotationHitHistory).where(
                    AppAnnotationHitHistory.message_id.in_(message_id_list)
                ).delete(synchronize_session=False)
                message_related_models = [
                    AppAnnotationHitHistory,
                    MessageAgentThought,
                    MessageChain,
                    MessageFile,
                    MessageAnnotation,
                    MessageFeedback,
                ]
                for model in message_related_models:
                    session.query(model).where(model.message_id.in_(message_id_list)).delete(synchronize_session=False)  # type: ignore
                    # mypy: "DeclarativeAttributeIntercept" has no attribute "message_id"; the attribute
                    # exists on all six models at runtime, so the ignore is safe.

                db.session.query(MessageAgentThought).where(MessageAgentThought.message_id.in_(message_id_list)).delete(
            session.query(Message).where(Message.workflow_run_id.in_(workflow_run_ids)).delete(
                synchronize_session=False
            )

                db.session.query(MessageChain).where(MessageChain.message_id.in_(message_id_list)).delete(
                    synchronize_session=False
                )

                db.session.query(MessageFile).where(MessageFile.message_id.in_(message_id_list)).delete(
                    synchronize_session=False
                )

                db.session.query(MessageAnnotation).where(MessageAnnotation.message_id.in_(message_id_list)).delete(
                    synchronize_session=False
                )

                db.session.query(MessageFeedback).where(MessageFeedback.message_id.in_(message_id_list)).delete(
                    synchronize_session=False
                )

                db.session.query(Message).where(Message.workflow_run_id.in_(workflow_run_ids)).delete(
                    synchronize_session=False
                )

            db.session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete(
            session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete(
                synchronize_session=False
            )

            db.session.query(WorkflowNodeExecutionModel).where(
            session.query(WorkflowNodeExecutionModel).where(
                WorkflowNodeExecutionModel.workflow_run_id.in_(workflow_run_ids)
            ).delete(synchronize_session=False)

            if conversation_id_list:
                db.session.query(ConversationVariable).where(
                session.query(ConversationVariable).where(
                    ConversationVariable.conversation_id.in_(conversation_id_list)
                ).delete(synchronize_session=False)

                db.session.query(Conversation).where(Conversation.id.in_(conversation_id_list)).delete(
                session.query(Conversation).where(Conversation.id.in_(conversation_id_list)).delete(
                    synchronize_session=False
                )

            db.session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False)
            session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False)

        db.session.commit()
        return True
        return True

    except Exception:
        db.session.rollback()
        logger.exception("Batch deletion failed (attempt %s)", attempt_count + 1)
        return False
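Note: `session.begin_nested()` wraps each batch in a SAVEPOINT, so a failed batch rolls back on its own without aborting the outer transaction; a minimal standalone sketch, where `SomeModel` is a hypothetical mapped class:

    from sqlalchemy.orm import Session

    def delete_batch(session: Session, ids: list[str]) -> bool:
        try:
            with session.begin_nested():  # SAVEPOINT: released on success, rolled back on error
                session.query(SomeModel).filter(SomeModel.id.in_(ids)).delete(synchronize_session=False)
            return True
        except Exception:
            # the savepoint has already been rolled back; the outer transaction remains usable
            return False
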
@ -23,6 +23,7 @@ class CrawlOptions:
    only_main_content: bool = False
    includes: str | None = None
    excludes: str | None = None
    prompt: str | None = None
    max_depth: int | None = None
    use_sitemap: bool = True


@ -70,6 +71,7 @@ class WebsiteCrawlApiRequest:
            only_main_content=self.options.get("only_main_content", False),
            includes=self.options.get("includes"),
            excludes=self.options.get("excludes"),
            prompt=self.options.get("prompt"),
            max_depth=self.options.get("max_depth"),
            use_sitemap=self.options.get("use_sitemap", True),
        )

@ -174,6 +176,7 @@ class WebsiteService:
    def _crawl_with_firecrawl(cls, request: CrawlRequest, api_key: str, config: dict) -> dict[str, Any]:
        firecrawl_app = FirecrawlApp(api_key=api_key, base_url=config.get("base_url"))

        params: dict[str, Any]
        if not request.options.crawl_sub_pages:
            params = {
                "includePaths": [],

@ -188,8 +191,10 @@ class WebsiteService:
                "limit": request.options.limit,
                "scrapeOptions": {"onlyMainContent": request.options.only_main_content},
            }
            if request.options.max_depth:
                params["maxDepth"] = request.options.max_depth

            # Add optional prompt for Firecrawl v2 crawl-params compatibility
            if request.options.prompt:
                params["prompt"] = request.options.prompt

        job_id = firecrawl_app.crawl_url(request.url, params)
        website_crawl_time_cache_key = f"website_crawl_{job_id}"
@ -86,12 +86,16 @@ class WorkflowAppService:
            ),
        )
        if created_by_account:
            account = session.scalar(select(Account).where(Account.email == created_by_account))
            if not account:
                raise ValueError(f"Account not found: {created_by_account}")

            stmt = stmt.join(
                Account,
                and_(
                    WorkflowAppLog.created_by == Account.id,
                    WorkflowAppLog.created_by_role == CreatorUserRole.ACCOUNT,
                    Account.email == created_by_account,
                    Account.id == account.id,
                ),
            )
@ -75,10 +75,7 @@
    </head>
    <body>
        <div class="container">
            <div class="header">
                <!-- Optional: Add a logo or a header image here -->
                <img src="https://assets.dify.ai/images/logo.png" alt="Dify Logo">
            </div>
            <div class="header"></div>
            <div class="content">
                <p class="content1">Dear {{ to }},</p>
                <p class="content2">{{ inviter_name }} is pleased to invite you to join our workspace on {{application_title}}, a platform specifically designed for LLM application development. On {{application_title}}, you can explore, create, and collaborate to build and operate AI applications.</p>
@ -789,6 +789,31 @@ class TestWorkflowAppService:
        assert result_account_filter["total"] == 3
        assert all(log.created_by_role == CreatorUserRole.ACCOUNT for log in result_account_filter["data"])

        # Test filtering by a changed account email
        original_email = account.email
        new_email = "changed@example.com"
        account.email = new_email
        db_session_with_containers.commit()

        assert account.email == new_email

        # Results for the new email are expected to match those for the original email
        result_with_new_email = service.get_paginate_workflow_app_logs(
            session=db_session_with_containers, app_model=app, created_by_account=new_email, page=1, limit=20
        )
        assert result_with_new_email["total"] == 3
        assert all(log.created_by_role == CreatorUserRole.ACCOUNT for log in result_with_new_email["data"])

        # The old email is no longer bound to any account, so it should raise ValueError
        with pytest.raises(ValueError) as exc_info:
            service.get_paginate_workflow_app_logs(
                session=db_session_with_containers, app_model=app, created_by_account=original_email, page=1, limit=20
            )
        assert "Account not found" in str(exc_info.value)

        account.email = original_email
        db_session_with_containers.commit()

        # Test filtering by non-existent session ID
        result_no_session = service.get_paginate_workflow_app_logs(
            session=db_session_with_containers,
@ -799,15 +824,16 @@ class TestWorkflowAppService:
        )
        assert result_no_session["total"] == 0

        # Test filtering by non-existent account email
        result_no_account = service.get_paginate_workflow_app_logs(
            session=db_session_with_containers,
            app_model=app,
            created_by_account="nonexistent@example.com",
            page=1,
            limit=20,
        )
        assert result_no_account["total"] == 0
        # Filtering by a non-existent account email is invalid input and should raise ValueError
        with pytest.raises(ValueError) as exc_info:
            service.get_paginate_workflow_app_logs(
                session=db_session_with_containers,
                app_model=app,
                created_by_account="nonexistent@example.com",
                page=1,
                limit=20,
            )
        assert "Account not found" in str(exc_info.value)

    def test_get_paginate_workflow_app_logs_with_uuid_keyword_search(
        self, db_session_with_containers, mock_external_service_dependencies
@ -1057,15 +1083,15 @@ class TestWorkflowAppService:
        assert len(result_no_session["data"]) == 0

        # Test with account email that doesn't exist
        result_no_account = service.get_paginate_workflow_app_logs(
            session=db_session_with_containers,
            app_model=app,
            created_by_account="nonexistent@example.com",
            page=1,
            limit=20,
        )
        assert result_no_account["total"] == 0
        assert len(result_no_account["data"]) == 0
        with pytest.raises(ValueError) as exc_info:
            service.get_paginate_workflow_app_logs(
                session=db_session_with_containers,
                app_model=app,
                created_by_account="nonexistent@example.com",
                page=1,
                limit=20,
            )
        assert "Account not found" in str(exc_info.value)

    def test_get_paginate_workflow_app_logs_with_complex_query_combinations(
        self, db_session_with_containers, mock_external_service_dependencies
@ -110,19 +110,6 @@ class TestAlibabaCloudMySQLVector(unittest.TestCase):
        assert mock_cursor.execute.call_count >= 3  # CREATE TABLE + 2 indexes
        mock_redis.set.assert_called_once()

    def test_config_validation(self):
        """Test configuration validation."""
        # Test missing required fields
        with pytest.raises(ValueError):
            AlibabaCloudMySQLVectorConfig(
                host="",  # Empty host should raise error
                port=3306,
                user="test",
                password="test",
                database="test",
                max_connection=5,
            )

    @patch(
        "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool"
    )
@ -718,5 +705,29 @@ class TestAlibabaCloudMySQLVector(unittest.TestCase):
        mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}]


@pytest.mark.parametrize(
    "invalid_config_override",
    [
        {"host": ""},  # Test empty host
        {"port": 0},  # Test invalid port
        {"max_connection": 0},  # Test invalid max_connection
    ],
)
def test_config_validation_parametrized(invalid_config_override):
    """Test configuration validation for various invalid inputs using parametrize."""
    config = {
        "host": "localhost",
        "port": 3306,
        "user": "test",
        "password": "test",
        "database": "test",
        "max_connection": 5,
    }
    config.update(invalid_config_override)

    with pytest.raises(ValueError):
        AlibabaCloudMySQLVectorConfig(**config)


if __name__ == "__main__":
    unittest.main()
@ -0,0 +1,29 @@
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import ToolEntity, ToolIdentity, ToolInvokeMessage


def _make_identity() -> ToolIdentity:
    return ToolIdentity(
        author="author",
        name="tool",
        label=I18nObject(en_US="Label"),
        provider="builtin",
    )


def test_log_message_metadata_none_defaults_to_empty_dict():
    log_message = ToolInvokeMessage.LogMessage(
        id="log-1",
        label="Log entry",
        status=ToolInvokeMessage.LogMessage.LogStatus.START,
        data={},
        metadata=None,
    )

    assert log_message.metadata == {}


def test_tool_entity_output_schema_none_defaults_to_empty_dict():
    entity = ToolEntity(identity=_make_identity(), output_schema=None)

    assert entity.output_schema == {}

api/uv.lock
@ -587,16 +587,16 @@ wheels = [

[[package]]
name = "boto3-stubs"
version = "1.40.50"
version = "1.40.51"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "botocore-stubs" },
    { name = "types-s3transfer" },
    { name = "typing-extensions", marker = "python_full_version < '3.12'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/35/c8/06584145c4ccc80e3297a97874bfaa43e6b2fb9f8a69bcc38e29a1457bf5/boto3_stubs-1.40.50.tar.gz", hash = "sha256:29828adfcb8629b5e285468eb89610f1fc71f964ad0913de3049a0a9d5de0be1", size = 100836, upload-time = "2025-10-10T20:32:34.867Z" }
sdist = { url = "https://files.pythonhosted.org/packages/82/4d/b07f9ee0fe432fa8ec6dc368ee7a0409e2b6d9df2c5a2a88265c9b6fd878/boto3_stubs-1.40.51.tar.gz", hash = "sha256:0281e820813a310954e15fb7c1d470c24c34c1cccc7b1ddad977fa293a1080a9", size = 100890, upload-time = "2025-10-13T19:25:36.126Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/7b/69/f18c7135dc8a2b74e21b4a2375fa455e4d9e7e47f7838bc175d52005054a/boto3_stubs-1.40.50-py3-none-any.whl", hash = "sha256:01b9c67df62f26371a4a7473c616eece988a5305e7f7cb3fbc014d178685ac4e", size = 69689, upload-time = "2025-10-10T20:32:25.77Z" },
    { url = "https://files.pythonhosted.org/packages/d3/2e/4476431f11fc3bf7a7e0f4f5c275f17607aa127da7c0d8685a4dc6bf6291/boto3_stubs-1.40.51-py3-none-any.whl", hash = "sha256:896d0ffaa298ce1749eea1a54946320a0f4e07c6912f8e1f8c0744a708ee25a4", size = 69709, upload-time = "2025-10-13T19:25:23.116Z" },
]

[package.optional-dependencies]

@ -620,14 +620,14 @@ wheels = [

[[package]]
name = "botocore-stubs"
version = "1.40.50"
version = "1.40.51"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "types-awscrt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/20/4b/86ad2d24ea36eed159c8e1f85a2645bfeedae34ccb8c77ea8c99abbd66d1/botocore_stubs-1.40.50.tar.gz", hash = "sha256:d772b2d3aea6b4e464963fe45b2d504eee7bc3842f047cebbae5492b3993e0fd", size = 42250, upload-time = "2025-10-11T23:08:59.925Z" }
sdist = { url = "https://files.pythonhosted.org/packages/55/ca/429fadb6e037cb7b300d508a0b24b59a71961db12539e21749cbec7e7422/botocore_stubs-1.40.51.tar.gz", hash = "sha256:8ddbeb1f68e39382533bb53f3b968d29e640406016af00ad8bbd6e1a2bd59536", size = 42249, upload-time = "2025-10-13T20:26:57.777Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/b9/c1/4a736155b2d5dd7fdd09af8fba9ed59693c565d6e2bc1b5adc769da36cb5/botocore_stubs-1.40.50-py3-none-any.whl", hash = "sha256:7cb8d636e061e600929cd03339c3bbc162c21435b4bfeb6413cf7b0b612e7de0", size = 66541, upload-time = "2025-10-11T23:08:57.678Z" },
    { url = "https://files.pythonhosted.org/packages/c9/b9/5f1296bc46f293f284a1a6259f3c1f21f4161088dc6f70428698841b56a7/botocore_stubs-1.40.51-py3-none-any.whl", hash = "sha256:9a028104979205c9be0b68bb59ba679e4fe452e017eec3d40f6c2b41c590a73c", size = 66541, upload-time = "2025-10-13T20:26:55.559Z" },
]

[[package]]

@ -1667,7 +1667,7 @@ vdb = [
    { name = "tidb-vector", specifier = "==0.0.9" },
    { name = "upstash-vector", specifier = "==0.6.0" },
    { name = "volcengine-compat", specifier = "~=1.0.0" },
    { name = "weaviate-client", specifier = "~=3.24.0" },
    { name = "weaviate-client", specifier = ">=4.0.0,<5.0.0" },
    { name = "xinference-client", specifier = "~=1.2.2" },
]
@ -6901,16 +6901,20 @@ wheels = [

[[package]]
name = "weaviate-client"
version = "3.24.2"
version = "4.17.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "authlib" },
    { name = "requests" },
    { name = "deprecation" },
    { name = "grpcio" },
    { name = "httpx" },
    { name = "protobuf" },
    { name = "pydantic" },
    { name = "validators" },
]
sdist = { url = "https://files.pythonhosted.org/packages/1f/c1/3285a21d8885f2b09aabb65edb9a8e062a35c2d7175e1bb024fa096582ab/weaviate-client-3.24.2.tar.gz", hash = "sha256:6914c48c9a7e5ad0be9399271f9cb85d6f59ab77476c6d4e56a3925bf149edaa", size = 199332, upload-time = "2023-10-04T08:37:54.26Z" }
sdist = { url = "https://files.pythonhosted.org/packages/bd/0e/e4582b007427187a9fde55fa575db4b766c81929d2b43a3dd8becce50567/weaviate_client-4.17.0.tar.gz", hash = "sha256:731d58d84b0989df4db399b686357ed285fb95971a492ccca8dec90bb2343c51", size = 769019, upload-time = "2025-09-26T11:20:27.381Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/ab/98/3136d05f93e30cf29e1db280eaadf766df18d812dfe7994bcced653b2340/weaviate_client-3.24.2-py3-none-any.whl", hash = "sha256:bc50ca5fcebcd48de0d00f66700b0cf7c31a97c4cd3d29b4036d77c5d1d9479b", size = 107968, upload-time = "2023-10-04T08:37:52.511Z" },
    { url = "https://files.pythonhosted.org/packages/5b/c5/2da3a45866da7a935dab8ad07be05dcaee48b3ad4955144583b651929be7/weaviate_client-4.17.0-py3-none-any.whl", hash = "sha256:60e4a355b90537ee1e942ab0b76a94750897a13d9cf13c5a6decbd166d0ca8b5", size = 582763, upload-time = "2025-09-26T11:20:25.864Z" },
]

[[package]]
@ -329,7 +329,7 @@ services:

  # The Weaviate vector store.
  weaviate:
    image: semitechnologies/weaviate:1.19.0
    image: semitechnologies/weaviate:1.27.0
    profiles:
      - ""
      - weaviate

@ -181,7 +181,7 @@ services:

  # The Weaviate vector store.
  weaviate:
    image: semitechnologies/weaviate:1.19.0
    image: semitechnologies/weaviate:1.27.0
    profiles:
      - ""
      - weaviate

@ -206,6 +206,7 @@ services:
      AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai}
    ports:
      - "${EXPOSE_WEAVIATE_PORT:-8080}:8080"
      - "${EXPOSE_WEAVIATE_GRPC_PORT:-50051}:50051"

  networks:
    # create a network between sandbox, api and ssrf_proxy that cannot be reached from outside
@ -0,0 +1,9 @@
services:
  api:
    volumes:
      - ../api/core/rag/datasource/vdb/weaviate/weaviate_vector.py:/app/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py:ro
    command: >
      sh -c "
        pip install --no-cache-dir 'weaviate>=4.0.0' &&
        /bin/bash /entrypoint.sh
      "

@ -936,7 +936,7 @@ services:

  # The Weaviate vector store.
  weaviate:
    image: semitechnologies/weaviate:1.19.0
    image: semitechnologies/weaviate:1.27.0
    profiles:
      - ""
      - weaviate
@@ -1,10 +1,11 @@
'use client'
import type { FC } from 'react'
import React, { useState } from 'react'
import React, { useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useBoolean } from 'ahooks'
import { useContext } from 'use-context-selector'
import produce from 'immer'
import { ReactSortable } from 'react-sortablejs'
import Panel from '../base/feature-panel'
import EditModal from './config-modal'
import VarItem from './var-item'

@@ -22,6 +23,7 @@ import { useModalContext } from '@/context/modal-context'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import type { InputVar } from '@/app/components/workflow/types'
import { InputVarType } from '@/app/components/workflow/types'
import cn from '@/utils/classnames'

export const ADD_EXTERNAL_DATA_TOOL = 'ADD_EXTERNAL_DATA_TOOL'
@@ -218,6 +220,16 @@ const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, readonly, onPromptVar
    showEditModal()
  }

  const promptVariablesWithIds = useMemo(() => promptVariables.map((item) => {
    return {
      id: item.key,
      variable: { ...item },
    }
  }), [promptVariables])

  const canDrag = !readonly && promptVariables.length > 1

  return (
    <Panel
      className="mt-2"
@@ -245,18 +257,32 @@ const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, readonly, onPromptVar
      )}
      {hasVar && (
        <div className='mt-1 px-3 pb-3'>
          {promptVariables.map(({ key, name, type, required, config, icon, icon_background }, index) => (
            <VarItem
              key={index}
              readonly={readonly}
              name={key}
              label={name}
              required={!!required}
              type={type}
              onEdit={() => handleConfig({ type, key, index, name, config, icon, icon_background })}
              onRemove={() => handleRemoveVar(index)}
            />
          ))}
          <ReactSortable
            className='space-y-1'
            list={promptVariablesWithIds}
            setList={(list) => { onPromptVariablesChange?.(list.map(item => item.variable)) }}
            handle='.handle'
            ghostClass='opacity-50'
            animation={150}
          >
            {promptVariablesWithIds.map((item, index) => {
              const { key, name, type, required, config, icon, icon_background } = item.variable
              return (
                <VarItem
                  className={cn(canDrag && 'handle')}
                  key={key}
                  readonly={readonly}
                  name={key}
                  label={name}
                  required={!!required}
                  type={type}
                  onEdit={() => handleConfig({ type, key, index, name, config, icon, icon_background })}
                  onRemove={() => handleRemoveVar(index)}
                  canDrag={canDrag}
                />
              )
            })}
          </ReactSortable>
        </div>
      )}
@@ -3,6 +3,7 @@ import type { FC } from 'react'
import React, { useState } from 'react'
import {
  RiDeleteBinLine,
  RiDraggable,
  RiEditLine,
} from '@remixicon/react'
import type { IInputTypeIconProps } from './input-type-icon'

@@ -12,6 +13,7 @@ import Badge from '@/app/components/base/badge'
import cn from '@/utils/classnames'

type ItemProps = {
  className?: string
  readonly?: boolean
  name: string
  label: string

@@ -19,9 +21,11 @@ type ItemProps = {
  type: string
  onEdit: () => void
  onRemove: () => void
  canDrag?: boolean
}

const VarItem: FC<ItemProps> = ({
  className,
  readonly,
  name,
  label,

@@ -29,12 +33,16 @@ const VarItem: FC<ItemProps> = ({
  type,
  onEdit,
  onRemove,
  canDrag,
}) => {
  const [isDeleting, setIsDeleting] = useState(false)

  return (
    <div className={cn('group relative mb-1 flex h-[34px] w-full items-center rounded-lg border-[0.5px] border-components-panel-border-subtle bg-components-panel-on-panel-item-bg pl-2.5 pr-3 shadow-xs last-of-type:mb-0 hover:bg-components-panel-on-panel-item-bg-hover hover:shadow-sm', isDeleting && 'border-state-destructive-border hover:bg-state-destructive-hover', readonly && 'cursor-not-allowed opacity-30')}>
      <VarIcon className='mr-1 h-4 w-4 shrink-0 text-text-accent' />
    <div className={cn('group relative mb-1 flex h-[34px] w-full items-center rounded-lg border-[0.5px] border-components-panel-border-subtle bg-components-panel-on-panel-item-bg pl-2.5 pr-3 shadow-xs last-of-type:mb-0 hover:bg-components-panel-on-panel-item-bg-hover hover:shadow-sm', isDeleting && 'border-state-destructive-border hover:bg-state-destructive-hover', readonly && 'cursor-not-allowed opacity-30', className)}>
      <VarIcon className={cn('mr-1 h-4 w-4 shrink-0 text-text-accent', canDrag && 'group-hover:opacity-0')} />
      {canDrag && (
        <RiDraggable className='absolute left-3 top-3 hidden h-3 w-3 cursor-pointer text-text-tertiary group-hover:block' />
      )}
      <div className='flex w-0 grow items-center'>
        <div className='truncate' title={`${name} · ${label}`}>
          <span className='system-sm-medium text-text-secondary'>{name}</span>
@@ -1,13 +0,0 @@
export { default as Chunk } from './Chunk'
export { default as Collapse } from './Collapse'
export { default as Divider } from './Divider'
export { default as File } from './File'
export { default as GeneralType } from './GeneralType'
export { default as LayoutRight2LineMod } from './LayoutRight2LineMod'
export { default as OptionCardEffectBlueLight } from './OptionCardEffectBlueLight'
export { default as OptionCardEffectBlue } from './OptionCardEffectBlue'
export { default as OptionCardEffectOrange } from './OptionCardEffectOrange'
export { default as OptionCardEffectPurple } from './OptionCardEffectPurple'
export { default as ParentChildType } from './ParentChildType'
export { default as SelectionMod } from './SelectionMod'
export { default as Watercrawl } from './Watercrawl'
@@ -1,5 +0,0 @@
.wrapper {
  display: inline-flex;
  background: url(~@/app/components/base/icons/assets/image/llm/baichuan-text-cn.png) center center no-repeat;
  background-size: contain;
}

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import cn from '@/utils/classnames'
import s from './BaichuanTextCn.module.css'

const Icon = (
  {
    ref,
    className,
    ...restProps
  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
    ref?: React.RefObject<HTMLSpanElement>;
  },
) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />

Icon.displayName = 'BaichuanTextCn'

export default Icon

@@ -1,5 +0,0 @@
.wrapper {
  display: inline-flex;
  background: url(~@/app/components/base/icons/assets/image/llm/minimax.png) center center no-repeat;
  background-size: contain;
}

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import cn from '@/utils/classnames'
import s from './Minimax.module.css'

const Icon = (
  {
    ref,
    className,
    ...restProps
  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
    ref?: React.RefObject<HTMLSpanElement>;
  },
) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />

Icon.displayName = 'Minimax'

export default Icon

@@ -1,5 +0,0 @@
.wrapper {
  display: inline-flex;
  background: url(~@/app/components/base/icons/assets/image/llm/minimax-text.png) center center no-repeat;
  background-size: contain;
}

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import cn from '@/utils/classnames'
import s from './MinimaxText.module.css'

const Icon = (
  {
    ref,
    className,
    ...restProps
  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
    ref?: React.RefObject<HTMLSpanElement>;
  },
) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />

Icon.displayName = 'MinimaxText'

export default Icon

@@ -1,5 +0,0 @@
.wrapper {
  display: inline-flex;
  background: url(~@/app/components/base/icons/assets/image/llm/tongyi.png) center center no-repeat;
  background-size: contain;
}

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import cn from '@/utils/classnames'
import s from './Tongyi.module.css'

const Icon = (
  {
    ref,
    className,
    ...restProps
  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
    ref?: React.RefObject<HTMLSpanElement>;
  },
) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />

Icon.displayName = 'Tongyi'

export default Icon

@@ -1,5 +0,0 @@
.wrapper {
  display: inline-flex;
  background: url(~@/app/components/base/icons/assets/image/llm/tongyi-text.png) center center no-repeat;
  background-size: contain;
}

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import cn from '@/utils/classnames'
import s from './TongyiText.module.css'

const Icon = (
  {
    ref,
    className,
    ...restProps
  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
    ref?: React.RefObject<HTMLSpanElement>;
  },
) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />

Icon.displayName = 'TongyiText'

export default Icon

@@ -1,5 +0,0 @@
.wrapper {
  display: inline-flex;
  background: url(~@/app/components/base/icons/assets/image/llm/tongyi-text-cn.png) center center no-repeat;
  background-size: contain;
}

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import cn from '@/utils/classnames'
import s from './TongyiTextCn.module.css'

const Icon = (
  {
    ref,
    className,
    ...restProps
  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
    ref?: React.RefObject<HTMLSpanElement>;
  },
) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />

Icon.displayName = 'TongyiTextCn'

export default Icon

@@ -1,5 +0,0 @@
.wrapper {
  display: inline-flex;
  background: url(~@/app/components/base/icons/assets/image/llm/wxyy.png) center center no-repeat;
  background-size: contain;
}

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import cn from '@/utils/classnames'
import s from './Wxyy.module.css'

const Icon = (
  {
    ref,
    className,
    ...restProps
  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
    ref?: React.RefObject<HTMLSpanElement>;
  },
) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />

Icon.displayName = 'Wxyy'

export default Icon

@@ -1,5 +0,0 @@
.wrapper {
  display: inline-flex;
  background: url(~@/app/components/base/icons/assets/image/llm/wxyy-text.png) center center no-repeat;
  background-size: contain;
}

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import cn from '@/utils/classnames'
import s from './WxyyText.module.css'

const Icon = (
  {
    ref,
    className,
    ...restProps
  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
    ref?: React.RefObject<HTMLSpanElement>;
  },
) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />

Icon.displayName = 'WxyyText'

export default Icon

@@ -1,5 +0,0 @@
.wrapper {
  display: inline-flex;
  background: url(~@/app/components/base/icons/assets/image/llm/wxyy-text-cn.png) center center no-repeat;
  background-size: contain;
}

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import cn from '@/utils/classnames'
import s from './WxyyTextCn.module.css'

const Icon = (
  {
    ref,
    className,
    ...restProps
  }: React.DetailedHTMLProps<React.HTMLAttributes<HTMLSpanElement>, HTMLSpanElement> & {
    ref?: React.RefObject<HTMLSpanElement>;
  },
) => <span className={cn(s.wrapper, className)} {...restProps} ref={ref} />

Icon.displayName = 'WxyyTextCn'

export default Icon

@@ -1,9 +0,0 @@
export { default as BaichuanTextCn } from './BaichuanTextCn'
export { default as MinimaxText } from './MinimaxText'
export { default as Minimax } from './Minimax'
export { default as TongyiTextCn } from './TongyiTextCn'
export { default as TongyiText } from './TongyiText'
export { default as Tongyi } from './Tongyi'
export { default as WxyyTextCn } from './WxyyTextCn'
export { default as WxyyText } from './WxyyText'
export { default as Wxyy } from './Wxyy'
@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Checked.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Checked'

export default Icon

@@ -1 +0,0 @@
export { default as Checked } from './Checked'
@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Google.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Google'

export default Icon

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './WebReader.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'WebReader'

export default Icon

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Wikipedia.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Wikipedia'

export default Icon
@@ -1,7 +0,0 @@
export { default as Google } from './Google'
export { default as PartnerDark } from './PartnerDark'
export { default as PartnerLight } from './PartnerLight'
export { default as VerifiedDark } from './VerifiedDark'
export { default as VerifiedLight } from './VerifiedLight'
export { default as WebReader } from './WebReader'
export { default as Wikipedia } from './Wikipedia'

@@ -18,3 +18,4 @@ const Icon = (
Icon.displayName = 'DataSet'

export default Icon
@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Loading.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Loading'

export default Icon

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Search.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Search'

export default Icon

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './ThoughtList.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'ThoughtList'

export default Icon

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './WebReader.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'WebReader'

export default Icon
@@ -1,5 +1,2 @@
export { default as DataSet } from './DataSet'
export { default as Loading } from './Loading'
export { default as Search } from './Search'
export { default as ThoughtList } from './ThoughtList'
export { default as WebReader } from './WebReader'
@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './AlignLeft01.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'AlignLeft01'

export default Icon

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './AlignRight01.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'AlignRight01'

export default Icon

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Grid01.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Grid01'

export default Icon
@@ -1,4 +1 @@
export { default as AlignLeft01 } from './AlignLeft01'
export { default as AlignRight01 } from './AlignRight01'
export { default as Grid01 } from './Grid01'
export { default as LayoutGrid02 } from './LayoutGrid02'
@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Route.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Route'

export default Icon

@@ -1 +0,0 @@
export { default as Route } from './Route'
@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './User01.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'User01'

export default Icon

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Users01.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Users01'

export default Icon

@@ -1,2 +0,0 @@
export { default as User01 } from './User01'
export { default as Users01 } from './Users01'
@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Stars02.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Stars02'

export default Icon

@@ -1 +0,0 @@
export { default as Stars02 } from './Stars02'
@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './ChevronDown.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'ChevronDown'

export default Icon

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './HighPriority.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'HighPriority'

export default Icon

@@ -1,2 +0,0 @@
export { default as ChevronDown } from './ChevronDown'
export { default as HighPriority } from './HighPriority'
@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './Grid01.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Grid01'

export default Icon

@@ -1 +0,0 @@
export { default as Grid01 } from './Grid01'
@@ -767,8 +767,8 @@ The text generation application offers non-session support and is ideal for tran
  <Col>
    <CodeGroup
      title="Request"
      tag="POST"
      label="/meta"
      tag="GET"
      label="/site"
      targetCode={`curl -X GET '${props.appDetail.api_base_url}/site' \\
      -H 'Authorization: Bearer {api_key}'`}
    />
@@ -764,8 +764,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
  <Col>
    <CodeGroup
      title="Request"
      tag="POST"
      label="/meta"
      tag="GET"
      label="/site"
      targetCode={`curl -X GET '${props.appDetail.api_base_url}/site' \\
      -H 'Authorization: Bearer {api_key}'`}
    />
@@ -728,8 +728,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
  <Col>
    <CodeGroup
      title="Request"
      tag="POST"
      label="/meta"
      tag="GET"
      label="/site"
      targetCode={`curl -X GET '${props.appDetail.api_base_url}/site' \\
      -H 'Authorization: Bearer {api_key}'`}
    />
@@ -199,7 +199,7 @@ Chat applications support session persistence, allowing previous chat history to
    --header 'Authorization: Bearer {api_key}' \\
    --header 'Content-Type: application/json' \\
    --data-raw '{
    "inputs": ${JSON.stringify(props.inputs)},
    "inputs": ${JSON.stringify(props.inputs)},
    "query": "What are the specs of the iPhone 13 Pro Max?",
    "response_mode": "streaming",
    "conversation_id": "",
@@ -1182,7 +1182,7 @@ Chat applications support session persistence, allowing previous chat history to
    --header 'Content-Type: application/json' \\
    --data-raw '{
      "value": "Updated Value",
      "user": "abc-123"
      "user": "abc-123"
    }'`}
  />
@@ -1599,8 +1599,8 @@ Chat applications support session persistence, allowing previous chat history to
  <Col>
    <CodeGroup
      title="Request"
      tag="POST"
      label="/meta"
      tag="GET"
      label="/site"
      targetCode={`curl -X GET '${props.appDetail.api_base_url}/site' \\
      -H 'Authorization: Bearer {api_key}'`}
    />
@@ -1586,8 +1586,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
  <Col>
    <CodeGroup
      title="リクエスト"
      tag="POST"
      label="/meta"
      tag="GET"
      label="/site"
      targetCode={`curl -X GET '${props.appDetail.api_base_url}/site' \\
      -H 'Authorization: Bearer {api_key}'`}
    />
|||
|
|
@ -1188,7 +1188,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
|
|||
--header 'Content-Type: application/json' \\
|
||||
--data-raw '{
|
||||
"value": "Updated Value",
|
||||
"user": "abc-123"
|
||||
"user": "abc-123"
|
||||
}'`}
|
||||
/>
|
||||
|
||||
|
|
@@ -1579,8 +1579,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
  <Col>
    <CodeGroup
      title="Request"
      tag="POST"
      label="/meta"
      tag="GET"
      label="/site"
      targetCode={`curl -X GET '${props.appDetail.api_base_url}/site' \\
      -H 'Authorization: Bearer {api_key}'`}
    />
@@ -1025,8 +1025,8 @@ Workflow applications offers non-session support and is ideal for translation, a
  <Col>
    <CodeGroup
      title="Request"
      tag="POST"
      label="/meta"
      tag="GET"
      label="/site"
      targetCode={`curl -X GET '${props.appDetail.api_base_url}/site' \\
      -H 'Authorization: Bearer {api_key}'`}
    />
|
|
@ -1021,8 +1021,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
|
|||
<Col>
|
||||
<CodeGroup
|
||||
title="Request"
|
||||
tag="POST"
|
||||
label="/meta"
|
||||
tag="GET"
|
||||
label="/site"
|
||||
targetCode={`curl -X GET '${props.appDetail.api_base_url}/site' \\
|
||||
-H 'Authorization: Bearer {api_key}'`}
|
||||
/>
|
||||
|
|
|
|||
|
|
@ -182,7 +182,7 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
|
|||
--header 'Authorization: Bearer {api_key}' \\
|
||||
--header 'Content-Type: application/json' \\
|
||||
--data-raw '{
|
||||
"inputs": ${JSON.stringify(props.inputs)},
|
||||
"inputs": ${JSON.stringify(props.inputs)},
|
||||
"response_mode": "streaming",
|
||||
"user": "abc-123"
|
||||
}'`}
|
||||
|
|
@@ -1012,8 +1012,8 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
  <Col>
    <CodeGroup
      title="Request"
      tag="POST"
      label="/meta"
      tag="GET"
      label="/site"
      targetCode={`curl -X GET '${props.appDetail.api_base_url}/site' \\
      -H 'Authorization: Bearer {api_key}'`}
    />
|||
|
|
@ -118,8 +118,20 @@ const FormItem: FC<Props> = ({
|
|||
<div className={cn(className)}>
|
||||
{!isArrayLikeType && !isBooleanType && (
|
||||
<div className='system-sm-semibold mb-1 flex h-6 items-center gap-1 text-text-secondary'>
|
||||
<div className='truncate'>{typeof payload.label === 'object' ? nodeKey : payload.label}</div>
|
||||
{!payload.required && <span className='system-xs-regular text-text-tertiary'>{t('workflow.panel.optional')}</span>}
|
||||
<div className='truncate'>
|
||||
{typeof payload.label === 'object' ? nodeKey : payload.label}
|
||||
</div>
|
||||
{payload.hide === true ? (
|
||||
<span className='system-xs-regular text-text-tertiary'>
|
||||
{t('workflow.panel.optional_and_hidden')}
|
||||
</span>
|
||||
) : (
|
||||
!payload.required && (
|
||||
<span className='system-xs-regular text-text-tertiary'>
|
||||
{t('workflow.panel.optional')}
|
||||
</span>
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
<div className='grow'>
|
||||
|
|
|
|||
|
|
@@ -61,7 +61,6 @@ const VarList: FC<Props> = ({
      return
    }
    if (list.some(item => item.variable?.trim() === newKey.trim())) {
      console.log('new key', newKey.trim())
      setToastHandle(Toast.notify({
        type: 'error',
        message: t('appDebug.varKeyError.keyAlreadyExists', { key: newKey }),
@@ -5,7 +5,6 @@ import produce from 'immer'
import { useTranslation } from 'react-i18next'
import VarItem from './var-item'
import { ChangeType, type InputVar, type MoreInfo } from '@/app/components/workflow/types'
import { v4 as uuid4 } from 'uuid'
import { ReactSortable } from 'react-sortablejs'
import { RiDraggable } from '@remixicon/react'
import cn from '@/utils/classnames'
@@ -71,9 +70,8 @@ const VarList: FC<Props> = ({
  }, [list, onChange])

  const listWithIds = useMemo(() => list.map((item) => {
    const id = uuid4()
    return {
      id,
      id: item.variable,
      variable: { ...item },
    }
  }), [list])
@@ -88,6 +86,8 @@ const VarList: FC<Props> = ({
    )
  }

  const canDrag = !readonly && varCount > 1

  return (
    <ReactSortable
      className='space-y-1'
@@ -97,30 +97,23 @@ const VarList: FC<Props> = ({
      ghostClass='opacity-50'
      animation={150}
    >
      {list.map((item, index) => {
        const canDrag = (() => {
          if (readonly)
            return false
          return varCount > 1
        })()
        return (
          <div key={index} className='group relative'>
            <VarItem
              className={cn(canDrag && 'handle')}
              readonly={readonly}
              payload={item}
              onChange={handleVarChange(index)}
              onRemove={handleVarRemove(index)}
              varKeys={list.map(item => item.variable)}
              canDrag={canDrag}
            />
            {canDrag && <RiDraggable className={cn(
              'handle absolute left-3 top-2.5 hidden h-3 w-3 cursor-pointer text-text-tertiary',
              'group-hover:block',
            )} />}
          </div>
        )
      })}
      {listWithIds.map((itemWithId, index) => (
        <div key={itemWithId.id} className='group relative'>
          <VarItem
            className={cn(canDrag && 'handle')}
            readonly={readonly}
            payload={itemWithId.variable}
            onChange={handleVarChange(index)}
            onRemove={handleVarRemove(index)}
            varKeys={list.map(item => item.variable)}
            canDrag={canDrag}
          />
          {canDrag && <RiDraggable className={cn(
            'handle absolute left-3 top-2.5 hidden h-3 w-3 cursor-pointer text-text-tertiary',
            'group-hover:block',
          )} />}
        </div>
      ))}
    </ReactSortable>
  )
}
@@ -39,7 +39,7 @@ const DebugAndPreview = () => {
  const selectedNode = nodes.find(node => node.data.selected)
  const startNode = nodes.find(node => node.data.type === BlockEnum.Start)
  const variables = startNode?.data.variables || []
  const visibleVariables = variables.filter(v => v.hide !== true)
  const visibleVariables = variables

  const [showConversationVariableModal, setShowConversationVariableModal] = useState(false)
@@ -14,10 +14,11 @@ import cn from '@/utils/classnames'
const UserInput = () => {
  const workflowStore = useWorkflowStore()
  const inputs = useStore(s => s.inputs)
  const showDebugAndPreviewPanel = useStore(s => s.showDebugAndPreviewPanel)
  const nodes = useNodes<StartNodeType>()
  const startNode = nodes.find(node => node.data.type === BlockEnum.Start)
  const variables = startNode?.data.variables || []
  const visibleVariables = variables.filter(v => v.hide !== true)
  const visibleVariables = showDebugAndPreviewPanel ? variables : variables.filter(v => v.hide !== true)

  const handleValueChange = (variable: string, v: string) => {
    const {
@@ -160,6 +160,10 @@ const translation = {
      title: 'Cloud-Monitor',
      description: 'Die vollständig verwaltete und wartungsfreie Observability-Plattform von Alibaba Cloud ermöglicht eine sofortige Überwachung, Verfolgung und Bewertung von Dify-Anwendungen.',
    },
    tencent: {
      title: 'Tencent APM',
      description: 'Tencent Application Performance Monitoring bietet umfassendes Tracing und multidimensionale Analyse für LLM-Anwendungen.',
    },
  },
  answerIcon: {
    descriptionInExplore: 'Gibt an, ob das web app Symbol zum Ersetzen 🤖 in Explore verwendet werden soll',
@@ -354,6 +354,7 @@ const translation = {
    optional: '(optional)',
    maximize: 'Maximize Canvas',
    minimize: 'Exit Full Screen',
    optional_and_hidden: '(optional & hidden)',
  },
  nodes: {
    common: {
@@ -163,6 +163,10 @@ const translation = {
      title: 'Monitor de Nubes',
      description: 'La plataforma de observabilidad totalmente gestionada y sin mantenimiento proporcionada por Alibaba Cloud, permite la monitorización, trazado y evaluación de aplicaciones Dify de manera inmediata.',
    },
    tencent: {
      title: 'Tencent APM',
      description: 'Tencent Application Performance Monitoring proporciona rastreo integral y análisis multidimensional para aplicaciones LLM.',
    },
  },
  answerIcon: {
    title: 'Usar el icono de la aplicación web para reemplazar 🤖',
@@ -171,6 +171,10 @@ const translation = {
      title: 'نظارت بر ابر',
      description: 'پلتفرم مشاهده‌پذیری کاملاً مدیریت‌شده و بدون نیاز به نگهداری که توسط Alibaba Cloud ارائه شده، امکان نظارت، ردیابی و ارزیابی برنامه‌های Dify را به‌صورت آماده و با تنظیمات اولیه فراهم می‌کند.',
    },
    tencent: {
      title: 'تنست ای‌پی‌ام',
      description: 'نظارت بر عملکرد برنامه‌های Tencent تحلیل‌های جامع و ردیابی چندبعدی برای برنامه‌های LLM ارائه می‌دهد.',
    },
  },
  answerIcon: {
    descriptionInExplore: 'آیا از نماد web app برای جایگزینی 🤖 در Explore استفاده کنیم یا خیر',
@@ -163,6 +163,10 @@ const translation = {
      title: 'Surveillance Cloud',
      description: 'La plateforme d\'observabilité entièrement gérée et sans maintenance fournie par Alibaba Cloud permet une surveillance, un traçage et une évaluation prêts à l\'emploi des applications Dify.',
    },
    tencent: {
      title: 'Tencent APM',
      description: 'Tencent Application Performance Monitoring fournit une traçabilité complète et une analyse multidimensionnelle pour les applications LLM.',
    },
  },
  answerIcon: {
    description: 'S’il faut utiliser l’icône web app pour remplacer 🤖 dans l’application partagée',
@@ -163,6 +163,10 @@ const translation = {
      title: 'क्लाउड मॉनिटर',
      description: 'अलीबाबा क्लाउड द्वारा प्रदान की गई पूरी तरह से प्रबंधित और रखरखाव-मुक्त अवलोकन प्लेटफ़ॉर्म, Dify अनुप्रयोगों की स्वचालित निगरानी, ट्रेसिंग और मूल्यांकन का सक्षम बनाता है।',
    },
    tencent: {
      title: 'टेनसेंट एपीएम',
      description: 'Tencent एप्लिकेशन परफॉर्मेंस मॉनिटरिंग LLM एप्लिकेशन के लिए व्यापक ट्रेसिंग और बहु-आयामी विश्लेषण प्रदान करता है।',
    },
  },
  answerIcon: {
    title: 'बदलने 🤖 के लिए web app चिह्न का उपयोग करें',
@@ -155,6 +155,10 @@ const translation = {
    description: 'Mengonfigurasi penyedia LLMOps Pihak Ketiga dan melacak performa aplikasi.',
    inUse: 'Sedang digunakan',
    tracingDescription: 'Tangkap konteks lengkap eksekusi aplikasi, termasuk panggilan LLM, konteks, perintah, permintaan HTTP, dan lainnya, ke platform pelacakan pihak ketiga.',
    tencent: {
      title: 'Tencent APM',
      description: 'Tencent Application Performance Monitoring menyediakan pelacakan komprehensif dan analisis multi-dimensi untuk aplikasi LLM.',
    },
  },
  appSelector: {
    placeholder: 'Pilih aplikasi...',
@@ -169,6 +169,10 @@ const translation = {
      title: 'Monitoraggio Cloud',
      description: 'La piattaforma di osservabilità completamente gestita e senza manutenzione fornita da Alibaba Cloud consente il monitoraggio, il tracciamento e la valutazione delle applicazioni Dify fin da subito.',
    },
    tencent: {
      title: 'Tencent APM',
      description: 'Tencent Application Performance Monitoring fornisce tracciamento completo e analisi multidimensionale per le applicazioni LLM.',
    },
  },
  answerIcon: {
    description: 'Se utilizzare l\'icona web app per la sostituzione 🤖 nell\'applicazione condivisa',
@@ -175,6 +175,10 @@ const translation = {
      title: 'クラウドモニター',
      description: 'Alibaba Cloud が提供する完全管理型でメンテナンスフリーの可観測性プラットフォームは、Dify アプリケーションの即時監視、トレース、評価を可能にします。',
    },
    tencent: {
      title: 'テンセントAPM',
      description: 'Tencent アプリケーションパフォーマンスモニタリングは、LLM アプリケーションに対して包括的なトレーシングと多次元分析を提供します。',
    },
  },
  answerIcon: {
    title: 'Web アプリアイコンを使用して🤖を置き換える',
@@ -178,6 +178,10 @@ const translation = {
      title: '클라우드 모니터',
      description: '알리바바 클라우드에서 제공하는 완전 관리형 및 유지보수가 필요 없는 가시성 플랫폼은 Dify 애플리케이션의 모니터링, 추적 및 평가를 즉시 사용할 수 있도록 지원합니다.',
    },
    tencent: {
      title: '텐센트 APM',
      description: '텐센트 애플리케이션 성능 모니터링은 LLM 애플리케이션에 대한 포괄적인 추적 및 다차원 분석을 제공합니다.',
    },
  },
  answerIcon: {
    description:
@@ -164,6 +164,10 @@ const translation = {
      title: 'Monitor Chmury',
      description: 'W pełni zarządzana i wolna od konserwacji platforma obserwowalności oferowana przez Alibaba Cloud umożliwia gotowe monitorowanie, śledzenie i oceny aplikacji Dify.',
    },
    tencent: {
      title: 'Tencent APM',
      description: 'Tencent Application Performance Monitoring zapewnia kompleksowe śledzenie i wielowymiarową analizę dla aplikacji LLM.',
    },
  },
  answerIcon: {
    description: 'Czy w aplikacji udostępnionej ma być używana ikona aplikacji internetowej do zamiany 🤖.',
@@ -163,6 +163,10 @@ const translation = {
      title: 'Monitoramento em Nuvem',
      description: 'A plataforma de observabilidade totalmente gerenciada e sem manutenção fornecida pela Alibaba Cloud, permite monitoramento, rastreamento e avaliação prontos para uso de aplicações Dify.',
    },
    tencent: {
      title: 'Tencent APM',
      description: 'O Monitoramento de Desempenho de Aplicações da Tencent fornece rastreamento abrangente e análise multidimensional para aplicações LLM.',
    },
  },
  answerIcon: {
    descriptionInExplore: 'Se o ícone do web app deve ser usado para substituir 🤖 no Explore',
@@ -163,6 +163,10 @@ const translation = {
      description: 'Platforma de observabilitate SaaS oferită de Alibaba Cloud permite monitorizarea, urmărirea și evaluarea aplicațiilor Dify din cutie.',
      title: 'Monitorizarea Cloud',
    },
    tencent: {
      title: 'Tencent APM',
      description: 'Monitorizarea Performanței Aplicațiilor Tencent oferă trasabilitate cuprinzătoare și analiză multidimensională pentru aplicațiile LLM.',
    },
  },
  answerIcon: {
    descriptionInExplore: 'Dacă să utilizați pictograma web app pentru a înlocui 🤖 în Explore',
@@ -171,6 +171,10 @@ const translation = {
      title: 'Облачный монитор',
      description: 'Полностью управляемая и не требующая обслуживания платформа наблюдения, предоставляемая Alibaba Cloud, обеспечивает мониторинг, трассировку и оценку приложений Dify из коробки.',
    },
    tencent: {
      title: 'Tencent APM',
      description: 'Мониторинг производительности приложений Tencent предоставляет всестороннее отслеживание и многомерный анализ для приложений LLM.',
    },
  },
  answerIcon: {
    title: 'Использование значка web app для замены 🤖',
@@ -176,6 +176,10 @@ const translation = {
      title: 'Oblačni nadzor',
      description: 'Popolnoma upravljana in brez vzdrževanja platforma za opazovanje, ki jo zagotavlja Alibaba Cloud, omogoča takojšnje spremljanje, sledenje in ocenjevanje aplikacij Dify.',
    },
    tencent: {
      description: 'Tencent Application Performance Monitoring zagotavlja celovito sledenje in večdimenzionalno analizo za aplikacije LLM.',
      title: 'Tencent APM',
    },
  },
  mermaid: {
    handDrawn: 'Ročno narisano',
@@ -172,6 +172,10 @@ const translation = {
      title: 'การตรวจสอบคลาวด์',
      description: 'แพลตฟอร์มการสังเกตการณ์ที่จัดการโดย Alibaba Cloud ซึ่งไม่ต้องดูแลและบำรุงรักษา ช่วยให้สามารถติดตาม ตรวจสอบ และประเมินแอปพลิเคชัน Dify ได้ทันที',
    },
    tencent: {
      title: 'Tencent APM',
      description: 'การติดตามประสิทธิภาพแอปพลิเคชันของ Tencent มอบการตรวจสอบแบบครบวงจรและการวิเคราะห์หลายมิติสำหรับแอป LLM',
    },
  },
  mermaid: {
    handDrawn: 'วาดด้วยมือ',
Some files were not shown because too many files have changed in this diff.