Merge branch 'main' into feat/r2

jyong 2025-06-19 13:32:07 +08:00
commit f7fbded8b9
129 changed files with 3916 additions and 2760 deletions

View File

@ -84,10 +84,8 @@ jobs:
elasticsearch
oceanbase
- name: Check VDB Ready (TiDB, Oceanbase)
run: |
uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
uv run --project api python api/tests/integration_tests/vdb/oceanbase/check_oceanbase_ready.py
- name: Check VDB Ready (TiDB)
run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
- name: Test Vector Stores
run: uv run --project api bash dev/pytest/pytest_vdb.sh

1
.gitignore vendored
View File

@ -179,6 +179,7 @@ docker/volumes/pgvecto_rs/data/*
docker/volumes/couchbase/*
docker/volumes/oceanbase/*
docker/volumes/plugin_daemon/*
docker/volumes/matrixone/*
!docker/volumes/oceanbase/init.d
docker/nginx/conf.d/default.conf

View File

@ -137,7 +137,7 @@ WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
# Vector database configuration
# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase, opengauss, tablestore
# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase, opengauss, tablestore, matrixone
VECTOR_STORE=weaviate
# Weaviate configuration
@ -294,6 +294,13 @@ VIKINGDB_SCHEMA=http
VIKINGDB_CONNECTION_TIMEOUT=30
VIKINGDB_SOCKET_TIMEOUT=30
# Matrixone configuration
MATRIXONE_HOST=127.0.0.1
MATRIXONE_PORT=6001
MATRIXONE_USER=dump
MATRIXONE_PASSWORD=111
MATRIXONE_DATABASE=dify
# Lindorm configuration
LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
LINDORM_USERNAME=admin
@ -332,9 +339,11 @@ PROMPT_GENERATION_MAX_TOKENS=512
CODE_GENERATION_MAX_TOKENS=1024
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
# Mail configuration, support: resend, smtp
# Mail configuration, support: resend, smtp, sendgrid
MAIL_TYPE=
# If using SendGrid, use the 'from' field for authentication if necessary.
MAIL_DEFAULT_SEND_FROM=no-reply <no-reply@dify.ai>
# resend configuration
RESEND_API_KEY=
RESEND_API_URL=https://api.resend.com
# smtp configuration
@ -344,7 +353,8 @@ SMTP_USERNAME=123
SMTP_PASSWORD=abc
SMTP_USE_TLS=true
SMTP_OPPORTUNISTIC_TLS=false
# SendGrid configuration
SENDGRID_API_KEY=
# Sentry configuration
SENTRY_DSN=

View File

@ -281,6 +281,7 @@ def migrate_knowledge_vector_database():
VectorType.ELASTICSEARCH,
VectorType.OPENGAUSS,
VectorType.TABLESTORE,
VectorType.MATRIXONE,
}
lower_collection_vector_types = {
VectorType.ANALYTICDB,

View File

@ -609,7 +609,7 @@ class MailConfig(BaseSettings):
"""
MAIL_TYPE: Optional[str] = Field(
description="Email service provider type ('smtp' or 'resend'), default to None.",
description="Email service provider type ('smtp' or 'resend' or 'sendGrid), default to None.",
default=None,
)
@ -663,6 +663,11 @@ class MailConfig(BaseSettings):
default=50,
)
SENDGRID_API_KEY: Optional[str] = Field(
description="API key for SendGrid service",
default=None,
)
class RagEtlConfig(BaseSettings):
"""

View File

@ -24,6 +24,7 @@ from .vdb.couchbase_config import CouchbaseConfig
from .vdb.elasticsearch_config import ElasticsearchConfig
from .vdb.huawei_cloud_config import HuaweiCloudConfig
from .vdb.lindorm_config import LindormConfig
from .vdb.matrixone_config import MatrixoneConfig
from .vdb.milvus_config import MilvusConfig
from .vdb.myscale_config import MyScaleConfig
from .vdb.oceanbase_config import OceanBaseVectorConfig
@ -323,5 +324,6 @@ class MiddlewareConfig(
OpenGaussConfig,
TableStoreConfig,
DatasetQueueMonitorConfig,
MatrixoneConfig,
):
pass

View File

@ -0,0 +1,14 @@
from pydantic import BaseModel, Field
class MatrixoneConfig(BaseModel):
"""Matrixone vector database configuration."""
MATRIXONE_HOST: str = Field(default="localhost", description="Host address of the Matrixone server")
MATRIXONE_PORT: int = Field(default=6001, description="Port number of the Matrixone server")
MATRIXONE_USER: str = Field(default="dump", description="Username for authenticating with Matrixone")
MATRIXONE_PASSWORD: str = Field(default="111", description="Password for authenticating with Matrixone")
MATRIXONE_DATABASE: str = Field(default="dify", description="Name of the Matrixone database to connect to")
MATRIXONE_METRIC: str = Field(
default="l2", description="Distance metric type for vector similarity search (cosine or l2)"
)

View File

@ -208,7 +208,7 @@ class AnnotationBatchImportApi(Resource):
if len(request.files) > 1:
raise TooManyFilesError()
# check file type
if not file.filename or not file.filename.endswith(".csv"):
if not file.filename or not file.filename.lower().endswith(".csv"):
raise ValueError("Invalid file type. Only CSV files are allowed")
return AppAnnotationService.batch_import_app_annotations(app_id, file)

View File

@ -34,6 +34,20 @@ class WorkflowAppLogApi(Resource):
parser.add_argument(
"created_at__after", type=str, location="args", help="Filter logs created after this timestamp"
)
parser.add_argument(
"created_by_end_user_session_id",
type=str,
location="args",
required=False,
default=None,
)
parser.add_argument(
"created_by_account",
type=str,
location="args",
required=False,
default=None,
)
parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
args = parser.parse_args()
@ -57,6 +71,8 @@ class WorkflowAppLogApi(Resource):
created_at_after=args.created_at__after,
page=args.page,
limit=args.limit,
created_by_end_user_session_id=args.created_by_end_user_session_id,
created_by_account=args.created_by_account,
)
return workflow_app_log_pagination
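For illustration, a client request exercising the two new filters could look like the sketch below; the host, console API path, and token are placeholders and assumptions, only the query parameter names come from the parser above.
# Sketch of a client call using the new filters; host, path, and token are placeholders.
import requests
resp = requests.get(
    "https://<dify-host>/console/api/apps/<app_id>/workflow-app-logs",
    headers={"Authorization": "Bearer <access-token>"},
    params={
        "created_by_account": "owner@example.com",           # new: filter by account email
        "created_by_end_user_session_id": "session-abc123",  # new: filter by end-user session id
        "page": 1,
        "limit": 20,
    },
)
print(resp.json())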

View File

@ -695,6 +695,7 @@ class DatasetRetrievalSettingApi(Resource):
| VectorType.TABLESTORE
| VectorType.HUAWEI_CLOUD
| VectorType.TENCENT
| VectorType.MATRIXONE
):
return {
"retrieval_method": [
@ -742,6 +743,7 @@ class DatasetRetrievalSettingMockApi(Resource):
| VectorType.TABLESTORE
| VectorType.TENCENT
| VectorType.HUAWEI_CLOUD
| VectorType.MATRIXONE
):
return {
"retrieval_method": [

View File

@ -374,7 +374,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
if len(request.files) > 1:
raise TooManyFilesError()
# check file type
if not file.filename or not file.filename.endswith(".csv"):
if not file.filename or not file.filename.lower().endswith(".csv"):
raise ValueError("Invalid file type. Only CSV files are allowed")
try:

View File

@ -135,6 +135,20 @@ class WorkflowAppLogApi(Resource):
parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args")
parser.add_argument("created_at__before", type=str, location="args")
parser.add_argument("created_at__after", type=str, location="args")
parser.add_argument(
"created_by_end_user_session_id",
type=str,
location="args",
required=False,
default=None,
)
parser.add_argument(
"created_by_account",
type=str,
location="args",
required=False,
default=None,
)
parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
args = parser.parse_args()
@ -158,6 +172,8 @@ class WorkflowAppLogApi(Resource):
created_at_after=args.created_at__after,
page=args.page,
limit=args.limit,
created_by_end_user_session_id=args.created_by_end_user_session_id,
created_by_account=args.created_by_account,
)
return workflow_app_log_pagination

View File

@ -5,7 +5,11 @@ from werkzeug.exceptions import Forbidden, NotFound
import services.dataset_service
from controllers.service_api import api
from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError
from controllers.service_api.wraps import DatasetApiResource, validate_dataset_token
from controllers.service_api.wraps import (
DatasetApiResource,
cloud_edition_billing_rate_limit_check,
validate_dataset_token,
)
from core.model_runtime.entities.model_entities import ModelType
from core.plugin.entities.plugin import ModelProviderID
from core.provider_manager import ProviderManager
@ -70,6 +74,7 @@ class DatasetListApi(DatasetApiResource):
response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page}
return response, 200
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id):
"""Resource for creating datasets."""
parser = reqparse.RequestParser()
@ -193,6 +198,7 @@ class DatasetApi(DatasetApiResource):
return data, 200
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def patch(self, _, dataset_id):
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
@ -293,6 +299,7 @@ class DatasetApi(DatasetApiResource):
return result_data, 200
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def delete(self, _, dataset_id):
"""
Deletes a dataset given its ID.

View File

@ -19,7 +19,11 @@ from controllers.service_api.dataset.error import (
ArchivedDocumentImmutableError,
DocumentIndexingError,
)
from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_resource_check
from controllers.service_api.wraps import (
DatasetApiResource,
cloud_edition_billing_rate_limit_check,
cloud_edition_billing_resource_check,
)
from core.errors.error import ProviderTokenNotInitError
from extensions.ext_database import db
from fields.document_fields import document_fields, document_status_fields
@ -35,6 +39,7 @@ class DocumentAddByTextApi(DatasetApiResource):
@cloud_edition_billing_resource_check("vector_space", "dataset")
@cloud_edition_billing_resource_check("documents", "dataset")
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id):
"""Create document by text."""
parser = reqparse.RequestParser()
@ -99,6 +104,7 @@ class DocumentUpdateByTextApi(DatasetApiResource):
"""Resource for update documents."""
@cloud_edition_billing_resource_check("vector_space", "dataset")
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id, document_id):
"""Update document by text."""
parser = reqparse.RequestParser()
@ -158,6 +164,7 @@ class DocumentAddByFileApi(DatasetApiResource):
@cloud_edition_billing_resource_check("vector_space", "dataset")
@cloud_edition_billing_resource_check("documents", "dataset")
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id):
"""Create document by upload file."""
args = {}
@ -232,6 +239,7 @@ class DocumentUpdateByFileApi(DatasetApiResource):
"""Resource for update documents."""
@cloud_edition_billing_resource_check("vector_space", "dataset")
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id, document_id):
"""Update document by upload file."""
args = {}
@ -302,6 +310,7 @@ class DocumentUpdateByFileApi(DatasetApiResource):
class DocumentDeleteApi(DatasetApiResource):
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def delete(self, tenant_id, dataset_id, document_id):
"""Delete document."""
document_id = str(document_id)

View File

@ -1,9 +1,10 @@
from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
from controllers.service_api import api
from controllers.service_api.wraps import DatasetApiResource
from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_rate_limit_check
class HitTestingApi(DatasetApiResource, DatasetsHitTestingBase):
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id):
dataset_id_str = str(dataset_id)

View File

@ -3,7 +3,7 @@ from flask_restful import marshal, reqparse
from werkzeug.exceptions import NotFound
from controllers.service_api import api
from controllers.service_api.wraps import DatasetApiResource
from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_rate_limit_check
from fields.dataset_fields import dataset_metadata_fields
from services.dataset_service import DatasetService
from services.entities.knowledge_entities.knowledge_entities import (
@ -14,6 +14,7 @@ from services.metadata_service import MetadataService
class DatasetMetadataCreateServiceApi(DatasetApiResource):
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id):
parser = reqparse.RequestParser()
parser.add_argument("type", type=str, required=True, nullable=True, location="json")
@ -39,6 +40,7 @@ class DatasetMetadataCreateServiceApi(DatasetApiResource):
class DatasetMetadataServiceApi(DatasetApiResource):
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def patch(self, tenant_id, dataset_id, metadata_id):
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, nullable=True, location="json")
@ -54,6 +56,7 @@ class DatasetMetadataServiceApi(DatasetApiResource):
metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name"))
return marshal(metadata, dataset_metadata_fields), 200
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def delete(self, tenant_id, dataset_id, metadata_id):
dataset_id_str = str(dataset_id)
metadata_id_str = str(metadata_id)
@ -73,6 +76,7 @@ class DatasetMetadataBuiltInFieldServiceApi(DatasetApiResource):
class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource):
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id, action):
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
@ -88,6 +92,7 @@ class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource):
class DocumentMetadataEditServiceApi(DatasetApiResource):
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id):
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)

View File

@ -8,6 +8,7 @@ from controllers.service_api.app.error import ProviderNotInitializeError
from controllers.service_api.wraps import (
DatasetApiResource,
cloud_edition_billing_knowledge_limit_check,
cloud_edition_billing_rate_limit_check,
cloud_edition_billing_resource_check,
)
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
@ -35,6 +36,7 @@ class SegmentApi(DatasetApiResource):
@cloud_edition_billing_resource_check("vector_space", "dataset")
@cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id, document_id):
"""Create single segment."""
# check dataset
@ -139,6 +141,7 @@ class SegmentApi(DatasetApiResource):
class DatasetSegmentApi(DatasetApiResource):
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def delete(self, tenant_id, dataset_id, document_id, segment_id):
# check dataset
dataset_id = str(dataset_id)
@ -162,6 +165,7 @@ class DatasetSegmentApi(DatasetApiResource):
return 204
@cloud_edition_billing_resource_check("vector_space", "dataset")
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id, document_id, segment_id):
# check dataset
dataset_id = str(dataset_id)
@ -236,6 +240,7 @@ class ChildChunkApi(DatasetApiResource):
@cloud_edition_billing_resource_check("vector_space", "dataset")
@cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id, document_id, segment_id):
"""Create child chunk."""
# check dataset
@ -332,6 +337,7 @@ class DatasetChildChunkApi(DatasetApiResource):
"""Resource for updating child chunks."""
@cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def delete(self, tenant_id, dataset_id, document_id, segment_id, child_chunk_id):
"""Delete child chunk."""
# check dataset
@ -370,6 +376,7 @@ class DatasetChildChunkApi(DatasetApiResource):
@cloud_edition_billing_resource_check("vector_space", "dataset")
@cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def patch(self, tenant_id, dataset_id, document_id, segment_id, child_chunk_id):
"""Update child chunk."""
# check dataset

View File

@ -138,15 +138,12 @@ class DatasetConfigManager:
if not config.get("dataset_configs"):
config["dataset_configs"] = {"retrieval_model": "single"}
if not isinstance(config["dataset_configs"], dict):
raise ValueError("dataset_configs must be of object type")
if not config["dataset_configs"].get("datasets"):
config["dataset_configs"]["datasets"] = {"strategy": "router", "datasets": []}
if not isinstance(config["dataset_configs"], dict):
raise ValueError("dataset_configs must be of object type")
if not isinstance(config["dataset_configs"], dict):
raise ValueError("dataset_configs must be of object type")
need_manual_query_datasets = config.get("dataset_configs") and config["dataset_configs"].get(
"datasets", {}
).get("datasets")

View File

@ -367,6 +367,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
:param user: account or end user
:param invoke_from: invoke from source
:param application_generate_entity: application generate entity
:param workflow_execution_repository: repository for workflow execution
:param workflow_node_execution_repository: repository for workflow node execution
:param conversation: conversation
:param stream: is stream

View File

@ -195,6 +195,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
:param user: account or end user
:param application_generate_entity: application generate entity
:param invoke_from: invoke from source
:param workflow_execution_repository: repository for workflow execution
:param workflow_node_execution_repository: repository for workflow node execution
:param streaming: is stream
:param workflow_thread_pool_id: workflow thread pool id

View File

@ -542,8 +542,6 @@ class LBModelManager:
return config
return None
def cooldown(self, config: ModelLoadBalancingConfiguration, expire: int = 60) -> None:
"""
Cooldown model load balancing config

View File

@ -251,7 +251,7 @@ class OpsTraceManager:
provider_config_map[tracing_provider]["trace_instance"],
provider_config_map[tracing_provider]["config_class"],
)
decrypt_trace_config_key = str(decrypt_trace_config)
decrypt_trace_config_key = json.dumps(decrypt_trace_config, sort_keys=True)
tracing_instance = cls.ops_trace_instances_cache.get(decrypt_trace_config_key)
if tracing_instance is None:
# create a new tracing_instance and update the cache if it is absent
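The switch from str() to json.dumps(..., sort_keys=True) makes the cache key stable regardless of dict key order; a small self-contained sketch of the difference:
# Why the cache key uses json.dumps(..., sort_keys=True): equal configs produce equal keys
# regardless of insertion order, whereas str() of a dict depends on that order.
import json
a = {"api_key": "k", "project": "p"}
b = {"project": "p", "api_key": "k"}
assert json.dumps(a, sort_keys=True) == json.dumps(b, sort_keys=True)
assert str(a) != str(b)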

View File

@ -165,9 +165,23 @@ class PluginInstallTaskStartResponse(BaseModel):
task_id: str = Field(description="The ID of the install task.")
class PluginUploadResponse(BaseModel):
class PluginVerification(BaseModel):
"""
Verification of the plugin.
"""
class AuthorizedCategory(StrEnum):
Langgenius = "langgenius"
Partner = "partner"
Community = "community"
authorized_category: AuthorizedCategory = Field(description="The authorized category of the plugin.")
class PluginDecodeResponse(BaseModel):
unique_identifier: str = Field(description="The unique identifier of the plugin.")
manifest: PluginDeclaration
verification: Optional[PluginVerification] = Field(default=None, description="Basic verification information")
class PluginOAuthAuthorizationUrlResponse(BaseModel):

View File

@ -10,10 +10,10 @@ from core.plugin.entities.plugin import (
PluginInstallationSource,
)
from core.plugin.entities.plugin_daemon import (
PluginDecodeResponse,
PluginInstallTask,
PluginInstallTaskStartResponse,
PluginListResponse,
PluginUploadResponse,
)
from core.plugin.impl.base import BasePluginClient
@ -53,7 +53,7 @@ class PluginInstaller(BasePluginClient):
tenant_id: str,
pkg: bytes,
verify_signature: bool = False,
) -> PluginUploadResponse:
) -> PluginDecodeResponse:
"""
Upload a plugin package and return the plugin unique identifier.
"""
@ -68,7 +68,7 @@ class PluginInstaller(BasePluginClient):
return self._request_with_plugin_daemon_response(
"POST",
f"plugin/{tenant_id}/management/install/upload/package",
PluginUploadResponse,
PluginDecodeResponse,
files=body,
data=data,
)
@ -176,6 +176,18 @@ class PluginInstaller(BasePluginClient):
params={"plugin_unique_identifier": plugin_unique_identifier},
)
def decode_plugin_from_identifier(self, tenant_id: str, plugin_unique_identifier: str) -> PluginDecodeResponse:
"""
Decode a plugin from an identifier.
"""
return self._request_with_plugin_daemon_response(
"GET",
f"plugin/{tenant_id}/management/decode/from_identifier",
PluginDecodeResponse,
data={"plugin_unique_identifier": plugin_unique_identifier},
headers={"Content-Type": "application/json"},
)
def fetch_plugin_installation_by_ids(
self, tenant_id: str, plugin_ids: Sequence[str]
) -> Sequence[PluginInstallation]:

View File

@ -0,0 +1,233 @@
import json
import logging
import uuid
from functools import wraps
from typing import Any, Optional
from mo_vector.client import MoVectorClient # type: ignore
from pydantic import BaseModel, model_validator
from configs import dify_config
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.embedding.embedding_base import Embeddings
from core.rag.models.document import Document
from extensions.ext_redis import redis_client
from models.dataset import Dataset
logger = logging.getLogger(__name__)
class MatrixoneConfig(BaseModel):
host: str = "localhost"
port: int = 6001
user: str = "dump"
password: str = "111"
database: str = "dify"
metric: str = "l2"
@model_validator(mode="before")
@classmethod
def validate_config(cls, values: dict) -> dict:
if not values["host"]:
raise ValueError("config host is required")
if not values["port"]:
raise ValueError("config port is required")
if not values["user"]:
raise ValueError("config user is required")
if not values["password"]:
raise ValueError("config password is required")
if not values["database"]:
raise ValueError("config database is required")
return values
def ensure_client(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
if self.client is None:
self.client = self._get_client(None, False)
return func(self, *args, **kwargs)
return wrapper
class MatrixoneVector(BaseVector):
"""
Matrixone vector storage implementation.
"""
def __init__(self, collection_name: str, config: MatrixoneConfig):
super().__init__(collection_name)
self.config = config
self.collection_name = collection_name.lower()
self.client = None
@property
def collection_name(self):
return self._collection_name
@collection_name.setter
def collection_name(self, value):
self._collection_name = value
def get_type(self) -> str:
return VectorType.MATRIXONE
def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
if self.client is None:
self.client = self._get_client(len(embeddings[0]), True)
return self.add_texts(texts, embeddings)
def _get_client(self, dimension: Optional[int] = None, create_table: bool = False) -> MoVectorClient:
"""
Create a new client for the collection.
The collection will be created if it doesn't exist.
"""
lock_name = f"vector_indexing_lock_{self._collection_name}"
with redis_client.lock(lock_name, timeout=20):
client = MoVectorClient(
connection_string=f"mysql+pymysql://{self.config.user}:{self.config.password}@{self.config.host}:{self.config.port}/{self.config.database}",
table_name=self.collection_name,
vector_dimension=dimension,
create_table=create_table,
)
collection_exist_cache_key = f"vector_indexing_{self._collection_name}"
if redis_client.get(collection_exist_cache_key):
return client
try:
client.create_full_text_index()
except Exception as e:
logger.exception("Failed to create full text index")
redis_client.set(collection_exist_cache_key, 1, ex=3600)
return client
def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
if self.client is None:
self.client = self._get_client(len(embeddings[0]), True)
assert self.client is not None
ids = []
for _, doc in enumerate(documents):
if doc.metadata is not None:
doc_id = doc.metadata.get("doc_id", str(uuid.uuid4()))
ids.append(doc_id)
self.client.insert(
texts=[doc.page_content for doc in documents],
embeddings=embeddings,
metadatas=[doc.metadata for doc in documents],
ids=ids,
)
return ids
@ensure_client
def text_exists(self, id: str) -> bool:
assert self.client is not None
result = self.client.get(ids=[id])
return len(result) > 0
@ensure_client
def delete_by_ids(self, ids: list[str]) -> None:
assert self.client is not None
if not ids:
return
self.client.delete(ids=ids)
@ensure_client
def get_ids_by_metadata_field(self, key: str, value: str):
assert self.client is not None
results = self.client.query_by_metadata(filter={key: value})
return [result.id for result in results]
@ensure_client
def delete_by_metadata_field(self, key: str, value: str) -> None:
assert self.client is not None
self.client.delete(filter={key: value})
@ensure_client
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
assert self.client is not None
top_k = kwargs.get("top_k", 5)
document_ids_filter = kwargs.get("document_ids_filter")
filter = None
if document_ids_filter:
filter = {"document_id": {"$in": document_ids_filter}}
results = self.client.query(
query_vector=query_vector,
k=top_k,
filter=filter,
)
docs = []
# TODO: add the score threshold to the query
for result in results:
metadata = result.metadata
docs.append(
Document(
page_content=result.document,
metadata=metadata,
)
)
return docs
@ensure_client
def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
assert self.client is not None
top_k = kwargs.get("top_k", 5)
document_ids_filter = kwargs.get("document_ids_filter")
filter = None
if document_ids_filter:
filter = {"document_id": {"$in": document_ids_filter}}
score_threshold = float(kwargs.get("score_threshold", 0.0))
results = self.client.full_text_query(
keywords=[query],
k=top_k,
filter=filter,
)
docs = []
for result in results:
metadata = result.metadata
if isinstance(metadata, str):
import json
metadata = json.loads(metadata)
score = 1 - result.distance
if score >= score_threshold:
metadata["score"] = score
docs.append(
Document(
page_content=result.document,
metadata=metadata,
)
)
return docs
@ensure_client
def delete(self) -> None:
assert self.client is not None
self.client.delete()
class MatrixoneVectorFactory(AbstractVectorFactory):
def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> MatrixoneVector:
if dataset.index_struct_dict:
class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
collection_name = class_prefix
else:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.MATRIXONE, collection_name))
config = MatrixoneConfig(
host=dify_config.MATRIXONE_HOST or "localhost",
port=dify_config.MATRIXONE_PORT or 6001,
user=dify_config.MATRIXONE_USER or "dump",
password=dify_config.MATRIXONE_PASSWORD or "111",
database=dify_config.MATRIXONE_DATABASE or "dify",
metric=dify_config.MATRIXONE_METRIC or "l2",
)
return MatrixoneVector(collection_name=collection_name, config=config)
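A minimal usage sketch of the new store, assuming a locally reachable Matrixone instance with the default credentials above (this mirrors the integration test later in this diff and is not part of the changeset):
# Minimal sketch; assumes a local Matrixone instance with the defaults above.
from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneConfig, MatrixoneVector
from core.rag.models.document import Document
vector = MatrixoneVector(
    collection_name="example_collection",
    config=MatrixoneConfig(host="localhost", port=6001, user="dump", password="111", database="dify"),
)
docs = [Document(page_content="hello matrixone", metadata={"doc_id": "doc-1", "document_id": "d-1"})]
vector.create(texts=docs, embeddings=[[0.1, 0.2, 0.3]])
print(vector.search_by_vector([0.1, 0.2, 0.3], top_k=1))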

View File

@ -164,6 +164,10 @@ class Vector:
from core.rag.datasource.vdb.huawei.huawei_cloud_vector import HuaweiCloudVectorFactory
return HuaweiCloudVectorFactory
case VectorType.MATRIXONE:
from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneVectorFactory
return MatrixoneVectorFactory
case _:
raise ValueError(f"Vector store {vector_type} is not supported.")

View File

@ -29,3 +29,4 @@ class VectorType(StrEnum):
OPENGAUSS = "opengauss"
TABLESTORE = "tablestore"
HUAWEI_CLOUD = "huawei_cloud"
MATRIXONE = "matrixone"

View File

@ -45,7 +45,8 @@ class WeaviateVector(BaseVector):
# by changing the connection timeout to pypi.org from 1 second to 0.001 seconds.
# TODO: This can be removed once weaviate-client is updated to 3.26.7 or higher,
# which does not contain the deprecation check.
weaviate.connect.connection.PYPI_TIMEOUT = 0.001
if hasattr(weaviate.connect.connection, "PYPI_TIMEOUT"):
weaviate.connect.connection.PYPI_TIMEOUT = 0.001
try:
client = weaviate.Client(

View File

@ -22,6 +22,7 @@ class FirecrawlApp:
"formats": ["markdown"],
"onlyMainContent": True,
"timeout": 30000,
"integration": "dify",
}
if params:
json_data.update(params)
@ -39,7 +40,7 @@ class FirecrawlApp:
def crawl_url(self, url, params=None) -> str:
# Documentation: https://docs.firecrawl.dev/api-reference/endpoint/crawl-post
headers = self._prepare_headers()
json_data = {"url": url}
json_data = {"url": url, "integration": "dify"}
if params:
json_data.update(params)
response = self._post_request(f"{self.base_url}/v1/crawl", json_data, headers)
@ -49,7 +50,6 @@ class FirecrawlApp:
return cast(str, job_id)
else:
self._handle_error(response, "start crawl job")
# FIXME: unreachable code for mypy
return "" # unreachable
def check_crawl_status(self, job_id) -> dict[str, Any]:
@ -82,7 +82,6 @@ class FirecrawlApp:
)
else:
self._handle_error(response, "check crawl status")
# FIXME: unreachable code for mypy
return {} # unreachable
def _format_crawl_status_response(
@ -126,4 +125,31 @@ class FirecrawlApp:
def _handle_error(self, response, action) -> None:
error_message = response.json().get("error", "Unknown error occurred")
raise Exception(f"Failed to {action}. Status code: {response.status_code}. Error: {error_message}")
raise Exception(f"Failed to {action}. Status code: {response.status_code}. Error: {error_message}") # type: ignore[return]
def search(self, query: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
# Documentation: https://docs.firecrawl.dev/api-reference/endpoint/search
headers = self._prepare_headers()
json_data = {
"query": query,
"limit": 5,
"lang": "en",
"country": "us",
"timeout": 60000,
"ignoreInvalidURLs": False,
"scrapeOptions": {},
"integration": "dify",
}
if params:
json_data.update(params)
response = self._post_request(f"{self.base_url}/v1/search", json_data, headers)
if response.status_code == 200:
response_data = response.json()
if not response_data.get("success"):
raise Exception(f"Search failed. Error: {response_data.get('warning', 'Unknown error')}")
return cast(dict[str, Any], response_data)
elif response.status_code in {402, 409, 500, 429, 408}:
self._handle_error(response, "perform search")
return {} # Avoid additional exception after handling error
else:
raise Exception(f"Failed to perform search. Status code: {response.status_code}")

View File

@ -79,55 +79,71 @@ class NotionExtractor(BaseExtractor):
def _get_notion_database_data(self, database_id: str, query_dict: dict[str, Any] = {}) -> list[Document]:
"""Get all the pages from a Notion database."""
assert self._notion_access_token is not None, "Notion access token is required"
res = requests.post(
DATABASE_URL_TMPL.format(database_id=database_id),
headers={
"Authorization": "Bearer " + self._notion_access_token,
"Content-Type": "application/json",
"Notion-Version": "2022-06-28",
},
json=query_dict,
)
data = res.json()
database_content = []
if "results" not in data or data["results"] is None:
next_cursor = None
has_more = True
while has_more:
current_query = query_dict.copy()
if next_cursor:
current_query["start_cursor"] = next_cursor
res = requests.post(
DATABASE_URL_TMPL.format(database_id=database_id),
headers={
"Authorization": "Bearer " + self._notion_access_token,
"Content-Type": "application/json",
"Notion-Version": "2022-06-28",
},
json=current_query,
)
response_data = res.json()
if "results" not in response_data or response_data["results"] is None:
break
for result in response_data["results"]:
properties = result["properties"]
data = {}
value: Any
for property_name, property_value in properties.items():
type = property_value["type"]
if type == "multi_select":
value = []
multi_select_list = property_value[type]
for multi_select in multi_select_list:
value.append(multi_select["name"])
elif type in {"rich_text", "title"}:
if len(property_value[type]) > 0:
value = property_value[type][0]["plain_text"]
else:
value = ""
elif type in {"select", "status"}:
if property_value[type]:
value = property_value[type]["name"]
else:
value = ""
else:
value = property_value[type]
data[property_name] = value
row_dict = {k: v for k, v in data.items() if v}
row_content = ""
for key, value in row_dict.items():
if isinstance(value, dict):
value_dict = {k: v for k, v in value.items() if v}
value_content = "".join(f"{k}:{v} " for k, v in value_dict.items())
row_content = row_content + f"{key}:{value_content}\n"
else:
row_content = row_content + f"{key}:{value}\n"
database_content.append(row_content)
has_more = response_data.get("has_more", False)
next_cursor = response_data.get("next_cursor")
if not database_content:
return []
for result in data["results"]:
properties = result["properties"]
data = {}
value: Any
for property_name, property_value in properties.items():
type = property_value["type"]
if type == "multi_select":
value = []
multi_select_list = property_value[type]
for multi_select in multi_select_list:
value.append(multi_select["name"])
elif type in {"rich_text", "title"}:
if len(property_value[type]) > 0:
value = property_value[type][0]["plain_text"]
else:
value = ""
elif type in {"select", "status"}:
if property_value[type]:
value = property_value[type]["name"]
else:
value = ""
else:
value = property_value[type]
data[property_name] = value
row_dict = {k: v for k, v in data.items() if v}
row_content = ""
for key, value in row_dict.items():
if isinstance(value, dict):
value_dict = {k: v for k, v in value.items() if v}
value_content = "".join(f"{k}:{v} " for k, v in value_dict.items())
row_content = row_content + f"{key}:{value_content}\n"
else:
row_content = row_content + f"{key}:{value}\n"
database_content.append(row_content)
return [Document(page_content="\n".join(database_content))]

View File

@ -107,7 +107,7 @@ class QAIndexProcessor(BaseIndexProcessor):
def format_by_template(self, file: FileStorage, **kwargs) -> list[Document]:
# check file type
if not file.filename or not file.filename.endswith(".csv"):
if not file.filename or not file.filename.lower().endswith(".csv"):
raise ValueError("Invalid file type. Only CSV files are allowed")
try:

View File

@ -496,6 +496,8 @@ class DatasetRetrieval:
all_documents = self.calculate_keyword_score(query, all_documents, top_k)
elif index_type == "high_quality":
all_documents = self.calculate_vector_score(all_documents, top_k, score_threshold)
else:
all_documents = all_documents[:top_k] if top_k else all_documents
self._on_query(query, dataset_ids, app_id, user_from, user_id)

View File

@ -639,26 +639,19 @@ class GraphEngine:
retry_start_at = datetime.now(UTC).replace(tzinfo=None)
# yield control to other threads
time.sleep(0.001)
generator = node_instance.run()
for item in generator:
if isinstance(item, GraphEngineEvent):
if isinstance(item, BaseIterationEvent):
# add parallel info to iteration event
item.parallel_id = parallel_id
item.parallel_start_node_id = parallel_start_node_id
item.parent_parallel_id = parent_parallel_id
item.parent_parallel_start_node_id = parent_parallel_start_node_id
elif isinstance(item, BaseLoopEvent):
# add parallel info to loop event
item.parallel_id = parallel_id
item.parallel_start_node_id = parallel_start_node_id
item.parent_parallel_id = parent_parallel_id
item.parent_parallel_start_node_id = parent_parallel_start_node_id
yield item
event_stream = node_instance.run()
for event in event_stream:
if isinstance(event, GraphEngineEvent):
# add parallel info to iteration event
if isinstance(event, BaseIterationEvent | BaseLoopEvent):
event.parallel_id = parallel_id
event.parallel_start_node_id = parallel_start_node_id
event.parent_parallel_id = parent_parallel_id
event.parent_parallel_start_node_id = parent_parallel_start_node_id
yield event
else:
if isinstance(item, RunCompletedEvent):
run_result = item.run_result
if isinstance(event, RunCompletedEvent):
run_result = event.run_result
if run_result.status == WorkflowNodeExecutionStatus.FAILED:
if (
retries == max_retries
@ -694,7 +687,7 @@ class GraphEngine:
# if run failed, handle error
run_result = self._handle_continue_on_error(
node_instance,
item.run_result,
event.run_result,
self.graph_runtime_state.variable_pool,
handle_exceptions=handle_exceptions,
)
@ -797,28 +790,28 @@ class GraphEngine:
should_continue_retry = False
break
elif isinstance(item, RunStreamChunkEvent):
elif isinstance(event, RunStreamChunkEvent):
yield NodeRunStreamChunkEvent(
id=node_instance.id,
node_id=node_instance.node_id,
node_type=node_instance.node_type,
node_data=node_instance.node_data,
chunk_content=item.chunk_content,
from_variable_selector=item.from_variable_selector,
chunk_content=event.chunk_content,
from_variable_selector=event.from_variable_selector,
route_node_state=route_node_state,
parallel_id=parallel_id,
parallel_start_node_id=parallel_start_node_id,
parent_parallel_id=parent_parallel_id,
parent_parallel_start_node_id=parent_parallel_start_node_id,
)
elif isinstance(item, RunRetrieverResourceEvent):
elif isinstance(event, RunRetrieverResourceEvent):
yield NodeRunRetrieverResourceEvent(
id=node_instance.id,
node_id=node_instance.node_id,
node_type=node_instance.node_type,
node_data=node_instance.node_data,
retriever_resources=item.retriever_resources,
context=item.context,
retriever_resources=event.retriever_resources,
context=event.context,
route_node_state=route_node_state,
parallel_id=parallel_id,
parallel_start_node_id=parallel_start_node_id,

View File

@ -57,7 +57,6 @@ class StreamProcessor(ABC):
# The branch_identify parameter is added to ensure that
# only nodes in the correct logical branch are included.
reachable_node_ids.append(edge.target_node_id)
ids = self._fetch_node_ids_in_reachable_branch(edge.target_node_id, run_result.edge_source_handle)
reachable_node_ids.extend(ids)
else:
@ -74,6 +73,8 @@ class StreamProcessor(ABC):
self._remove_node_ids_in_unreachable_branch(node_id, reachable_node_ids)
def _fetch_node_ids_in_reachable_branch(self, node_id: str, branch_identify: Optional[str] = None) -> list[str]:
if node_id not in self.rest_node_ids:
self.rest_node_ids.append(node_id)
node_ids = []
for edge in self.graph.edge_mapping.get(node_id, []):
if edge.target_node_id == self.graph.root_node_id:

View File

@ -8,4 +8,5 @@ EMPTY_VALUE_MAPPING = {
SegmentType.ARRAY_STRING: [],
SegmentType.ARRAY_NUMBER: [],
SegmentType.ARRAY_OBJECT: [],
SegmentType.ARRAY_FILE: [],
}

View File

@ -1,5 +1,6 @@
from typing import Any
from core.file import File
from core.variables import SegmentType
from .enums import Operation
@ -85,6 +86,8 @@ def is_input_value_valid(*, variable_type: SegmentType, operation: Operation, va
return isinstance(value, int | float)
case SegmentType.ARRAY_OBJECT if operation == Operation.APPEND:
return isinstance(value, dict)
case SegmentType.ARRAY_FILE if operation == Operation.APPEND:
return isinstance(value, File)
# Array & Extend / Overwrite
case SegmentType.ARRAY_ANY if operation in {Operation.EXTEND, Operation.OVER_WRITE}:
@ -95,6 +98,8 @@ def is_input_value_valid(*, variable_type: SegmentType, operation: Operation, va
return isinstance(value, list) and all(isinstance(item, int | float) for item in value)
case SegmentType.ARRAY_OBJECT if operation in {Operation.EXTEND, Operation.OVER_WRITE}:
return isinstance(value, list) and all(isinstance(item, dict) for item in value)
case SegmentType.ARRAY_FILE if operation in {Operation.EXTEND, Operation.OVER_WRITE}:
return isinstance(value, list) and all(isinstance(item, File) for item in value)
case _:
return False

View File

@ -54,6 +54,15 @@ class Mail:
use_tls=dify_config.SMTP_USE_TLS,
opportunistic_tls=dify_config.SMTP_OPPORTUNISTIC_TLS,
)
case "sendgrid":
from libs.sendgrid import SendGridClient
if not dify_config.SENDGRID_API_KEY:
raise ValueError("SENDGRID_API_KEY is required for SendGrid mail type")
self._client = SendGridClient(
sendgrid_api_key=dify_config.SENDGRID_API_KEY, _from=dify_config.MAIL_DEFAULT_SEND_FROM or ""
)
case _:
raise ValueError("Unsupported mail type {}".format(mail_type))

View File

@ -110,6 +110,8 @@ def _build_variable_from_mapping(*, mapping: Mapping[str, Any], selector: Sequen
result = ArrayNumberVariable.model_validate(mapping)
case SegmentType.ARRAY_OBJECT if isinstance(value, list):
result = ArrayObjectVariable.model_validate(mapping)
case SegmentType.ARRAY_FILE if isinstance(value, list):
result = ArrayFileVariable.model_validate(mapping)
case _:
raise VariableError(f"not supported value type {value_type}")
if result.size > dify_config.MAX_VARIABLE_SIZE:

42
api/libs/sendgrid.py Normal file
View File

@ -0,0 +1,42 @@
import logging
import sendgrid # type: ignore
from python_http_client.exceptions import ForbiddenError, UnauthorizedError
from sendgrid.helpers.mail import Content, Email, Mail, To # type: ignore
class SendGridClient:
def __init__(self, sendgrid_api_key: str, _from: str):
self.sendgrid_api_key = sendgrid_api_key
self._from = _from
def send(self, mail: dict):
logging.debug("Sending email with SendGrid")
try:
_to = mail["to"]
if not _to:
raise ValueError("SendGridClient: Cannot send email: recipient address is missing.")
sg = sendgrid.SendGridAPIClient(api_key=self.sendgrid_api_key)
from_email = Email(self._from)
to_email = To(_to)
subject = mail["subject"]
content = Content("text/html", mail["html"])
mail = Mail(from_email, to_email, subject, content)
mail_json = mail.get() # type: ignore
response = sg.client.mail.send.post(request_body=mail_json)
logging.debug(response.status_code)
logging.debug(response.body)
logging.debug(response.headers)
except TimeoutError as e:
logging.exception("SendGridClient Timeout occurred while sending email")
raise
except (UnauthorizedError, ForbiddenError) as e:
logging.exception("SendGridClient Authentication failed. Verify that your credentials and the 'from")
raise
except Exception as e:
logging.exception(f"SendGridClient Unexpected error occurred while sending email to {_to}")
raise
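A minimal usage sketch; the mail dict keys ("to", "subject", "html") are the ones read in send() above, and the API key and addresses are placeholders.
# Minimal sketch of calling the client; key names mirror those read in send() above.
from libs.sendgrid import SendGridClient
client = SendGridClient(sendgrid_api_key="SG.<key>", _from="no-reply@example.com")
client.send({
    "to": "user@example.com",
    "subject": "Hello from Dify",
    "html": "<p>It works.</p>",
})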

View File

@ -10,7 +10,6 @@ from core.plugin.entities.plugin import GenericProviderID
from core.tools.entities.tool_entities import ToolProviderType
from core.tools.signature import sign_tool_file
from core.workflow.entities.workflow_execution import WorkflowExecutionStatus
from services.plugin.plugin_service import PluginService
if TYPE_CHECKING:
from models.workflow import Workflow
@ -170,6 +169,7 @@ class App(Base):
@property
def deleted_tools(self) -> list:
from core.tools.tool_manager import ToolManager
from services.plugin.plugin_service import PluginService
# get agent mode tools
app_model_config = self.app_model_config

View File

@ -18,4 +18,3 @@ ignore_missing_imports=True
[mypy-flask_restful.inputs]
ignore_missing_imports=True

View File

@ -81,6 +81,7 @@ dependencies = [
"weave~=0.51.0",
"yarl~=1.18.3",
"webvtt-py~=0.5.1",
"sendgrid~=6.12.3",
]
# Before adding new dependency, consider place it in
# alphabet order (a-z) and suitable group.
@ -202,4 +203,5 @@ vdb = [
"volcengine-compat~=1.0.0",
"weaviate-client~=3.24.0",
"xinference-client~=1.2.2",
"mo-vector~=0.1.13",
]

View File

@ -421,7 +421,7 @@ class AppDslService:
# Set icon type
icon_type_value = icon_type or app_data.get("icon_type")
if icon_type_value in ["emoji", "link"]:
if icon_type_value in ["emoji", "link", "image"]:
icon_type = icon_type_value
else:
icon_type = "emoji"

View File

@ -1860,16 +1860,16 @@ class DocumentService:
knowledge_config.embedding_model, # type: ignore
)
dataset_collection_binding_id = dataset_collection_binding.id
if knowledge_config.retrieval_model:
retrieval_model = knowledge_config.retrieval_model
else:
retrieval_model = RetrievalModel(
search_method=RetrievalMethod.SEMANTIC_SEARCH.value,
reranking_enable=False,
reranking_model=RerankingModel(reranking_provider_name="", reranking_model_name=""),
top_k=2,
score_threshold_enabled=False,
)
if knowledge_config.retrieval_model:
retrieval_model = knowledge_config.retrieval_model
else:
retrieval_model = RetrievalModel(
search_method=RetrievalMethod.SEMANTIC_SEARCH.value,
reranking_enable=False,
reranking_model=RerankingModel(reranking_provider_name="", reranking_model_name=""),
top_k=2,
score_threshold_enabled=False,
)
# save dataset
dataset = Dataset(
tenant_id=tenant_id,

View File

@ -0,0 +1,5 @@
from services.errors.base import BaseServiceError
class PluginInstallationForbiddenError(BaseServiceError):
pass

View File

@ -88,6 +88,26 @@ class WebAppAuthModel(BaseModel):
allow_email_password_login: bool = False
class PluginInstallationScope(StrEnum):
NONE = "none"
OFFICIAL_ONLY = "official_only"
OFFICIAL_AND_SPECIFIC_PARTNERS = "official_and_specific_partners"
ALL = "all"
class PluginInstallationPermissionModel(BaseModel):
# Plugin installation scope possible values:
# none: prohibit all plugin installations
# official_only: allow only Dify official plugins
# official_and_specific_partners: allow official and specific partner plugins
# all: allow installation of all plugins
plugin_installation_scope: PluginInstallationScope = PluginInstallationScope.ALL
# If True, restrict plugin installation to the marketplace only
# Equivalent to ForceEnablePluginVerification
restrict_to_marketplace_only: bool = False
class FeatureModel(BaseModel):
billing: BillingModel = BillingModel()
education: EducationModel = EducationModel()
@ -128,6 +148,7 @@ class SystemFeatureModel(BaseModel):
license: LicenseModel = LicenseModel()
branding: BrandingModel = BrandingModel()
webapp_auth: WebAppAuthModel = WebAppAuthModel()
plugin_installation_permission: PluginInstallationPermissionModel = PluginInstallationPermissionModel()
class FeatureService:
@ -291,3 +312,12 @@ class FeatureService:
features.license.workspaces.enabled = license_info["workspaces"]["enabled"]
features.license.workspaces.limit = license_info["workspaces"]["limit"]
features.license.workspaces.size = license_info["workspaces"]["used"]
if "PluginInstallationPermission" in enterprise_info:
plugin_installation_info = enterprise_info["PluginInstallationPermission"]
features.plugin_installation_permission.plugin_installation_scope = plugin_installation_info[
"pluginInstallationScope"
]
features.plugin_installation_permission.restrict_to_marketplace_only = plugin_installation_info[
"restrictToMarketplaceOnly"
]
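For reference, the enterprise_info fragment consumed above would carry a shape like the illustrative sketch below; the key names come from the code, the values are examples drawn from the PluginInstallationScope enum.
# Illustrative shape of the enterprise_info fragment read above (values are examples).
enterprise_info = {
    "PluginInstallationPermission": {
        "pluginInstallationScope": "official_and_specific_partners",
        "restrictToMarketplaceOnly": True,
    }
}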

View File

@ -3,7 +3,7 @@ import logging
import click
from core.entities import DEFAULT_PLUGIN_ID
from core.plugin.entities.plugin import GenericProviderID, ModelProviderID, ToolProviderID
from models.engine import db
logger = logging.getLogger(__name__)
@ -12,17 +12,17 @@ logger = logging.getLogger(__name__)
class PluginDataMigration:
@classmethod
def migrate(cls) -> None:
cls.migrate_db_records("providers", "provider_name") # large table
cls.migrate_db_records("provider_models", "provider_name")
cls.migrate_db_records("provider_orders", "provider_name")
cls.migrate_db_records("tenant_default_models", "provider_name")
cls.migrate_db_records("tenant_preferred_model_providers", "provider_name")
cls.migrate_db_records("provider_model_settings", "provider_name")
cls.migrate_db_records("load_balancing_model_configs", "provider_name")
cls.migrate_db_records("providers", "provider_name", ModelProviderID) # large table
cls.migrate_db_records("provider_models", "provider_name", ModelProviderID)
cls.migrate_db_records("provider_orders", "provider_name", ModelProviderID)
cls.migrate_db_records("tenant_default_models", "provider_name", ModelProviderID)
cls.migrate_db_records("tenant_preferred_model_providers", "provider_name", ModelProviderID)
cls.migrate_db_records("provider_model_settings", "provider_name", ModelProviderID)
cls.migrate_db_records("load_balancing_model_configs", "provider_name", ModelProviderID)
cls.migrate_datasets()
cls.migrate_db_records("embeddings", "provider_name") # large table
cls.migrate_db_records("dataset_collection_bindings", "provider_name")
cls.migrate_db_records("tool_builtin_providers", "provider")
cls.migrate_db_records("embeddings", "provider_name", ModelProviderID) # large table
cls.migrate_db_records("dataset_collection_bindings", "provider_name", ModelProviderID)
cls.migrate_db_records("tool_builtin_providers", "provider_name", ToolProviderID)
@classmethod
def migrate_datasets(cls) -> None:
@ -66,9 +66,10 @@ limit 1000"""
fg="white",
)
)
retrieval_model["reranking_model"]["reranking_provider_name"] = (
f"{DEFAULT_PLUGIN_ID}/{retrieval_model['reranking_model']['reranking_provider_name']}/{retrieval_model['reranking_model']['reranking_provider_name']}"
)
# update google to langgenius/gemini/google etc.
retrieval_model["reranking_model"]["reranking_provider_name"] = ModelProviderID(
retrieval_model["reranking_model"]["reranking_provider_name"]
).to_string()
retrieval_model_changed = True
click.echo(
@ -86,9 +87,11 @@ limit 1000"""
update_retrieval_model_sql = ", retrieval_model = :retrieval_model"
params["retrieval_model"] = json.dumps(retrieval_model)
params["provider_name"] = ModelProviderID(provider_name).to_string()
sql = f"""update {table_name}
set {provider_column_name} =
concat('{DEFAULT_PLUGIN_ID}/', {provider_column_name}, '/', {provider_column_name})
:provider_name
{update_retrieval_model_sql}
where id = :record_id"""
conn.execute(db.text(sql), params)
@ -122,7 +125,9 @@ limit 1000"""
)
@classmethod
def migrate_db_records(cls, table_name: str, provider_column_name: str) -> None:
def migrate_db_records(
cls, table_name: str, provider_column_name: str, provider_cls: type[GenericProviderID]
) -> None:
click.echo(click.style(f"Migrating [{table_name}] data for plugin", fg="white"))
processed_count = 0
@ -166,7 +171,8 @@ limit 1000"""
)
try:
updated_value = f"{DEFAULT_PLUGIN_ID}/{provider_name}/{provider_name}"
# update jina to langgenius/jina_tool/jina etc.
updated_value = provider_cls(provider_name).to_string()
batch_updates.append((updated_value, record_id))
except Exception as e:
failed_ids.append(record_id)
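For example, per the comments in this hunk, the provider ID classes produce the fully qualified names directly:
# Examples of the mapping performed above, taken from the comments in this hunk.
from core.plugin.entities.plugin import ModelProviderID, ToolProviderID
print(ModelProviderID("google").to_string())  # -> "langgenius/gemini/google"
print(ToolProviderID("jina").to_string())     # -> "langgenius/jina_tool/jina"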

View File

@ -17,11 +17,18 @@ from core.plugin.entities.plugin import (
PluginInstallation,
PluginInstallationSource,
)
from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginListResponse, PluginUploadResponse
from core.plugin.entities.plugin_daemon import (
PluginDecodeResponse,
PluginInstallTask,
PluginListResponse,
PluginVerification,
)
from core.plugin.impl.asset import PluginAssetManager
from core.plugin.impl.debugging import PluginDebuggingClient
from core.plugin.impl.plugin import PluginInstaller
from extensions.ext_redis import redis_client
from services.errors.plugin import PluginInstallationForbiddenError
from services.feature_service import FeatureService, PluginInstallationScope
logger = logging.getLogger(__name__)
@ -86,6 +93,42 @@ class PluginService:
logger.exception("failed to fetch latest plugin version")
return result
@staticmethod
def _check_marketplace_only_permission():
"""
Check if the marketplace only permission is enabled
"""
features = FeatureService.get_system_features()
if features.plugin_installation_permission.restrict_to_marketplace_only:
raise PluginInstallationForbiddenError("Plugin installation is restricted to marketplace only")
@staticmethod
def _check_plugin_installation_scope(plugin_verification: Optional[PluginVerification]):
"""
Check the plugin installation scope
"""
features = FeatureService.get_system_features()
match features.plugin_installation_permission.plugin_installation_scope:
case PluginInstallationScope.OFFICIAL_ONLY:
if (
plugin_verification is None
or plugin_verification.authorized_category != PluginVerification.AuthorizedCategory.Langgenius
):
raise PluginInstallationForbiddenError("Plugin installation is restricted to official only")
case PluginInstallationScope.OFFICIAL_AND_SPECIFIC_PARTNERS:
if plugin_verification is None or plugin_verification.authorized_category not in [
PluginVerification.AuthorizedCategory.Langgenius,
PluginVerification.AuthorizedCategory.Partner,
]:
raise PluginInstallationForbiddenError(
"Plugin installation is restricted to official and specific partners"
)
case PluginInstallationScope.NONE:
raise PluginInstallationForbiddenError("Installing plugins is not allowed")
case PluginInstallationScope.ALL:
pass
@staticmethod
def get_debugging_key(tenant_id: str) -> str:
"""
@ -208,6 +251,8 @@ class PluginService:
# check if plugin pkg is already downloaded
manager = PluginInstaller()
features = FeatureService.get_system_features()
try:
manager.fetch_plugin_manifest(tenant_id, new_plugin_unique_identifier)
# already downloaded, skip, and record install event
@ -215,7 +260,14 @@ class PluginService:
except Exception:
# plugin not installed, download and upload pkg
pkg = download_plugin_pkg(new_plugin_unique_identifier)
manager.upload_pkg(tenant_id, pkg, verify_signature=False)
response = manager.upload_pkg(
tenant_id,
pkg,
verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
)
# check if the plugin is available to install
PluginService._check_plugin_installation_scope(response.verification)
return manager.upgrade_plugin(
tenant_id,
@ -239,6 +291,7 @@ class PluginService:
"""
Upgrade plugin with github
"""
PluginService._check_marketplace_only_permission()
manager = PluginInstaller()
return manager.upgrade_plugin(
tenant_id,
@ -253,33 +306,43 @@ class PluginService:
)
@staticmethod
def upload_pkg(tenant_id: str, pkg: bytes, verify_signature: bool = False) -> PluginUploadResponse:
def upload_pkg(tenant_id: str, pkg: bytes, verify_signature: bool = False) -> PluginDecodeResponse:
"""
Upload plugin package files
returns: plugin_unique_identifier
"""
PluginService._check_marketplace_only_permission()
manager = PluginInstaller()
return manager.upload_pkg(tenant_id, pkg, verify_signature)
features = FeatureService.get_system_features()
response = manager.upload_pkg(
tenant_id,
pkg,
verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
)
return response
@staticmethod
def upload_pkg_from_github(
tenant_id: str, repo: str, version: str, package: str, verify_signature: bool = False
) -> PluginUploadResponse:
) -> PluginDecodeResponse:
"""
Install plugin from github release package files,
returns plugin_unique_identifier
"""
PluginService._check_marketplace_only_permission()
pkg = download_with_size_limit(
f"https://github.com/{repo}/releases/download/{version}/{package}", dify_config.PLUGIN_MAX_PACKAGE_SIZE
)
features = FeatureService.get_system_features()
manager = PluginInstaller()
return manager.upload_pkg(
response = manager.upload_pkg(
tenant_id,
pkg,
verify_signature,
verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
)
return response
@staticmethod
def upload_bundle(
@ -289,11 +352,15 @@ class PluginService:
Upload a plugin bundle and return the dependencies.
"""
manager = PluginInstaller()
PluginService._check_marketplace_only_permission()
return manager.upload_bundle(tenant_id, bundle, verify_signature)
@staticmethod
def install_from_local_pkg(tenant_id: str, plugin_unique_identifiers: Sequence[str]):
PluginService._check_marketplace_only_permission()
manager = PluginInstaller()
return manager.install_from_identifiers(
tenant_id,
plugin_unique_identifiers,
@ -307,6 +374,8 @@ class PluginService:
Install plugin from github release package files,
returns plugin_unique_identifier
"""
PluginService._check_marketplace_only_permission()
manager = PluginInstaller()
return manager.install_from_identifiers(
tenant_id,
@ -322,28 +391,33 @@ class PluginService:
)
@staticmethod
def fetch_marketplace_pkg(
tenant_id: str, plugin_unique_identifier: str, verify_signature: bool = False
) -> PluginDeclaration:
def fetch_marketplace_pkg(tenant_id: str, plugin_unique_identifier: str) -> PluginDeclaration:
"""
Fetch marketplace package
"""
if not dify_config.MARKETPLACE_ENABLED:
raise ValueError("marketplace is not enabled")
features = FeatureService.get_system_features()
manager = PluginInstaller()
try:
declaration = manager.fetch_plugin_manifest(tenant_id, plugin_unique_identifier)
except Exception:
pkg = download_plugin_pkg(plugin_unique_identifier)
declaration = manager.upload_pkg(tenant_id, pkg, verify_signature).manifest
response = manager.upload_pkg(
tenant_id,
pkg,
verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
)
# check if the plugin is available to install
PluginService._check_plugin_installation_scope(response.verification)
declaration = response.manifest
return declaration
@staticmethod
def install_from_marketplace_pkg(
tenant_id: str, plugin_unique_identifiers: Sequence[str], verify_signature: bool = False
):
def install_from_marketplace_pkg(tenant_id: str, plugin_unique_identifiers: Sequence[str]):
"""
Install plugin from marketplace package files,
returns installation task id
@ -353,15 +427,26 @@ class PluginService:
manager = PluginInstaller()
features = FeatureService.get_system_features()
# check if already downloaded
for plugin_unique_identifier in plugin_unique_identifiers:
try:
manager.fetch_plugin_manifest(tenant_id, plugin_unique_identifier)
plugin_decode_response = manager.decode_plugin_from_identifier(tenant_id, plugin_unique_identifier)
# check if the plugin is available to install
PluginService._check_plugin_installation_scope(plugin_decode_response.verification)
# already downloaded, skip
except Exception:
# plugin not installed, download and upload pkg
pkg = download_plugin_pkg(plugin_unique_identifier)
manager.upload_pkg(tenant_id, pkg, verify_signature)
response = manager.upload_pkg(
tenant_id,
pkg,
verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
)
# check if the plugin is available to install
PluginService._check_plugin_installation_scope(response.verification)
return manager.install_from_identifiers(
tenant_id,

View File

@ -5,7 +5,7 @@ from sqlalchemy import and_, func, or_, select
from sqlalchemy.orm import Session
from core.workflow.entities.workflow_execution import WorkflowExecutionStatus
from models import App, EndUser, WorkflowAppLog, WorkflowRun
from models import Account, App, EndUser, WorkflowAppLog, WorkflowRun
from models.enums import CreatorUserRole
@ -21,6 +21,8 @@ class WorkflowAppService:
created_at_after: datetime | None = None,
page: int = 1,
limit: int = 20,
created_by_end_user_session_id: str | None = None,
created_by_account: str | None = None,
) -> dict:
"""
Get paginate workflow app logs using SQLAlchemy 2.0 style
@ -32,6 +34,8 @@ class WorkflowAppService:
:param created_at_after: filter logs created after this timestamp
:param page: page number
:param limit: items per page
:param created_by_end_user_session_id: filter by end user session id
:param created_by_account: filter by account email
:return: Pagination object
"""
# Build base statement using SQLAlchemy 2.0 style
@ -71,6 +75,26 @@ class WorkflowAppService:
if created_at_after:
stmt = stmt.where(WorkflowAppLog.created_at >= created_at_after)
# Filter by end user session id or account email
if created_by_end_user_session_id:
stmt = stmt.join(
EndUser,
and_(
WorkflowAppLog.created_by == EndUser.id,
WorkflowAppLog.created_by_role == CreatorUserRole.END_USER,
EndUser.session_id == created_by_end_user_session_id,
),
)
if created_by_account:
stmt = stmt.join(
Account,
and_(
WorkflowAppLog.created_by == Account.id,
WorkflowAppLog.created_by_role == CreatorUserRole.ACCOUNT,
Account.email == created_by_account,
),
)
stmt = stmt.order_by(WorkflowAppLog.created_at.desc())
# Get total count using the same filters

View File

@ -0,0 +1,25 @@
from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneConfig, MatrixoneVector
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
get_example_text,
setup_mock_redis,
)
class MatrixoneVectorTest(AbstractVectorTest):
def __init__(self):
super().__init__()
self.vector = MatrixoneVector(
collection_name=self.collection_name,
config=MatrixoneConfig(
host="localhost", port=6001, user="dump", password="111", database="dify", metric="l2"
),
)
def get_ids_by_metadata_field(self):
ids = self.vector.get_ids_by_metadata_field(key="document_id", value=self.example_doc_id)
assert len(ids) == 1
def test_matrixone_vector(setup_mock_redis):
MatrixoneVectorTest().run_all_tests()
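For local runs of this test, a readiness probe for the Matrixone service can mirror the OceanBase check deleted below. This is a minimal sketch, not part of this change, assuming pymysql is installed and the default credentials used above and in the compose setup (user dump, password 111, port 6001, database dify):

import time
import pymysql

def check_matrixone_ready() -> bool:
    # Connect with the defaults used by the matrixone compose service.
    connection = None
    try:
        connection = pymysql.connect(
            host="localhost",
            port=6001,
            user="dump",
            password="111",
            database="dify",
        )
        with connection.cursor() as cursor:
            cursor.execute("SELECT 1")
            return cursor.fetchone() is not None
    except Exception as e:
        print(f"Matrixone is not ready. Exception: {e}")
        return False
    finally:
        if connection:
            connection.close()

def main():
    # Retry for up to ~100 seconds, in the style of the removed OceanBase check.
    for attempt in range(50):
        if check_matrixone_ready():
            print("Matrixone is ready.")
            return
        print(f"Attempt {attempt + 1} failed, retry in 2 seconds...")
        time.sleep(2)
    exit(1)

if __name__ == "__main__":
    main()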

View File

@ -1,49 +0,0 @@
import time
import pymysql
def check_oceanbase_ready() -> bool:
try:
connection = pymysql.connect(
host="localhost",
port=2881,
user="root",
password="difyai123456",
)
affected_rows = connection.query("SELECT 1")
return affected_rows == 1
except Exception as e:
print(f"Oceanbase is not ready. Exception: {e}")
return False
finally:
if connection:
connection.close()
def main():
max_attempts = 50
retry_interval_seconds = 2
is_oceanbase_ready = False
for attempt in range(max_attempts):
try:
is_oceanbase_ready = check_oceanbase_ready()
except Exception as e:
print(f"Oceanbase is not ready. Exception: {e}")
is_oceanbase_ready = False
if is_oceanbase_ready:
break
else:
print(f"Attempt {attempt + 1} failed, retry in {retry_interval_seconds} seconds...")
time.sleep(retry_interval_seconds)
if is_oceanbase_ready:
print("Oceanbase is ready.")
else:
print(f"Oceanbase is not ready after {max_attempts} attempting checks.")
exit(1)
if __name__ == "__main__":
main()

File diff suppressed because it is too large

View File

@ -399,7 +399,7 @@ SUPABASE_URL=your-server-url
# ------------------------------
# The type of vector store to use.
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`.
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
VECTOR_STORE=weaviate
# The Weaviate endpoint URL. Only available when VECTOR_STORE is `weaviate`.
@ -490,6 +490,13 @@ TIDB_VECTOR_USER=
TIDB_VECTOR_PASSWORD=
TIDB_VECTOR_DATABASE=dify
# Matrixone vector configurations.
MATRIXONE_HOST=matrixone
MATRIXONE_PORT=6001
MATRIXONE_USER=dump
MATRIXONE_PASSWORD=111
MATRIXONE_DATABASE=dify
# Tidb on qdrant configuration, only available when VECTOR_STORE is `tidb_on_qdrant`
TIDB_ON_QDRANT_URL=http://127.0.0.1
TIDB_ON_QDRANT_API_KEY=dify
@ -719,10 +726,11 @@ NOTION_INTERNAL_SECRET=
# Mail related configuration
# ------------------------------
# Mail type, support: resend, smtp
# Mail type, support: resend, smtp, sendgrid
MAIL_TYPE=resend
# Default send from email address, if not specified
# If using SendGrid, use the 'from' field for authentication if necessary.
MAIL_DEFAULT_SEND_FROM=
# API-Key for the Resend email provider, used when MAIL_TYPE is `resend`.
@ -738,6 +746,9 @@ SMTP_PASSWORD=
SMTP_USE_TLS=true
SMTP_OPPORTUNISTIC_TLS=false
# SendGrid configuration
SENDGRID_API_KEY=
# ------------------------------
# Others Configuration
# ------------------------------
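For reference, the shape of a send call behind these settings, as a rough sketch only (this is not the mail client added by this change): it assumes the official sendgrid Python package and reads SENDGRID_API_KEY and MAIL_DEFAULT_SEND_FROM from the environment; the recipient, subject, and body are placeholders.

import os
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail

# Build a message using the 'from' address configured in MAIL_DEFAULT_SEND_FROM.
message = Mail(
    from_email=os.environ["MAIL_DEFAULT_SEND_FROM"],
    to_emails="user@example.com",   # placeholder recipient
    subject="Dify test mail",
    html_content="<p>Hello from Dify via SendGrid</p>",
)

# Authenticate with the key from SENDGRID_API_KEY and send.
client = SendGridAPIClient(os.environ["SENDGRID_API_KEY"])
response = client.send(message)
print(response.status_code)  # 202 means the mail was accepted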
@ -815,7 +826,8 @@ TEXT_GENERATION_TIMEOUT_MS=60000
# Environment Variables for db Service
# ------------------------------
PGUSER=${DB_USERNAME}
# The name of the default postgres user.
POSTGRES_USER=${DB_USERNAME}
# The password for the default postgres user.
POSTGRES_PASSWORD=${DB_PASSWORD}
# The name of the default postgres database.
@ -1067,7 +1079,7 @@ PLUGIN_MEDIA_CACHE_PATH=assets
# Plugin oss bucket
PLUGIN_STORAGE_OSS_BUCKET=
# Plugin oss s3 credentials
PLUGIN_S3_USE_AWS=
PLUGIN_S3_USE_AWS=false
PLUGIN_S3_USE_AWS_MANAGED_IAM=false
PLUGIN_S3_ENDPOINT=
PLUGIN_S3_USE_PATH_STYLE=false

View File

@ -84,7 +84,7 @@ services:
image: postgres:15-alpine
restart: always
environment:
PGUSER: ${PGUSER:-postgres}
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456}
POSTGRES_DB: ${POSTGRES_DB:-dify}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
@ -451,6 +451,14 @@ services:
OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai}
OB_SERVER_IP: 127.0.0.1
MODE: mini
ports:
- "${OCEANBASE_VECTOR_PORT:-2881}:2881"
healthcheck:
test: [ 'CMD-SHELL', 'obclient -h127.0.0.1 -P2881 -uroot@test -p$${OB_TENANT_PASSWORD} -e "SELECT 1;"' ]
interval: 10s
retries: 30
start_period: 30s
timeout: 10s
# Oracle vector database
oracle:
@ -609,6 +617,18 @@ services:
ports:
- ${MYSCALE_PORT:-8123}:${MYSCALE_PORT:-8123}
# Matrixone vector store.
matrixone:
hostname: matrixone
image: matrixorigin/matrixone:2.1.1
profiles:
- matrixone
restart: always
volumes:
- ./volumes/matrixone/data:/mo-data
ports:
- ${MATRIXONE_PORT:-6001}:${MATRIXONE_PORT:-6001}
# https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html
# https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html#docker-prod-prerequisites
elasticsearch:

View File

@ -103,7 +103,7 @@ services:
PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages}
PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-}
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}

View File

@ -195,6 +195,11 @@ x-shared-env: &shared-api-worker-env
TIDB_VECTOR_USER: ${TIDB_VECTOR_USER:-}
TIDB_VECTOR_PASSWORD: ${TIDB_VECTOR_PASSWORD:-}
TIDB_VECTOR_DATABASE: ${TIDB_VECTOR_DATABASE:-dify}
MATRIXONE_HOST: ${MATRIXONE_HOST:-matrixone}
MATRIXONE_PORT: ${MATRIXONE_PORT:-6001}
MATRIXONE_USER: ${MATRIXONE_USER:-dump}
MATRIXONE_PASSWORD: ${MATRIXONE_PASSWORD:-111}
MATRIXONE_DATABASE: ${MATRIXONE_DATABASE:-dify}
TIDB_ON_QDRANT_URL: ${TIDB_ON_QDRANT_URL:-http://127.0.0.1}
TIDB_ON_QDRANT_API_KEY: ${TIDB_ON_QDRANT_API_KEY:-dify}
TIDB_ON_QDRANT_CLIENT_TIMEOUT: ${TIDB_ON_QDRANT_CLIENT_TIMEOUT:-20}
@ -322,6 +327,7 @@ x-shared-env: &shared-api-worker-env
SMTP_PASSWORD: ${SMTP_PASSWORD:-}
SMTP_USE_TLS: ${SMTP_USE_TLS:-true}
SMTP_OPPORTUNISTIC_TLS: ${SMTP_OPPORTUNISTIC_TLS:-false}
SENDGRID_API_KEY: ${SENDGRID_API_KEY:-}
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-4000}
INVITE_EXPIRY_HOURS: ${INVITE_EXPIRY_HOURS:-72}
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: ${RESET_PASSWORD_TOKEN_EXPIRY_MINUTES:-5}
@ -356,7 +362,7 @@ x-shared-env: &shared-api-worker-env
MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10}
MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-99}
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
PGUSER: ${PGUSER:-${DB_USERNAME}}
POSTGRES_USER: ${POSTGRES_USER:-${DB_USERNAME}}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-${DB_PASSWORD}}
POSTGRES_DB: ${POSTGRES_DB:-${DB_DATABASE}}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
@ -467,7 +473,7 @@ x-shared-env: &shared-api-worker-env
PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages}
PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
PLUGIN_S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-}
PLUGIN_S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
PLUGIN_S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
PLUGIN_S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
PLUGIN_S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
@ -591,7 +597,7 @@ services:
image: postgres:15-alpine
restart: always
environment:
PGUSER: ${PGUSER:-postgres}
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456}
POSTGRES_DB: ${POSTGRES_DB:-dify}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
@ -958,6 +964,14 @@ services:
OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai}
OB_SERVER_IP: 127.0.0.1
MODE: mini
ports:
- "${OCEANBASE_VECTOR_PORT:-2881}:2881"
healthcheck:
test: [ 'CMD-SHELL', 'obclient -h127.0.0.1 -P2881 -uroot@test -p$${OB_TENANT_PASSWORD} -e "SELECT 1;"' ]
interval: 10s
retries: 30
start_period: 30s
timeout: 10s
# Oracle vector database
oracle:
@ -1116,6 +1130,18 @@ services:
ports:
- ${MYSCALE_PORT:-8123}:${MYSCALE_PORT:-8123}
# Matrixone vector store.
matrixone:
hostname: matrixone
image: matrixorigin/matrixone:2.1.1
profiles:
- matrixone
restart: always
volumes:
- ./volumes/matrixone/data:/mo-data
ports:
- ${MATRIXONE_PORT:-6001}:${MATRIXONE_PORT:-6001}
# https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html
# https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html#docker-prod-prerequisites
elasticsearch:

View File

@ -1,7 +1,7 @@
# ------------------------------
# Environment Variables for db Service
# ------------------------------
PGUSER=postgres
POSTGRES_USER=postgres
# The password for the default postgres user.
POSTGRES_PASSWORD=difyai123456
# The name of the default postgres database.
@ -133,7 +133,7 @@ PLUGIN_MEDIA_CACHE_PATH=assets
PLUGIN_STORAGE_OSS_BUCKET=
# Plugin oss s3 credentials
PLUGIN_S3_USE_AWS_MANAGED_IAM=false
PLUGIN_S3_USE_AWS=
PLUGIN_S3_USE_AWS=false
PLUGIN_S3_ENDPOINT=
PLUGIN_S3_USE_PATH_STYLE=false
PLUGIN_AWS_ACCESS_KEY=

View File

@ -9,6 +9,7 @@ import { useTranslation } from 'react-i18next'
import { useDebounceFn } from 'ahooks'
import {
RiApps2Line,
RiDragDropLine,
RiExchange2Line,
RiFile4Line,
RiMessage3Line,
@ -16,7 +17,8 @@ import {
} from '@remixicon/react'
import AppCard from './AppCard'
import NewAppCard from './NewAppCard'
import useAppsQueryState from './hooks/useAppsQueryState'
import useAppsQueryState from './hooks/use-apps-query-state'
import { useDSLDragDrop } from './hooks/use-dsl-drag-drop'
import type { AppListResponse } from '@/models/app'
import { fetchAppList } from '@/service/apps'
import { useAppContext } from '@/context/app-context'
@ -29,6 +31,7 @@ import { useStore as useTagStore } from '@/app/components/base/tag-management/st
import TagManagementModal from '@/app/components/base/tag-management'
import TagFilter from '@/app/components/base/tag-management/filter'
import CheckboxWithLabel from '@/app/components/datasets/create/website/base/checkbox-with-label'
import CreateFromDSLModal from '@/app/components/app/create-from-dsl-modal'
const getKey = (
pageIndex: number,
@ -67,6 +70,9 @@ const Apps = () => {
const [tagFilterValue, setTagFilterValue] = useState<string[]>(tagIDs)
const [searchKeywords, setSearchKeywords] = useState(keywords)
const newAppCardRef = useRef<HTMLDivElement>(null)
const containerRef = useRef<HTMLDivElement>(null)
const [showCreateFromDSLModal, setShowCreateFromDSLModal] = useState(false)
const [droppedDSLFile, setDroppedDSLFile] = useState<File | undefined>()
const setKeywords = useCallback((keywords: string) => {
setQuery(prev => ({ ...prev, keywords }))
}, [setQuery])
@ -74,6 +80,17 @@ const Apps = () => {
setQuery(prev => ({ ...prev, tagIDs }))
}, [setQuery])
const handleDSLFileDropped = useCallback((file: File) => {
setDroppedDSLFile(file)
setShowCreateFromDSLModal(true)
}, [])
const { dragging } = useDSLDragDrop({
onDSLFileDropped: handleDSLFileDropped,
containerRef,
enabled: isCurrentWorkspaceEditor,
})
const { data, isLoading, error, setSize, mutate } = useSWRInfinite(
(pageIndex: number, previousPageData: AppListResponse) => getKey(pageIndex, previousPageData, activeTab, isCreatedByMe, tagIDs, searchKeywords),
fetchAppList,
@ -151,47 +168,81 @@ const Apps = () => {
return (
<>
<div className='sticky top-0 z-10 flex flex-wrap items-center justify-between gap-y-2 bg-background-body px-12 pb-2 pt-4 leading-[56px]'>
<TabSliderNew
value={activeTab}
onChange={setActiveTab}
options={options}
/>
<div className='flex items-center gap-2'>
<CheckboxWithLabel
className='mr-2'
label={t('app.showMyCreatedAppsOnly')}
isChecked={isCreatedByMe}
onChange={handleCreatedByMeChange}
/>
<TagFilter type='app' value={tagFilterValue} onChange={handleTagsChange} />
<Input
showLeftIcon
showClearIcon
wrapperClassName='w-[200px]'
value={keywords}
onChange={e => handleKeywordsChange(e.target.value)}
onClear={() => handleKeywordsChange('')}
<div ref={containerRef} className='relative flex h-0 shrink-0 grow flex-col overflow-y-auto bg-background-body'>
{dragging && (
<div className="absolute inset-0 z-50 m-0.5 rounded-2xl border-2 border-dashed border-components-dropzone-border-accent bg-[rgba(21,90,239,0.14)] p-2">
</div>
)}
<div className='sticky top-0 z-10 flex flex-wrap items-center justify-between gap-y-2 bg-background-body px-12 pb-2 pt-4 leading-[56px]'>
<TabSliderNew
value={activeTab}
onChange={setActiveTab}
options={options}
/>
<div className='flex items-center gap-2'>
<CheckboxWithLabel
className='mr-2'
label={t('app.showMyCreatedAppsOnly')}
isChecked={isCreatedByMe}
onChange={handleCreatedByMeChange}
/>
<TagFilter type='app' value={tagFilterValue} onChange={handleTagsChange} />
<Input
showLeftIcon
showClearIcon
wrapperClassName='w-[200px]'
value={keywords}
onChange={e => handleKeywordsChange(e.target.value)}
onClear={() => handleKeywordsChange('')}
/>
</div>
</div>
{(data && data[0].total > 0)
? <div className='relative grid grow grid-cols-1 content-start gap-4 px-12 pt-2 sm:grid-cols-1 md:grid-cols-2 xl:grid-cols-4 2xl:grid-cols-5 2k:grid-cols-6'>
{isCurrentWorkspaceEditor
&& <NewAppCard ref={newAppCardRef} onSuccess={mutate} />}
{data.map(({ data: apps }) => apps.map(app => (
<AppCard key={app.id} app={app} onRefresh={mutate} />
)))}
</div>
: <div className='relative grid grow grid-cols-1 content-start gap-4 overflow-hidden px-12 pt-2 sm:grid-cols-1 md:grid-cols-2 xl:grid-cols-4 2xl:grid-cols-5 2k:grid-cols-6'>
{isCurrentWorkspaceEditor
&& <NewAppCard ref={newAppCardRef} className='z-10' onSuccess={mutate} />}
<NoAppsFound />
</div>}
{isCurrentWorkspaceEditor && (
<div
className={`flex items-center justify-center gap-2 py-4 ${dragging ? 'text-text-accent' : 'text-text-quaternary'}`}
role="region"
aria-label={t('app.newApp.dropDSLToCreateApp')}
>
<RiDragDropLine className="h-4 w-4" />
<span className="system-xs-regular">{t('app.newApp.dropDSLToCreateApp')}</span>
</div>
)}
<CheckModal />
<div ref={anchorRef} className='h-0'> </div>
{showTagManagementModal && (
<TagManagementModal type='app' show={showTagManagementModal} />
)}
</div>
{(data && data[0].total > 0)
? <div className='relative grid grow grid-cols-1 content-start gap-4 px-12 pt-2 sm:grid-cols-1 md:grid-cols-2 xl:grid-cols-4 2xl:grid-cols-5 2k:grid-cols-6'>
{isCurrentWorkspaceEditor
&& <NewAppCard ref={newAppCardRef} onSuccess={mutate} />}
{data.map(({ data: apps }) => apps.map(app => (
<AppCard key={app.id} app={app} onRefresh={mutate} />
)))}
</div>
: <div className='relative grid grow grid-cols-1 content-start gap-4 overflow-hidden px-12 pt-2 sm:grid-cols-1 md:grid-cols-2 xl:grid-cols-4 2xl:grid-cols-5 2k:grid-cols-6'>
{isCurrentWorkspaceEditor
&& <NewAppCard ref={newAppCardRef} className='z-10' onSuccess={mutate} />}
<NoAppsFound />
</div>}
<CheckModal />
<div ref={anchorRef} className='h-0'> </div>
{showTagManagementModal && (
<TagManagementModal type='app' show={showTagManagementModal} />
{showCreateFromDSLModal && (
<CreateFromDSLModal
show={showCreateFromDSLModal}
onClose={() => {
setShowCreateFromDSLModal(false)
setDroppedDSLFile(undefined)
}}
onSuccess={() => {
setShowCreateFromDSLModal(false)
setDroppedDSLFile(undefined)
mutate()
}}
droppedFile={droppedDSLFile}
/>
)}
</>
)

View File

@ -0,0 +1,72 @@
import { useEffect, useState } from 'react'
type DSLDragDropHookProps = {
onDSLFileDropped: (file: File) => void
containerRef: React.RefObject<HTMLDivElement>
enabled?: boolean
}
export const useDSLDragDrop = ({ onDSLFileDropped, containerRef, enabled = true }: DSLDragDropHookProps) => {
const [dragging, setDragging] = useState(false)
const handleDragEnter = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.dataTransfer?.types.includes('Files'))
setDragging(true)
}
const handleDragOver = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
}
const handleDragLeave = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.relatedTarget === null || !containerRef.current?.contains(e.relatedTarget as Node))
setDragging(false)
}
const handleDrop = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
setDragging(false)
if (!e.dataTransfer)
return
const files = [...e.dataTransfer.files]
if (files.length === 0)
return
const file = files[0]
if (file.name.toLowerCase().endsWith('.yaml') || file.name.toLowerCase().endsWith('.yml'))
onDSLFileDropped(file)
}
useEffect(() => {
if (!enabled)
return
const current = containerRef.current
if (current) {
current.addEventListener('dragenter', handleDragEnter)
current.addEventListener('dragover', handleDragOver)
current.addEventListener('dragleave', handleDragLeave)
current.addEventListener('drop', handleDrop)
}
return () => {
if (current) {
current.removeEventListener('dragenter', handleDragEnter)
current.removeEventListener('dragover', handleDragOver)
current.removeEventListener('dragleave', handleDragLeave)
current.removeEventListener('drop', handleDrop)
}
}
}, [containerRef, enabled])
return {
dragging: enabled ? dragging : false,
}
}

View File

@ -314,10 +314,10 @@ const AppPublisher = ({
{!isAppAccessSet && <p className='system-xs-regular mt-1 text-text-warning'>{t('app.publishApp.notSetDesc')}</p>}
</div>}
<div className='flex flex-col gap-y-1 border-t-[0.5px] border-t-divider-regular p-4 pt-3'>
<Tooltip triggerClassName='flex' disabled={!systemFeatures.webapp_auth.enabled || userCanAccessApp?.result} popupContent={t('app.noAccessPermission')} asChild={false}>
<Tooltip triggerClassName='flex' disabled={!systemFeatures.webapp_auth.enabled || appDetail?.access_mode === AccessMode.EXTERNAL_MEMBERS || userCanAccessApp?.result} popupContent={t('app.noAccessPermission')} asChild={false}>
<SuggestedAction
className='flex-1'
disabled={!publishedAt || (systemFeatures.webapp_auth.enabled && !userCanAccessApp?.result)}
disabled={!publishedAt || (systemFeatures.webapp_auth.enabled && appDetail?.access_mode !== AccessMode.EXTERNAL_MEMBERS && !userCanAccessApp?.result)}
link={appURL}
icon={<RiPlayCircleLine className='h-4 w-4' />}
>
@ -326,10 +326,10 @@ const AppPublisher = ({
</Tooltip>
{appDetail?.mode === 'workflow' || appDetail?.mode === 'completion'
? (
<Tooltip triggerClassName='flex' disabled={!systemFeatures.webapp_auth.enabled || userCanAccessApp?.result} popupContent={t('app.noAccessPermission')} asChild={false}>
<Tooltip triggerClassName='flex' disabled={!systemFeatures.webapp_auth.enabled || appDetail.access_mode === AccessMode.EXTERNAL_MEMBERS || userCanAccessApp?.result} popupContent={t('app.noAccessPermission')} asChild={false}>
<SuggestedAction
className='flex-1'
disabled={!publishedAt || (systemFeatures.webapp_auth.enabled && !userCanAccessApp?.result)}
disabled={!publishedAt || (systemFeatures.webapp_auth.enabled && appDetail.access_mode !== AccessMode.EXTERNAL_MEMBERS && !userCanAccessApp?.result)}
link={`${appURL}${appURL.includes('?') ? '&' : '?'}mode=batch`}
icon={<RiPlayList2Line className='h-4 w-4' />}
>

View File

@ -156,12 +156,11 @@ const Debug: FC<IDebug> = ({
}
let hasEmptyInput = ''
const requiredVars = modelConfig.configs.prompt_variables.filter(({ key, name, required, type }) => {
if (type !== 'string' && type !== 'paragraph' && type !== 'select')
if (type !== 'string' && type !== 'paragraph' && type !== 'select' && type !== 'number')
return false
const res = (!key || !key.trim()) || (!name || !name.trim()) || (required || required === undefined || required === null)
return res
}) // compatible with old version
// debugger
requiredVars.forEach(({ key, name }) => {
if (hasEmptyInput)
return

View File

@ -1,7 +1,7 @@
'use client'
import type { MouseEventHandler } from 'react'
import { useMemo, useRef, useState } from 'react'
import { useEffect, useMemo, useRef, useState } from 'react'
import { useRouter } from 'next/navigation'
import { useContext } from 'use-context-selector'
import { useTranslation } from 'react-i18next'
@ -35,6 +35,7 @@ type CreateFromDSLModalProps = {
onClose: () => void
activeTab?: string
dslUrl?: string
droppedFile?: File
}
export enum CreateFromDSLModalTab {
@ -42,11 +43,11 @@ export enum CreateFromDSLModalTab {
FROM_URL = 'from-url',
}
const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDSLModalTab.FROM_FILE, dslUrl = '' }: CreateFromDSLModalProps) => {
const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDSLModalTab.FROM_FILE, dslUrl = '', droppedFile }: CreateFromDSLModalProps) => {
const { push } = useRouter()
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const [currentFile, setDSLFile] = useState<File>()
const [currentFile, setDSLFile] = useState<File | undefined>(droppedFile)
const [fileContent, setFileContent] = useState<string>()
const [currentTab, setCurrentTab] = useState(activeTab)
const [dslUrlValue, setDslUrlValue] = useState(dslUrl)
@ -78,6 +79,11 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
const isCreatingRef = useRef(false)
useEffect(() => {
if (droppedFile)
handleFile(droppedFile)
}, [droppedFile])
const onCreate: MouseEventHandler = async () => {
if (currentTab === CreateFromDSLModalTab.FROM_FILE && !currentFile)
return

View File

@ -50,6 +50,10 @@ const OPTION_MAP = {
// user_id: 'YOU CAN DEFINE USER ID HERE',
// conversation_id: 'YOU CAN DEFINE CONVERSATION ID HERE, IT MUST BE A VALID UUID',
},
userVariables: {
// avatar_url: 'YOU CAN DEFINE USER AVATAR URL HERE',
// name: 'YOU CAN DEFINE USER NAME HERE',
},
}
</script>
<script

View File

@ -25,6 +25,7 @@ import SuggestedQuestions from '@/app/components/base/chat/chat/answer/suggested
import { Markdown } from '@/app/components/base/markdown'
import cn from '@/utils/classnames'
import type { FileEntity } from '../../file-uploader/types'
import Avatar from '../../avatar'
const ChatWrapper = () => {
const {
@ -49,6 +50,7 @@ const ChatWrapper = () => {
setClearChatList,
setIsResponding,
allInputsHidden,
initUserVariables,
} = useEmbeddedChatbotContext()
const appConfig = useMemo(() => {
const config = appParams || {}
@ -261,6 +263,14 @@ const ChatWrapper = () => {
switchSibling={siblingMessageId => setTargetMessageId(siblingMessageId)}
inputDisabled={inputDisabled}
isMobile={isMobile}
questionIcon={
initUserVariables?.avatar_url
? <Avatar
avatar={initUserVariables.avatar_url}
name={initUserVariables.name || 'user'}
size={40}
/> : undefined
}
/>
)
}

View File

@ -52,6 +52,10 @@ export type EmbeddedChatbotContextValue = {
currentConversationInputs: Record<string, any> | null,
setCurrentConversationInputs: (v: Record<string, any>) => void,
allInputsHidden: boolean
initUserVariables?: {
name?: string
avatar_url?: string
}
}
export const EmbeddedChatbotContext = createContext<EmbeddedChatbotContextValue>({
@ -81,5 +85,6 @@ export const EmbeddedChatbotContext = createContext<EmbeddedChatbotContextValue>
currentConversationInputs: {},
setCurrentConversationInputs: noop,
allInputsHidden: false,
initUserVariables: {},
})
export const useEmbeddedChatbotContext = () => useContext(EmbeddedChatbotContext)

View File

@ -15,7 +15,7 @@ import type {
Feedback,
} from '../types'
import { CONVERSATION_ID_INFO } from '../constants'
import { buildChatItemTree, getProcessedInputsFromUrlParams, getProcessedSystemVariablesFromUrlParams } from '../utils'
import { buildChatItemTree, getProcessedInputsFromUrlParams, getProcessedSystemVariablesFromUrlParams, getProcessedUserVariablesFromUrlParams } from '../utils'
import { getProcessedFilesFromResponse } from '../../file-uploader/utils'
import {
fetchAppInfo,
@ -169,6 +169,7 @@ export const useEmbeddedChatbot = () => {
const newConversationInputsRef = useRef<Record<string, any>>({})
const [newConversationInputs, setNewConversationInputs] = useState<Record<string, any>>({})
const [initInputs, setInitInputs] = useState<Record<string, any>>({})
const [initUserVariables, setInitUserVariables] = useState<Record<string, any>>({})
const handleNewConversationInputsChange = useCallback((newInputs: Record<string, any>) => {
newConversationInputsRef.current = newInputs
setNewConversationInputs(newInputs)
@ -237,7 +238,9 @@ export const useEmbeddedChatbot = () => {
// init inputs from url params
(async () => {
const inputs = await getProcessedInputsFromUrlParams()
const userVariables = await getProcessedUserVariablesFromUrlParams()
setInitInputs(inputs)
setInitUserVariables(userVariables)
})()
}, [])
useEffect(() => {
@ -418,5 +421,6 @@ export const useEmbeddedChatbot = () => {
currentConversationInputs,
setCurrentConversationInputs,
allInputsHidden,
initUserVariables,
}
}

View File

@ -195,6 +195,7 @@ const EmbeddedChatbotWrapper = () => {
currentConversationInputs,
setCurrentConversationInputs,
allInputsHidden,
initUserVariables,
} = useEmbeddedChatbot()
return <EmbeddedChatbotContext.Provider value={{
@ -233,6 +234,7 @@ const EmbeddedChatbotWrapper = () => {
currentConversationInputs,
setCurrentConversationInputs,
allInputsHidden,
initUserVariables,
}}>
<Chatbot />
</EmbeddedChatbotContext.Provider>

View File

@ -32,7 +32,8 @@ async function getProcessedInputsFromUrlParams(): Promise<Record<string, any>> {
const entriesArray = Array.from(urlParams.entries())
await Promise.all(
entriesArray.map(async ([key, value]) => {
if (!key.startsWith('sys.'))
const prefixArray = ['sys.', 'user.']
if (!prefixArray.some(prefix => key.startsWith(prefix)))
inputs[key] = await decodeBase64AndDecompress(decodeURIComponent(value))
}),
)
@ -52,6 +53,19 @@ async function getProcessedSystemVariablesFromUrlParams(): Promise<Record<string
return systemVariables
}
async function getProcessedUserVariablesFromUrlParams(): Promise<Record<string, any>> {
const urlParams = new URLSearchParams(window.location.search)
const userVariables: Record<string, any> = {}
const entriesArray = Array.from(urlParams.entries())
await Promise.all(
entriesArray.map(async ([key, value]) => {
if (key.startsWith('user.'))
userVariables[key.slice(5)] = await decodeBase64AndDecompress(decodeURIComponent(value))
}),
)
return userVariables
}
function isValidGeneratedAnswer(item?: ChatItem | ChatItemInTree): boolean {
return !!item && item.isAnswer && !item.id.startsWith('answer-placeholder-') && !item.isOpeningStatement
}
@ -198,6 +212,7 @@ export {
getRawInputsFromUrlParams,
getProcessedInputsFromUrlParams,
getProcessedSystemVariablesFromUrlParams,
getProcessedUserVariablesFromUrlParams,
isValidGeneratedAnswer,
getLastAnswer,
buildChatItemTree,
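To produce `user.*` query parameters this helper can read, each value must be compressed, base64-encoded, and percent-encoded. The sketch below assumes gzip is the compression that decodeBase64AndDecompress reverses (that helper's implementation is not shown in this diff), so verify against it before relying on the format; the parameter values are examples.

import base64
import gzip
import urllib.parse

def encode_user_variable(value: str) -> str:
    # compress -> base64 -> percent-encode, the reverse of
    # decodeURIComponent + decodeBase64AndDecompress on the frontend (compression assumed to be gzip)
    compressed = gzip.compress(value.encode("utf-8"))
    return urllib.parse.quote(base64.b64encode(compressed).decode("ascii"))

params = {
    "user.name": encode_user_variable("Alice"),
    "user.avatar_url": encode_user_variable("https://example.com/avatar.png"),
}
query = "&".join(f"{k}={v}" for k, v in params.items())
print(f"?{query}")  # append to the embedded chatbot URL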

View File

@ -1,4 +1,4 @@
import { memo, useEffect, useMemo, useRef, useState } from 'react'
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import ReactEcharts from 'echarts-for-react'
import SyntaxHighlighter from 'react-syntax-highlighter'
import {
@ -62,6 +62,17 @@ const getCorrectCapitalizationLanguageName = (language: string) => {
// visit https://reactjs.org/docs/error-decoder.html?invariant=185 for the full message
// or use the non-minified dev environment for full errors and additional helpful warnings.
// Define ECharts event parameter types
interface EChartsEventParams {
type: string;
seriesIndex?: number;
dataIndex?: number;
name?: string;
value?: any;
currentIndex?: number; // Added for timeline events
[key: string]: any;
}
const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any) => {
const { theme } = useTheme()
const [isSVG, setIsSVG] = useState(true)
@ -70,6 +81,11 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
const echartsRef = useRef<any>(null)
const contentRef = useRef<string>('')
const processedRef = useRef<boolean>(false) // Track if content was successfully processed
const instanceIdRef = useRef<string>(`chart-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`) // Unique ID for logging
const isInitialRenderRef = useRef<boolean>(true) // Track if this is initial render
const chartInstanceRef = useRef<any>(null) // Direct reference to ECharts instance
const resizeTimerRef = useRef<NodeJS.Timeout | null>(null) // For debounce handling
const finishedEventCountRef = useRef<number>(0) // Track finished event trigger count
const match = /language-(\w+)/.exec(className || '')
const language = match?.[1]
const languageShowName = getCorrectCapitalizationLanguageName(language || '')
@ -85,36 +101,64 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
width: 'auto',
}) as any, [])
const echartsOnEvents = useMemo(() => ({
finished: () => {
const instance = echartsRef.current?.getEchartsInstance?.()
if (instance)
instance.resize()
// Debounce resize operations
const debouncedResize = useCallback(() => {
if (resizeTimerRef.current)
clearTimeout(resizeTimerRef.current)
resizeTimerRef.current = setTimeout(() => {
if (chartInstanceRef.current)
chartInstanceRef.current.resize()
resizeTimerRef.current = null
}, 200)
}, [])
// Handle ECharts instance initialization
const handleChartReady = useCallback((instance: any) => {
chartInstanceRef.current = instance
// Force resize to ensure timeline displays correctly
setTimeout(() => {
if (chartInstanceRef.current)
chartInstanceRef.current.resize()
}, 200)
}, [])
// Store event handlers in useMemo to avoid recreating them
const echartsEvents = useMemo(() => ({
finished: (params: EChartsEventParams) => {
// Limit finished event frequency to avoid infinite loops
finishedEventCountRef.current++
if (finishedEventCountRef.current > 3) {
// Stop processing after 3 times to avoid infinite loops
return
}
if (chartInstanceRef.current) {
// Use debounced resize
debouncedResize()
}
},
}), [echartsRef]) // echartsRef is stable, so this effectively runs once.
}), [debouncedResize])
// Handle container resize for echarts
useEffect(() => {
if (language !== 'echarts' || !echartsRef.current) return
if (language !== 'echarts' || !chartInstanceRef.current) return
const handleResize = () => {
// This gets the echarts instance from the component
const instance = echartsRef.current?.getEchartsInstance?.()
if (instance)
instance.resize()
if (chartInstanceRef.current)
// Use debounced resize
debouncedResize()
}
window.addEventListener('resize', handleResize)
// Also manually trigger resize after a short delay to ensure proper sizing
const resizeTimer = setTimeout(handleResize, 200)
return () => {
window.removeEventListener('resize', handleResize)
clearTimeout(resizeTimer)
if (resizeTimerRef.current)
clearTimeout(resizeTimerRef.current)
}
}, [language, echartsRef.current])
}, [language, debouncedResize])
// Process chart data when content changes
useEffect(() => {
// Only process echarts content
@ -222,6 +266,7 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
}
}, [language, children])
// Cache rendered content to avoid unnecessary re-renders
const renderCodeContent = useMemo(() => {
const content = String(children).replace(/\n$/, '')
switch (language) {
@ -274,6 +319,9 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
// Success state: show the chart
if (chartState === 'success' && finalChartOption) {
// Reset finished event counter
finishedEventCountRef.current = 0
return (
<div style={{
minWidth: '300px',
@ -286,13 +334,20 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
}}>
<ErrorBoundary>
<ReactEcharts
ref={echartsRef}
ref={(e) => {
if (e && isInitialRenderRef.current) {
echartsRef.current = e
isInitialRenderRef.current = false
}
}}
option={finalChartOption}
style={echartsStyle}
theme={isDarkMode ? 'dark' : undefined}
opts={echartsOpts}
notMerge={true}
onEvents={echartsOnEvents}
notMerge={false}
lazyUpdate={false}
onEvents={echartsEvents}
onChartReady={handleChartReady}
/>
</ErrorBoundary>
</div>
@ -363,7 +418,7 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
</SyntaxHighlighter>
)
}
}, [children, language, isSVG, finalChartOption, props, theme, match, chartState, isDarkMode, echartsStyle, echartsOpts, echartsOnEvents])
}, [children, language, isSVG, finalChartOption, props, theme, match, chartState, isDarkMode, echartsStyle, echartsOpts, handleChartReady, echartsEvents])
if (inline || !match)
return <code {...props} className={className}>{children}</code>

View File

@ -28,7 +28,7 @@ export const preprocessLaTeX = (content: string) => {
}
export const preprocessThinkTag = (content: string) => {
const thinkOpenTagRegex = /<think>\n/g
const thinkOpenTagRegex = /(<think>\n)+/g
const thinkCloseTagRegex = /\n<\/think>/g
return flow([
(str: string) => str.replace(thinkOpenTagRegex, '<details data-think=true>\n'),
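The effect of widening the opening-tag regex to `(<think>\n)+` can be checked with a quick sketch (Python's re stands in for the JavaScript replace; the sample string is made up): a run of repeated `<think>` openers now collapses into a single `<details data-think=true>` marker instead of producing one marker per tag.

import re

think_open = re.compile(r"(<think>\n)+")

sample = "<think>\n<think>\nreasoning goes here\n</think>\nanswer"
print(think_open.sub("<details data-think=true>\n", sample))
# -> "<details data-think=true>\nreasoning goes here\n</think>\nanswer"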

View File

@ -533,6 +533,12 @@ Workflow applications offers non-session support and is ideal for translation, a
<Property name='limit' type='int' key='limit'>
How many chat history messages to return in one request, default is 20.
</Property>
<Property name='created_by_end_user_session_id' type='str' key='created_by_end_user_session_id'>
Filter logs by the end user who created them, identified by the end user's session ID, for example `abc-123`.
</Property>
<Property name='created_by_account' type='str' key='created_by_account'>
Filter logs by the account that created them, identified by the account's email, for example `lizb@test.com`; a request sketch follows this list.
</Property>
</Properties>
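A rough request sketch combining the two new filters with the existing paging parameters; the HTTP method, base URL, endpoint path, and auth header below are placeholders — use the values given in this document's endpoint description. Only the query parameters are taken from the fields above.

import requests

# Placeholder URL and API key; only the params dict reflects the documented fields.
resp = requests.get(
    "https://api.dify.example/v1/workflows/logs",
    headers={"Authorization": "Bearer app-xxxxxxxx"},
    params={
        "page": 1,
        "limit": 20,
        "created_by_end_user_session_id": "abc-123",
        "created_by_account": "lizb@test.com",
    },
)
print(resp.json())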
### Response

View File

@ -534,6 +534,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
<Property name='limit' type='int' key='limit'>
1回のリクエストで返すチャット履歴メッセージの数、デフォルトは20。
</Property>
<Property name='created_by_end_user_session_id' type='str' key='created_by_end_user_session_id'>
どのendUserによって作成されたか、例えば、`abc-123`。
</Property>
<Property name='created_by_account' type='str' key='created_by_account'>
どのメールアカウントによって作成されたか、例えば、lizb@test.com。
</Property>
</Properties>
### 応答

View File

@ -522,6 +522,12 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
<Property name='limit' type='int' key='limit'>
每页条数, 默认20.
</Property>
<Property name='created_by_end_user_session_id' type='str' key='created_by_end_user_session_id'>
由哪个endUser创建例如`abc-123`.
</Property>
<Property name='created_by_account' type='str' key='created_by_account'>
由哪个邮箱账户创建例如lizb@test.com.
</Property>
</Properties>
### Response

View File

@ -17,9 +17,9 @@ import Loading from '@/app/components/base/loading'
import ProviderCard from '@/app/components/plugins/provider-card'
import List from '@/app/components/plugins/marketplace/list'
import type { Plugin } from '@/app/components/plugins/types'
import { MARKETPLACE_URL_PREFIX } from '@/config'
import cn from '@/utils/classnames'
import { getLocaleOnClient } from '@/i18n'
import { getMarketplaceUrl } from '@/utils/var'
type InstallFromMarketplaceProps = {
providers: ModelProvider[]
@ -55,7 +55,7 @@ const InstallFromMarketplace = ({
</div>
<div className='mb-2 flex items-center pt-2'>
<span className='system-sm-regular pr-1 text-text-tertiary'>{t('common.modelProvider.discoverMore')}</span>
<Link target="_blank" href={`${MARKETPLACE_URL_PREFIX}${theme ? `?theme=${theme}` : ''}`} className='system-sm-medium inline-flex items-center text-text-accent'>
<Link target="_blank" href={getMarketplaceUrl('', { theme })} className='system-sm-medium inline-flex items-center text-text-accent'>
{t('plugin.marketplace.difyMarketplace')}
<RiArrowRightUpLine className='h-4 w-4' />
</Link>

View File

@ -31,7 +31,7 @@ const PluginsNav = ({
)}>
<div
className={classNames(
'relative flex flex-row h-8 p-1.5 gap-0.5 border border-transparent items-center justify-center rounded-xl system-sm-medium-uppercase',
'relative flex flex-row h-8 p-1.5 gap-0.5 border border-transparent items-center justify-center rounded-xl system-sm-medium',
activated && 'border-components-main-nav-nav-button-border bg-components-main-nav-nav-button-bg-active shadow-md text-components-main-nav-nav-button-text',
!activated && 'text-text-tertiary hover:bg-state-base-hover hover:text-text-secondary',
(isInstallingWithError || isFailed) && !activated && 'border-components-panel-border-subtle',

View File

@ -15,6 +15,7 @@ import { renderI18nObject } from '@/i18n'
import { useMixedTranslation } from '@/app/components/plugins/marketplace/hooks'
import Partner from '../base/badges/partner'
import Verified from '../base/badges/verified'
import { RiAlertFill } from '@remixicon/react'
export type Props = {
className?: string
@ -28,6 +29,7 @@ export type Props = {
isLoading?: boolean
loadingFileName?: string
locale?: string
limitedInstall?: boolean
}
const Card = ({
@ -42,6 +44,7 @@ const Card = ({
isLoading = false,
loadingFileName,
locale: localeFromProps,
limitedInstall = false,
}: Props) => {
const defaultLocale = useGetLanguage()
const locale = localeFromProps ? getLanguage(localeFromProps) : defaultLocale
@ -54,7 +57,7 @@ const Card = ({
obj ? renderI18nObject(obj, locale) : ''
const isPartner = badges.includes('partner')
const wrapClassName = cn('hover-bg-components-panel-on-panel-item-bg relative rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-on-panel-item-bg p-4 pb-3 shadow-xs', className)
const wrapClassName = cn('hover-bg-components-panel-on-panel-item-bg relative overflow-hidden rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-on-panel-item-bg shadow-xs', className)
if (isLoading) {
return (
<Placeholder
@ -66,30 +69,39 @@ const Card = ({
return (
<div className={wrapClassName}>
{!hideCornerMark && <CornerMark text={cornerMark} />}
{/* Header */}
<div className="flex">
<Icon src={icon} installed={installed} installFailed={installFailed} />
<div className="ml-3 w-0 grow">
<div className="flex h-5 items-center">
<Title title={getLocalizedText(label)} />
{isPartner && <Partner className='ml-0.5 h-4 w-4' text={t('plugin.marketplace.partnerTip')} />}
{verified && <Verified className='ml-0.5 h-4 w-4' text={t('plugin.marketplace.verifiedTip')} />}
{titleLeft} {/* This can be version badge */}
<div className={cn('p-4 pb-3', limitedInstall && 'pb-1')}>
{!hideCornerMark && <CornerMark text={cornerMark} />}
{/* Header */}
<div className="flex">
<Icon src={icon} installed={installed} installFailed={installFailed} />
<div className="ml-3 w-0 grow">
<div className="flex h-5 items-center">
<Title title={getLocalizedText(label)} />
{isPartner && <Partner className='ml-0.5 h-4 w-4' text={t('plugin.marketplace.partnerTip')} />}
{verified && <Verified className='ml-0.5 h-4 w-4' text={t('plugin.marketplace.verifiedTip')} />}
{titleLeft} {/* This can be version badge */}
</div>
<OrgInfo
className="mt-0.5"
orgName={org}
packageName={name}
/>
</div>
<OrgInfo
className="mt-0.5"
orgName={org}
packageName={name}
/>
</div>
<Description
className="mt-3"
text={getLocalizedText(brief)}
descriptionLineRows={descriptionLineRows}
/>
{footer && <div>{footer}</div>}
</div>
<Description
className="mt-3"
text={getLocalizedText(brief)}
descriptionLineRows={descriptionLineRows}
/>
{footer && <div>{footer}</div>}
{limitedInstall
&& <div className='relative flex h-8 items-center gap-x-2 px-3 after:absolute after:bottom-0 after:left-0 after:right-0 after:top-0 after:bg-toast-warning-bg after:opacity-40'>
<RiAlertFill className='h-3 w-3 shrink-0 text-text-warning-secondary' />
<p className='system-xs-regular z-10 grow text-text-secondary'>
{t('plugin.installModal.installWarning')}
</p>
</div>}
</div>
)
}

View File

@ -0,0 +1,46 @@
import { useGlobalPublicStore } from '@/context/global-public-context'
import type { SystemFeatures } from '@/types/feature'
import { InstallationScope } from '@/types/feature'
import type { Plugin, PluginManifestInMarket } from '../../types'
type PluginProps = (Plugin | PluginManifestInMarket) & { from: 'github' | 'marketplace' | 'package' }
export function pluginInstallLimit(plugin: PluginProps, systemFeatures: SystemFeatures) {
if (systemFeatures.plugin_installation_permission.restrict_to_marketplace_only) {
if (plugin.from === 'github' || plugin.from === 'package')
return { canInstall: false }
}
if (systemFeatures.plugin_installation_permission.plugin_installation_scope === InstallationScope.ALL) {
return {
canInstall: true,
}
}
if (systemFeatures.plugin_installation_permission.plugin_installation_scope === InstallationScope.NONE) {
return {
canInstall: false,
}
}
const verification = plugin.verification || {}
if (!plugin.verification || !plugin.verification.authorized_category)
verification.authorized_category = 'langgenius'
if (systemFeatures.plugin_installation_permission.plugin_installation_scope === InstallationScope.OFFICIAL_ONLY) {
return {
canInstall: verification.authorized_category === 'langgenius',
}
}
if (systemFeatures.plugin_installation_permission.plugin_installation_scope === InstallationScope.OFFICIAL_AND_PARTNER) {
return {
canInstall: verification.authorized_category === 'langgenius' || verification.authorized_category === 'partner',
}
}
return {
canInstall: true,
}
}
export default function usePluginInstallLimit(plugin: PluginProps) {
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
return pluginInstallLimit(plugin, systemFeatures)
}

View File

@ -39,7 +39,7 @@ const Item: FC<Props> = ({
plugin_id: data.unique_identifier,
}
onFetchedPayload(payload)
setPayload(payload)
setPayload({ ...payload, from: dependency.type })
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [data])

View File

@ -8,6 +8,7 @@ import useGetIcon from '../../base/use-get-icon'
import { MARKETPLACE_API_PREFIX } from '@/config'
import Version from '../../base/version'
import type { VersionProps } from '../../../types'
import usePluginInstallLimit from '../../hooks/use-install-plugin-limit'
type Props = {
checked: boolean
@ -29,9 +30,11 @@ const LoadedItem: FC<Props> = ({
...particleVersionInfo,
toInstallVersion: payload.version,
}
const { canInstall } = usePluginInstallLimit(payload)
return (
<div className='flex items-center space-x-2'>
<Checkbox
disabled={!canInstall}
className='shrink-0'
checked={checked}
onCheck={() => onCheckedChange(payload)}
@ -43,6 +46,7 @@ const LoadedItem: FC<Props> = ({
icon: isFromMarketPlace ? `${MARKETPLACE_API_PREFIX}/plugins/${payload.org}/${payload.name}/icon` : getIconUrl(payload.icon),
}}
titleLeft={payload.version ? <Version {...versionInfo} /> : null}
limitedInstall={!canInstall}
/>
</div>
)

View File

@ -29,7 +29,7 @@ const PackageItem: FC<Props> = ({
const plugin = pluginManifestToCardPluginProps(payload.value.manifest)
return (
<LoadedItem
payload={plugin}
payload={{ ...plugin, from: payload.type }}
checked={checked}
onCheckedChange={onCheckedChange}
isFromMarketPlace={isFromMarketPlace}

View File

@ -1,5 +1,6 @@
'use client'
import type { FC } from 'react'
import type { ForwardRefRenderFunction } from 'react'
import { useImperativeHandle } from 'react'
import React, { useCallback, useEffect, useMemo, useState } from 'react'
import type { Dependency, GitHubItemAndMarketPlaceDependency, PackageDependency, Plugin, VersionInfo } from '../../../types'
import MarketplaceItem from '../item/marketplace-item'
@ -9,22 +10,34 @@ import useCheckInstalled from '@/app/components/plugins/install-plugin/hooks/use
import produce from 'immer'
import PackageItem from '../item/package-item'
import LoadingError from '../../base/loading-error'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { pluginInstallLimit } from '../../hooks/use-install-plugin-limit'
type Props = {
allPlugins: Dependency[]
selectedPlugins: Plugin[]
onSelect: (plugin: Plugin, selectedIndex: number) => void
onSelect: (plugin: Plugin, selectedIndex: number, allCanInstallPluginsLength: number) => void
onSelectAll: (plugins: Plugin[], selectedIndexes: number[]) => void
onDeSelectAll: () => void
onLoadedAllPlugin: (installedInfo: Record<string, VersionInfo>) => void
isFromMarketPlace?: boolean
}
const InstallByDSLList: FC<Props> = ({
export type ExposeRefs = {
selectAllPlugins: () => void
deSelectAllPlugins: () => void
}
const InstallByDSLList: ForwardRefRenderFunction<ExposeRefs, Props> = ({
allPlugins,
selectedPlugins,
onSelect,
onSelectAll,
onDeSelectAll,
onLoadedAllPlugin,
isFromMarketPlace,
}) => {
}, ref) => {
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
// DSL has id, to get plugin info to show more info
const { isLoading: isFetchingMarketplaceDataById, data: infoGetById, error: infoByIdError } = useFetchPluginsInMarketPlaceByInfo(allPlugins.filter(d => d.type === 'marketplace').map((d) => {
const dependecy = (d as GitHubItemAndMarketPlaceDependency).value
@ -97,7 +110,8 @@ const InstallByDSLList: FC<Props> = ({
const sortedList = allPlugins.filter(d => d.type === 'marketplace').map((d) => {
const p = d as GitHubItemAndMarketPlaceDependency
const id = p.value.marketplace_plugin_unique_identifier?.split(':')[0]
return infoGetById.data.list.find(item => item.plugin.plugin_id === id)?.plugin
const retPluginInfo = infoGetById.data.list.find(item => item.plugin.plugin_id === id)?.plugin
return { ...retPluginInfo, from: d.type } as Plugin
})
const payloads = sortedList
const failedIndex: number[] = []
@ -106,7 +120,7 @@ const InstallByDSLList: FC<Props> = ({
if (payloads[i]) {
draft[index] = {
...payloads[i],
version: payloads[i].version || payloads[i].latest_version,
version: payloads[i]!.version || payloads[i]!.latest_version,
}
}
else { failedIndex.push(index) }
@ -181,9 +195,35 @@ const InstallByDSLList: FC<Props> = ({
const handleSelect = useCallback((index: number) => {
return () => {
onSelect(plugins[index]!, index)
const canSelectPlugins = plugins.filter((p) => {
const { canInstall } = pluginInstallLimit(p!, systemFeatures)
return canInstall
})
onSelect(plugins[index]!, index, canSelectPlugins.length)
}
}, [onSelect, plugins])
}, [onSelect, plugins, systemFeatures])
useImperativeHandle(ref, () => ({
selectAllPlugins: () => {
const selectedIndexes: number[] = []
const selectedPlugins: Plugin[] = []
allPlugins.forEach((d, index) => {
const p = plugins[index]
if (!p)
return
const { canInstall } = pluginInstallLimit(p, systemFeatures)
if (canInstall) {
selectedIndexes.push(index)
selectedPlugins.push(p)
}
})
onSelectAll(selectedPlugins, selectedIndexes)
},
deSelectAllPlugins: () => {
onDeSelectAll()
},
}))
return (
<>
{allPlugins.map((d, index) => {
@ -211,7 +251,7 @@ const InstallByDSLList: FC<Props> = ({
key={index}
checked={!!selectedPlugins.find(p => p.plugin_id === plugins[index]?.plugin_id)}
onCheckedChange={handleSelect(index)}
payload={plugin}
payload={{ ...plugin, from: d.type } as Plugin}
version={(d as GitHubItemAndMarketPlaceDependency).value.version! || plugin?.version || ''}
versionInfo={getVersionInfo(`${plugin?.org || plugin?.author}/${plugin?.name}`)}
/>
@ -234,4 +274,4 @@ const InstallByDSLList: FC<Props> = ({
</>
)
}
export default React.memo(InstallByDSLList)
export default React.forwardRef(InstallByDSLList)

View File

@ -1,15 +1,18 @@
'use client'
import type { FC } from 'react'
import { useRef } from 'react'
import React, { useCallback, useState } from 'react'
import type { Dependency, InstallStatusResponse, Plugin, VersionInfo } from '../../../types'
import Button from '@/app/components/base/button'
import { RiLoader2Line } from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import type { ExposeRefs } from './install-multi'
import InstallMulti from './install-multi'
import { useInstallOrUpdate } from '@/service/use-plugins'
import useRefreshPluginList from '../../hooks/use-refresh-plugin-list'
import { useCanInstallPluginFromMarketplace } from '@/app/components/plugins/plugin-page/use-permission'
import { useMittContextSelector } from '@/context/mitt-context'
import Checkbox from '@/app/components/base/checkbox'
const i18nPrefix = 'plugin.installModal'
type Props = {
@ -34,18 +37,8 @@ const Install: FC<Props> = ({
const [selectedPlugins, setSelectedPlugins] = React.useState<Plugin[]>([])
const [selectedIndexes, setSelectedIndexes] = React.useState<number[]>([])
const selectedPluginsNum = selectedPlugins.length
const installMultiRef = useRef<ExposeRefs>(null)
const { refreshPluginList } = useRefreshPluginList()
const handleSelect = (plugin: Plugin, selectedIndex: number) => {
const isSelected = !!selectedPlugins.find(p => p.plugin_id === plugin.plugin_id)
let nextSelectedPlugins
if (isSelected)
nextSelectedPlugins = selectedPlugins.filter(p => p.plugin_id !== plugin.plugin_id)
else
nextSelectedPlugins = [...selectedPlugins, plugin]
setSelectedPlugins(nextSelectedPlugins)
const nextSelectedIndexes = isSelected ? selectedIndexes.filter(i => i !== selectedIndex) : [...selectedIndexes, selectedIndex]
setSelectedIndexes(nextSelectedIndexes)
}
const [canInstall, setCanInstall] = React.useState(false)
const [installedInfo, setInstalledInfo] = useState<Record<string, VersionInfo> | undefined>(undefined)
@ -81,6 +74,51 @@ const Install: FC<Props> = ({
installedInfo: installedInfo!,
})
}
const [isSelectAll, setIsSelectAll] = useState(false)
const [isIndeterminate, setIsIndeterminate] = useState(false)
const handleClickSelectAll = useCallback(() => {
if (isSelectAll)
installMultiRef.current?.deSelectAllPlugins()
else
installMultiRef.current?.selectAllPlugins()
}, [isSelectAll])
const handleSelectAll = useCallback((plugins: Plugin[], selectedIndexes: number[]) => {
setSelectedPlugins(plugins)
setSelectedIndexes(selectedIndexes)
setIsSelectAll(true)
setIsIndeterminate(false)
}, [])
const handleDeSelectAll = useCallback(() => {
setSelectedPlugins([])
setSelectedIndexes([])
setIsSelectAll(false)
setIsIndeterminate(false)
}, [])
const handleSelect = useCallback((plugin: Plugin, selectedIndex: number, allPluginsLength: number) => {
const isSelected = !!selectedPlugins.find(p => p.plugin_id === plugin.plugin_id)
let nextSelectedPlugins
if (isSelected)
nextSelectedPlugins = selectedPlugins.filter(p => p.plugin_id !== plugin.plugin_id)
else
nextSelectedPlugins = [...selectedPlugins, plugin]
setSelectedPlugins(nextSelectedPlugins)
const nextSelectedIndexes = isSelected ? selectedIndexes.filter(i => i !== selectedIndex) : [...selectedIndexes, selectedIndex]
setSelectedIndexes(nextSelectedIndexes)
if (nextSelectedPlugins.length === 0) {
setIsSelectAll(false)
setIsIndeterminate(false)
}
else if (nextSelectedPlugins.length === allPluginsLength) {
setIsSelectAll(true)
setIsIndeterminate(false)
}
else {
setIsIndeterminate(true)
setIsSelectAll(false)
}
}, [selectedPlugins, selectedIndexes])
const { canInstallPluginFromMarketplace } = useCanInstallPluginFromMarketplace()
return (
<>
@ -90,9 +128,12 @@ const Install: FC<Props> = ({
</div>
<div className='w-full space-y-1 rounded-2xl bg-background-section-burn p-2'>
<InstallMulti
ref={installMultiRef}
allPlugins={allPlugins}
selectedPlugins={selectedPlugins}
onSelect={handleSelect}
onSelectAll={handleSelectAll}
onDeSelectAll={handleDeSelectAll}
onLoadedAllPlugin={handleLoadedAllPlugin}
isFromMarketPlace={isFromMarketPlace}
/>
@ -100,21 +141,29 @@ const Install: FC<Props> = ({
</div>
{/* Action Buttons */}
{!isHideButton && (
<div className='flex items-center justify-end gap-2 self-stretch p-6 pt-5'>
{!canInstall && (
<Button variant='secondary' className='min-w-[72px]' onClick={onCancel}>
{t('common.operation.cancel')}
<div className='flex items-center justify-between gap-2 self-stretch p-6 pt-5'>
<div className='px-2'>
{canInstall && <div className='flex items-center gap-x-2' onClick={handleClickSelectAll}>
<Checkbox checked={isSelectAll} indeterminate={isIndeterminate} />
<p className='system-sm-medium cursor-pointer text-text-secondary'>{isSelectAll ? t('common.operation.deSelectAll') : t('common.operation.selectAll')}</p>
</div>}
</div>
<div className='flex items-center justify-end gap-2 self-stretch'>
{!canInstall && (
<Button variant='secondary' className='min-w-[72px]' onClick={onCancel}>
{t('common.operation.cancel')}
</Button>
)}
<Button
variant='primary'
className='flex min-w-[72px] space-x-0.5'
disabled={!canInstall || isInstalling || selectedPlugins.length === 0 || !canInstallPluginFromMarketplace}
onClick={handleInstall}
>
{isInstalling && <RiLoader2Line className='h-4 w-4 animate-spin-slow' />}
<span>{t(`${i18nPrefix}.${isInstalling ? 'installing' : 'install'}`)}</span>
</Button>
)}
<Button
variant='primary'
className='flex min-w-[72px] space-x-0.5'
disabled={!canInstall || isInstalling || selectedPlugins.length === 0 || !canInstallPluginFromMarketplace}
onClick={handleInstall}
>
{isInstalling && <RiLoader2Line className='h-4 w-4 animate-spin-slow' />}
<span>{t(`${i18nPrefix}.${isInstalling ? 'installing' : 'install'}`)}</span>
</Button>
</div>
</div>
)}

View File

@ -124,7 +124,7 @@ const Installed: FC<Props> = ({
/>
</p>
{!isDifyVersionCompatible && (
<p className='system-md-regular flex items-center gap-1 text-text-secondary text-text-warning'>
<p className='system-md-regular flex items-center gap-1 text-text-warning'>
{t('plugin.difyVersionNotCompatible', { minimalDifyVersion: payload.meta.minimum_dify_version })}
</p>
)}

View File

@ -15,6 +15,7 @@ import Version from '../../base/version'
import { usePluginTaskList } from '@/service/use-plugins'
import { gte } from 'semver'
import { useAppContext } from '@/context/app-context'
import useInstallPluginLimit from '../../hooks/use-install-plugin-limit'
const i18nPrefix = 'plugin.installModal'
@ -124,15 +125,16 @@ const Installed: FC<Props> = ({
const isDifyVersionCompatible = useMemo(() => {
if (!pluginDeclaration || !langeniusVersionInfo.current_version) return true
return gte(langeniusVersionInfo.current_version, pluginDeclaration?.manifest.meta.minimum_dify_version ?? '0.0.0')
}, [langeniusVersionInfo.current_version, pluginDeclaration?.manifest.meta.minimum_dify_version])
}, [langeniusVersionInfo.current_version, pluginDeclaration])
const { canInstall } = useInstallPluginLimit({ ...payload, from: 'marketplace' })
return (
<>
<div className='flex flex-col items-start justify-center gap-4 self-stretch px-6 py-3'>
<div className='system-md-regular text-text-secondary'>
<p>{t(`${i18nPrefix}.readyToInstall`)}</p>
{!isDifyVersionCompatible && (
<p className='system-md-regular text-text-secondary text-text-warning'>
<p className='system-md-regular text-text-warning'>
{t('plugin.difyVersionNotCompatible', { minimalDifyVersion: pluginDeclaration?.manifest.meta.minimum_dify_version })}
</p>
)}
@ -146,6 +148,7 @@ const Installed: FC<Props> = ({
installedVersion={installedVersion}
toInstallVersion={toInstallVersion}
/>}
limitedInstall={!canInstall}
/>
</div>
</div>
@ -159,7 +162,7 @@ const Installed: FC<Props> = ({
<Button
variant='primary'
className='flex min-w-[72px] space-x-0.5'
disabled={isInstalling || isLoading}
disabled={isInstalling || isLoading || !canInstall}
onClick={handleInstall}
>
{isInstalling && <RiLoader2Line className='h-4 w-4 animate-spin-slow' />}
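
The `isDifyVersionCompatible` memo above uses `gte` from `semver` and treats a missing `minimum_dify_version` as `'0.0.0'`, so plugins that declare nothing pass the check. The same logic as a standalone sketch (the function name is illustrative):

```ts
import { gte } from 'semver'

// A plugin is installable when the running Dify version is at least the
// plugin's declared minimum; plugins that declare nothing are accepted.
const isCompatibleWithDify = (currentVersion: string | undefined, minimumDifyVersion: string | undefined): boolean => {
  if (!currentVersion)
    return true
  return gte(currentVersion, minimumDifyVersion ?? '0.0.0')
}

// isCompatibleWithDify('1.4.0', '1.3.0') -> true
```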

View File

@ -1,5 +1,6 @@
import type { Plugin, PluginDeclaration, PluginManifestInMarket } from '../types'
import type { GitHubUrlInfo } from '@/app/components/plugins/types'
import { isEmpty } from 'lodash-es'
export const pluginManifestToCardPluginProps = (pluginManifest: PluginDeclaration): Plugin => {
return {
@ -47,6 +48,7 @@ export const pluginManifestInMarketToPluginProps = (pluginManifest: PluginManife
},
tags: [],
badges: pluginManifest.badges,
verification: isEmpty(pluginManifest.verification) ? { authorized_category: 'langgenius' } : pluginManifest.verification,
}
}
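
The `isEmpty` guard above means marketplace manifests that omit `verification` fall back to the `langgenius` category instead of leaving the field undefined. Reduced to just that mapping (types trimmed to the relevant field):

```ts
import { isEmpty } from 'lodash-es'

type Verification = { authorized_category: 'langgenius' | 'partner' | 'community' }

// Manifests without verification data default to the first-party category.
const resolveVerification = (verification?: Verification): Verification =>
  isEmpty(verification) || !verification
    ? { authorized_category: 'langgenius' }
    : verification
```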

View File

@ -56,7 +56,7 @@ const CardWrapper = ({
>
{t('plugin.detailPanel.operation.install')}
</Button>
<a href={`${getPluginLinkInMarketplace(plugin)}?language=${localeFromLocale}${theme ? `&theme=${theme}` : ''}`} target='_blank' className='block w-[calc(50%-4px)] flex-1 shrink-0'>
<a href={getPluginLinkInMarketplace(plugin, { language: localeFromLocale, theme })} target='_blank' className='block w-[calc(50%-4px)] flex-1 shrink-0'>
<Button
className='w-full gap-0.5'
>

View File

@ -8,8 +8,8 @@ import type {
} from '@/app/components/plugins/marketplace/types'
import {
MARKETPLACE_API_PREFIX,
MARKETPLACE_URL_PREFIX,
} from '@/config'
import { getMarketplaceUrl } from '@/utils/var'
export const getPluginIconInMarketplace = (plugin: Plugin) => {
if (plugin.type === 'bundle')
@ -32,10 +32,10 @@ export const getFormattedPlugin = (bundle: any) => {
}
}
export const getPluginLinkInMarketplace = (plugin: Plugin) => {
export const getPluginLinkInMarketplace = (plugin: Plugin, params?: Record<string, string | undefined>) => {
if (plugin.type === 'bundle')
return `${MARKETPLACE_URL_PREFIX}/bundles/${plugin.org}/${plugin.name}`
return `${MARKETPLACE_URL_PREFIX}/plugins/${plugin.org}/${plugin.name}`
return getMarketplaceUrl(`/bundles/${plugin.org}/${plugin.name}`, params)
return getMarketplaceUrl(`/plugins/${plugin.org}/${plugin.name}`, params)
}
export const getMarketplacePluginsByCollectionId = async (collectionId: string, query?: CollectionsAndPluginsSearchParams) => {
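
Several call sites in this commit (CardWrapper, DetailHeader, PluginItem, Marketplace, OperationDropdown) switch from string-building on `MARKETPLACE_URL_PREFIX` to `getMarketplaceUrl(path, params)` from `@/utils/var`. The helper's implementation is not shown in this diff; a plausible sketch, assuming it simply joins the prefix with the path and appends only the defined query params:

```ts
// Hypothetical sketch of getMarketplaceUrl; the real helper lives in
// @/utils/var and may differ (e.g. default language handling).
const MARKETPLACE_URL_PREFIX = 'https://marketplace.dify.ai' // assumed value

export const getMarketplaceUrl = (path: string, params?: Record<string, string | undefined>): string => {
  const query = Object.entries(params ?? {})
    .filter(([, value]) => value !== undefined && value !== '')
    .map(([key, value]) => `${key}=${encodeURIComponent(value as string)}`)
    .join('&')
  return `${MARKETPLACE_URL_PREFIX}${path}${query ? `?${query}` : ''}`
}

// getMarketplaceUrl('/plugins/acme/foo', { theme: 'dark', q: undefined })
// -> 'https://marketplace.dify.ai/plugins/acme/foo?theme=dark'
```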

View File

@ -33,8 +33,9 @@ import { useGetLanguage } from '@/context/i18n'
import { useModalContext } from '@/context/modal-context'
import { useProviderContext } from '@/context/provider-context'
import { useInvalidateAllToolProviders } from '@/service/use-tools'
import { API_PREFIX, MARKETPLACE_URL_PREFIX } from '@/config'
import { API_PREFIX } from '@/config'
import cn from '@/utils/classnames'
import { getMarketplaceUrl } from '@/utils/var'
const i18nPrefix = 'plugin.action'
@ -87,7 +88,7 @@ const DetailHeader = ({
if (isFromGitHub)
return `https://github.com/${meta!.repo}`
if (isFromMarketplace)
return `${MARKETPLACE_URL_PREFIX}/plugins/${author}/${name}${theme ? `?theme=${theme}` : ''}`
return getMarketplaceUrl(`/plugins/${author}/${name}`, { theme })
return ''
}, [author, isFromGitHub, isFromMarketplace, meta, name, theme])

View File

@ -21,13 +21,14 @@ import OrgInfo from '../card/base/org-info'
import Title from '../card/base/title'
import Action from './action'
import cn from '@/utils/classnames'
import { API_PREFIX, MARKETPLACE_URL_PREFIX } from '@/config'
import { API_PREFIX } from '@/config'
import { useSingleCategories } from '../hooks'
import { useRenderI18nObject } from '@/hooks/use-i18n'
import useRefreshPluginList from '@/app/components/plugins/install-plugin/hooks/use-refresh-plugin-list'
import { useAppContext } from '@/context/app-context'
import { gte } from 'semver'
import Tooltip from '@/app/components/base/tooltip'
import { getMarketplaceUrl } from '@/utils/var'
type Props = {
className?: string
@ -166,7 +167,7 @@ const PluginItem: FC<Props> = ({
}
{source === PluginSource.marketplace
&& <>
<a href={`${MARKETPLACE_URL_PREFIX}/plugins/${author}/${name}${theme ? `?theme=${theme}` : ''}`} target='_blank' className='flex items-center gap-0.5'>
<a href={getMarketplaceUrl(`/plugins/${author}/${name}`, { theme })} target='_blank' className='flex items-center gap-0.5'>
<div className='system-2xs-medium-uppercase text-text-tertiary'>{t('plugin.from')} <span className='text-text-secondary'>marketplace</span></div>
<RiArrowRightUpLine className='h-3 w-3 text-text-tertiary' />
</a>

View File

@ -1,4 +1,5 @@
import React, { useMemo, useRef, useState } from 'react'
'use client'
import React, { useEffect, useMemo, useRef, useState } from 'react'
import { MagicBox } from '@/app/components/base/icons/src/vender/solid/mediaAndDevices'
import { FileZip } from '@/app/components/base/icons/src/vender/solid/files'
import { Github } from '@/app/components/base/icons/src/vender/solid/general'
@ -14,12 +15,18 @@ import { noop } from 'lodash-es'
import { useGlobalPublicStore } from '@/context/global-public-context'
import Button from '@/app/components/base/button'
type InstallMethod = {
icon: React.FC<{ className?: string }>
text: string
action: string
}
const Empty = () => {
const { t } = useTranslation()
const fileInputRef = useRef<HTMLInputElement>(null)
const [selectedAction, setSelectedAction] = useState<string | null>(null)
const [selectedFile, setSelectedFile] = useState<File | null>(null)
const { enable_marketplace } = useGlobalPublicStore(s => s.systemFeatures)
const { enable_marketplace, plugin_installation_permission } = useGlobalPublicStore(s => s.systemFeatures)
const setActiveTab = usePluginPageContext(v => v.setActiveTab)
const handleFileChange = (event: React.ChangeEvent<HTMLInputElement>) => {
@ -39,6 +46,22 @@ const Empty = () => {
return t('plugin.list.notFound')
}, [pluginList?.plugins.length, t, filters.categories.length, filters.tags.length, filters.searchQuery])
const [installMethods, setInstallMethods] = useState<InstallMethod[]>([])
useEffect(() => {
const methods = []
if (enable_marketplace)
methods.push({ icon: MagicBox, text: t('plugin.source.marketplace'), action: 'marketplace' })
if (plugin_installation_permission.restrict_to_marketplace_only) {
setInstallMethods(methods)
}
else {
methods.push({ icon: Github, text: t('plugin.source.github'), action: 'github' })
methods.push({ icon: FileZip, text: t('plugin.source.local'), action: 'local' })
setInstallMethods(methods)
}
}, [plugin_installation_permission, enable_marketplace, t])
return (
<div className='relative z-0 w-full grow'>
{/* skeleton */}
@ -71,15 +94,7 @@ const Empty = () => {
accept={SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS}
/>
<div className='flex w-full flex-col gap-y-1'>
{[
...(
(enable_marketplace)
? [{ icon: MagicBox, text: t('plugin.list.source.marketplace'), action: 'marketplace' }]
: []
),
{ icon: Github, text: t('plugin.list.source.github'), action: 'github' },
{ icon: FileZip, text: t('plugin.list.source.local'), action: 'local' },
].map(({ icon: Icon, text, action }) => (
{installMethods.map(({ icon: Icon, text, action }) => (
<Button
key={action}
className='justify-start gap-x-0.5 px-3'

View File

@ -136,7 +136,7 @@ const PluginPage = ({
const options = usePluginPageContext(v => v.options)
const activeTab = usePluginPageContext(v => v.activeTab)
const setActiveTab = usePluginPageContext(v => v.setActiveTab)
const { enable_marketplace } = useGlobalPublicStore(s => s.systemFeatures)
const { enable_marketplace, branding } = useGlobalPublicStore(s => s.systemFeatures)
const isPluginsTab = useMemo(() => activeTab === PLUGIN_PAGE_TABS_MAP.plugins, [activeTab])
const isExploringMarketplace = useMemo(() => {
@ -225,7 +225,7 @@ const PluginPage = ({
)
}
{
canSetPermissions && (
canSetPermissions && !branding.enabled && (
<Tooltip
popupContent={t('plugin.privilege.title')}
>

View File

@ -1,6 +1,6 @@
'use client'
import { useRef, useState } from 'react'
import { useEffect, useRef, useState } from 'react'
import { RiAddLine, RiArrowDownSLine } from '@remixicon/react'
import Button from '@/app/components/base/button'
import { MagicBox } from '@/app/components/base/icons/src/vender/solid/mediaAndDevices'
@ -22,6 +22,13 @@ import { useGlobalPublicStore } from '@/context/global-public-context'
type Props = {
onSwitchToMarketplaceTab: () => void
}
type InstallMethod = {
icon: React.FC<{ className?: string }>
text: string
action: string
}
const InstallPluginDropdown = ({
onSwitchToMarketplaceTab,
}: Props) => {
@ -30,7 +37,7 @@ const InstallPluginDropdown = ({
const [isMenuOpen, setIsMenuOpen] = useState(false)
const [selectedAction, setSelectedAction] = useState<string | null>(null)
const [selectedFile, setSelectedFile] = useState<File | null>(null)
const { enable_marketplace } = useGlobalPublicStore(s => s.systemFeatures)
const { enable_marketplace, plugin_installation_permission } = useGlobalPublicStore(s => s.systemFeatures)
const handleFileChange = (event: React.ChangeEvent<HTMLInputElement>) => {
const file = event.target.files?.[0]
@ -54,6 +61,22 @@ const InstallPluginDropdown = ({
// console.log(res)
// }
const [installMethods, setInstallMethods] = useState<InstallMethod[]>([])
useEffect(() => {
const methods = []
if (enable_marketplace)
methods.push({ icon: MagicBox, text: t('plugin.source.marketplace'), action: 'marketplace' })
if (plugin_installation_permission.restrict_to_marketplace_only) {
setInstallMethods(methods)
}
else {
methods.push({ icon: Github, text: t('plugin.source.github'), action: 'github' })
methods.push({ icon: FileZip, text: t('plugin.source.local'), action: 'local' })
setInstallMethods(methods)
}
}, [plugin_installation_permission, enable_marketplace, t])
return (
<PortalToFollowElem
open={isMenuOpen}
@ -84,15 +107,7 @@ const InstallPluginDropdown = ({
accept={SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS}
/>
<div className='w-full'>
{[
...(
(enable_marketplace)
? [{ icon: MagicBox, text: t('plugin.source.marketplace'), action: 'marketplace' }]
: []
),
{ icon: Github, text: t('plugin.source.github'), action: 'github' },
{ icon: FileZip, text: t('plugin.source.local'), action: 'local' },
].map(({ icon: Icon, text, action }) => (
{installMethods.map(({ icon: Icon, text, action }) => (
<div
key={action}
className='flex w-full !cursor-pointer items-center gap-1 rounded-lg px-2 py-1.5 hover:bg-state-base-hover'
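
This dropdown builds the same `installMethods` list as the Empty state earlier in the diff: marketplace first when enabled, then GitHub and local-package entries unless `plugin_installation_permission.restrict_to_marketplace_only` is set. A hedged sketch of how the duplicated effect could be lifted into a shared hook (`useInstallMethods` is hypothetical, not part of this commit; imports assume the same modules used by the surrounding files):

```ts
import { useMemo, type ComponentType } from 'react'
import { useTranslation } from 'react-i18next'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { MagicBox } from '@/app/components/base/icons/src/vender/solid/mediaAndDevices'
import { FileZip } from '@/app/components/base/icons/src/vender/solid/files'
import { Github } from '@/app/components/base/icons/src/vender/solid/general'

type InstallMethod = {
  icon: ComponentType<{ className?: string }>
  text: string
  action: string
}

// Hypothetical shared hook: marketplace first (when enabled); GitHub and
// local package only when installs are not restricted to the marketplace.
const useInstallMethods = (): InstallMethod[] => {
  const { t } = useTranslation()
  const { enable_marketplace, plugin_installation_permission } = useGlobalPublicStore(s => s.systemFeatures)
  return useMemo(() => {
    const methods: InstallMethod[] = []
    if (enable_marketplace)
      methods.push({ icon: MagicBox, text: t('plugin.source.marketplace'), action: 'marketplace' })
    if (!plugin_installation_permission.restrict_to_marketplace_only) {
      methods.push({ icon: Github, text: t('plugin.source.github'), action: 'github' })
      methods.push({ icon: FileZip, text: t('plugin.source.local'), action: 'local' })
    }
    return methods
  }, [enable_marketplace, plugin_installation_permission, t])
}
```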

View File

@ -94,7 +94,11 @@ export type PluginManifestInMarket = {
introduction: string
verified: boolean
install_count: number
badges: string[]
badges: string[],
verification: {
authorized_category: 'langgenius' | 'partner' | 'community'
},
from: Dependency['type']
}
export type PluginDetail = {
@ -145,7 +149,11 @@ export type Plugin = {
settings: CredentialFormSchemaBase[]
}
tags: { name: string }[]
badges: string[]
badges: string[],
verification: {
authorized_category: 'langgenius' | 'partner' | 'community'
},
from: Dependency['type']
}
export enum PermissionType {
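
The new `verification.authorized_category` union on `Plugin` and `PluginManifestInMarket` narrows the publisher to three tiers. Purely illustrative, one way a UI layer might label them (the label strings are placeholders, not from the codebase):

```ts
type AuthorizedCategory = 'langgenius' | 'partner' | 'community'

// Placeholder labels for each verification tier.
const categoryLabel: Record<AuthorizedCategory, string> = {
  langgenius: 'Official',
  partner: 'Partner',
  community: 'Community',
}
```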

View File

@ -12,7 +12,7 @@ import { useMarketplace } from './hooks'
import List from '@/app/components/plugins/marketplace/list'
import Loading from '@/app/components/base/loading'
import { getLocaleOnClient } from '@/i18n'
import { MARKETPLACE_URL_PREFIX } from '@/config'
import { getMarketplaceUrl } from '@/utils/var'
type MarketplaceProps = {
searchPluginText: string
@ -84,7 +84,7 @@ const Marketplace = ({
</span>
{t('common.operation.in')}
<a
href={`${MARKETPLACE_URL_PREFIX}?language=${locale}&q=${searchPluginText}&tags=${filterPluginTags.join(',')}${theme ? `&theme=${theme}` : ''}`}
href={getMarketplaceUrl('', { language: locale, q: searchPluginText, tags: filterPluginTags.join(','), theme })}
className='system-sm-medium ml-1 flex items-center text-text-accent'
target='_blank'
>

View File

@ -12,9 +12,9 @@ import {
PortalToFollowElemTrigger,
} from '@/app/components/base/portal-to-follow-elem'
import cn from '@/utils/classnames'
import { MARKETPLACE_URL_PREFIX } from '@/config'
import { useDownloadPlugin } from '@/service/use-plugins'
import { downloadFile } from '@/utils/format'
import { getMarketplaceUrl } from '@/utils/var'
type Props = {
open: boolean
@ -80,7 +80,7 @@ const OperationDropdown: FC<Props> = ({
<PortalToFollowElemContent className='z-[9999]'>
<div className='w-[112px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur p-1 shadow-lg'>
<div onClick={handleDownload} className='system-md-regular cursor-pointer rounded-lg px-3 py-1.5 text-text-secondary hover:bg-state-base-hover'>{t('common.operation.download')}</div>
<a href={`${MARKETPLACE_URL_PREFIX}/plugins/${author}/${name}${theme ? `?theme=${theme}` : ''}`} target='_blank' className='system-md-regular block cursor-pointer rounded-lg px-3 py-1.5 text-text-secondary hover:bg-state-base-hover'>{t('common.operation.viewDetails')}</a>
<a href={getMarketplaceUrl(`/plugins/${author}/${name}`, { theme })} target='_blank' className='system-md-regular block cursor-pointer rounded-lg px-3 py-1.5 text-text-secondary hover:bg-state-base-hover'>{t('common.operation.viewDetails')}</a>
</div>
</PortalToFollowElemContent>
</PortalToFollowElem>

View File

@ -61,7 +61,7 @@ export const useShortcuts = (): void => {
return !showFeaturesPanel && !isEventTargetInputArea(e.target as HTMLElement)
}, [workflowStore])
useKeyPress(['delete'], (e) => {
useKeyPress(['delete', 'backspace'], (e) => {
if (shouldHandleShortcut(e)) {
e.preventDefault()
handleNodesDelete()
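
The change above extends the node-deletion shortcut to Backspace while keeping the existing guard that ignores the key press when focus is inside an input area. The same pattern as a standalone sketch built on ahooks' `useKeyPress` (the guard here is a simplified stand-in for `isEventTargetInputArea`):

```ts
import { useKeyPress } from 'ahooks'

// Simplified stand-in for the workflow's isEventTargetInputArea check.
const isTypingTarget = (target: HTMLElement): boolean =>
  target.tagName === 'INPUT' || target.tagName === 'TEXTAREA' || target.isContentEditable

// Sketch: delete selected nodes on Delete or Backspace, but never while typing.
const useDeleteShortcut = (onDelete: () => void) => {
  useKeyPress(['delete', 'backspace'], (e) => {
    if (isTypingTarget(e.target as HTMLElement))
      return
    e.preventDefault()
    onDelete()
  })
}
```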

View File

@ -37,6 +37,7 @@ const typeList = [
ChatVarType.ArrayString,
ChatVarType.ArrayNumber,
ChatVarType.ArrayObject,
ChatVarType.ArrayFile,
]
const objectPlaceholder = `# example
@ -127,6 +128,7 @@ const ChatVariableModal = ({
case ChatVarType.ArrayString:
case ChatVarType.ArrayNumber:
case ChatVarType.ArrayObject:
case ChatVarType.ArrayFile:
return value?.filter(Boolean) || []
}
}
@ -294,84 +296,86 @@ const ChatVariableModal = ({
</div>
</div>
{/* default value */}
<div className='mb-4'>
<div className='system-sm-semibold mb-1 flex h-6 items-center justify-between text-text-secondary'>
<div>{t('workflow.chatVariable.modal.value')}</div>
{(type === ChatVarType.ArrayString || type === ChatVarType.ArrayNumber) && (
<Button
variant='ghost'
size='small'
className='text-text-tertiary'
onClick={() => handleEditorChange(!editInJSON)}
>
{editInJSON ? <RiInputField className='mr-1 h-3.5 w-3.5' /> : <RiDraftLine className='mr-1 h-3.5 w-3.5' />}
{editInJSON ? t('workflow.chatVariable.modal.oneByOne') : t('workflow.chatVariable.modal.editInJSON')}
</Button>
)}
{type === ChatVarType.Object && (
<Button
variant='ghost'
size='small'
className='text-text-tertiary'
onClick={() => handleEditorChange(!editInJSON)}
>
{editInJSON ? <RiInputField className='mr-1 h-3.5 w-3.5' /> : <RiDraftLine className='mr-1 h-3.5 w-3.5' />}
{editInJSON ? t('workflow.chatVariable.modal.editInForm') : t('workflow.chatVariable.modal.editInJSON')}
</Button>
)}
</div>
<div className='flex'>
{type === ChatVarType.String && (
// Input will remove \n\r, so use Textarea just like description area
<textarea
className='system-sm-regular placeholder:system-sm-regular block h-20 w-full resize-none appearance-none rounded-lg border border-transparent bg-components-input-bg-normal p-2 caret-primary-600 outline-none placeholder:text-components-input-text-placeholder hover:border-components-input-border-hover hover:bg-components-input-bg-hover focus:border-components-input-border-active focus:bg-components-input-bg-active focus:shadow-xs'
value={value}
placeholder={t('workflow.chatVariable.modal.valuePlaceholder') || ''}
onChange={e => setValue(e.target.value)}
/>
)}
{type === ChatVarType.Number && (
<Input
placeholder={t('workflow.chatVariable.modal.valuePlaceholder') || ''}
value={value}
onChange={e => setValue(Number(e.target.value))}
type='number'
/>
)}
{type === ChatVarType.Object && !editInJSON && (
<ObjectValueList
list={objectValue}
onChange={setObjectValue}
/>
)}
{type === ChatVarType.ArrayString && !editInJSON && (
<ArrayValueList
isString
list={value || [undefined]}
onChange={setValue}
/>
)}
{type === ChatVarType.ArrayNumber && !editInJSON && (
<ArrayValueList
isString={false}
list={value || [undefined]}
onChange={setValue}
/>
)}
{editInJSON && (
<div className='w-full rounded-[10px] bg-components-input-bg-normal py-2 pl-3 pr-1' style={{ height: editorMinHeight }}>
<CodeEditor
isExpand
noWrapper
language={CodeLanguage.json}
value={editorContent}
placeholder={<div className='whitespace-pre'>{placeholder}</div>}
onChange={handleEditorValueChange}
{type !== ChatVarType.ArrayFile && (
<div className='mb-4'>
<div className='system-sm-semibold mb-1 flex h-6 items-center justify-between text-text-secondary'>
<div>{t('workflow.chatVariable.modal.value')}</div>
{(type === ChatVarType.ArrayString || type === ChatVarType.ArrayNumber) && (
<Button
variant='ghost'
size='small'
className='text-text-tertiary'
onClick={() => handleEditorChange(!editInJSON)}
>
{editInJSON ? <RiInputField className='mr-1 h-3.5 w-3.5' /> : <RiDraftLine className='mr-1 h-3.5 w-3.5' />}
{editInJSON ? t('workflow.chatVariable.modal.oneByOne') : t('workflow.chatVariable.modal.editInJSON')}
</Button>
)}
{type === ChatVarType.Object && (
<Button
variant='ghost'
size='small'
className='text-text-tertiary'
onClick={() => handleEditorChange(!editInJSON)}
>
{editInJSON ? <RiInputField className='mr-1 h-3.5 w-3.5' /> : <RiDraftLine className='mr-1 h-3.5 w-3.5' />}
{editInJSON ? t('workflow.chatVariable.modal.editInForm') : t('workflow.chatVariable.modal.editInJSON')}
</Button>
)}
</div>
<div className='flex'>
{type === ChatVarType.String && (
// Input will remove \n\r, so use Textarea just like description area
<textarea
className='system-sm-regular placeholder:system-sm-regular block h-20 w-full resize-none appearance-none rounded-lg border border-transparent bg-components-input-bg-normal p-2 caret-primary-600 outline-none placeholder:text-components-input-text-placeholder hover:border-components-input-border-hover hover:bg-components-input-bg-hover focus:border-components-input-border-active focus:bg-components-input-bg-active focus:shadow-xs'
value={value}
placeholder={t('workflow.chatVariable.modal.valuePlaceholder') || ''}
onChange={e => setValue(e.target.value)}
/>
</div>
)}
)}
{type === ChatVarType.Number && (
<Input
placeholder={t('workflow.chatVariable.modal.valuePlaceholder') || ''}
value={value}
onChange={e => setValue(Number(e.target.value))}
type='number'
/>
)}
{type === ChatVarType.Object && !editInJSON && (
<ObjectValueList
list={objectValue}
onChange={setObjectValue}
/>
)}
{type === ChatVarType.ArrayString && !editInJSON && (
<ArrayValueList
isString
list={value || [undefined]}
onChange={setValue}
/>
)}
{type === ChatVarType.ArrayNumber && !editInJSON && (
<ArrayValueList
isString={false}
list={value || [undefined]}
onChange={setValue}
/>
)}
{editInJSON && (
<div className='w-full rounded-[10px] bg-components-input-bg-normal py-2 pl-3 pr-1' style={{ height: editorMinHeight }}>
<CodeEditor
isExpand
noWrapper
language={CodeLanguage.json}
value={editorContent}
placeholder={<div className='whitespace-pre'>{placeholder}</div>}
onChange={handleEditorValueChange}
/>
</div>
)}
</div>
</div>
</div>
)}
{/* description */}
<div className=''>
<div className='system-sm-semibold mb-1 flex h-6 items-center text-text-secondary'>{t('workflow.chatVariable.modal.description')}</div>
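
For context on the large hunk above: the whole default-value block is now wrapped in `type !== ChatVarType.ArrayFile`, so file-array conversation variables expose no literal default editor (only name, type, and description), while the other array types keep their one-by-one and JSON editors. Reduced to the guard itself (a sketch; the union values stand in for the real `ChatVarType` enum and are assumed from this diff):

```ts
// Local stand-in for the ChatVarType values relevant here.
type ChatVarType = 'String' | 'Number' | 'Object' | 'ArrayString' | 'ArrayNumber' | 'ArrayObject' | 'ArrayFile'

// ArrayFile conversation variables take no literal default, so the
// value editor section is skipped for them.
const showsDefaultValueEditor = (type: ChatVarType): boolean =>
  type !== 'ArrayFile'
```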

Some files were not shown because too many files have changed in this diff.