Merge remote-tracking branch 'origin/main' into feat/end-user-oauth

# Conflicts:
#	web/app/components/app/configuration/config/agent/agent-tools/index.tsx
zhsama 2025-12-04 16:16:04 +08:00
commit eb0e63c336
49 changed files with 3886 additions and 3429 deletions


@ -324,10 +324,13 @@ class AppListApi(Resource):
NodeType.TRIGGER_PLUGIN,
}
for workflow in draft_workflows:
for _, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
try:
for _, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
except Exception:
continue
for app in app_pagination.items:
app.has_draft_trigger = str(app.id) in draft_trigger_app_ids
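A minimal re-indented sketch of the guarded traversal this hunk appears to introduce, reusing draft_workflows and trigger_node_types from the lines above and assuming workflow.walk_nodes() can raise on a malformed draft graph; a draft that cannot be walked is skipped instead of failing the whole app listing:

    for workflow in draft_workflows:
        try:
            for _, node_data in workflow.walk_nodes():
                if node_data.get("type") in trigger_node_types:
                    draft_trigger_app_ids.add(str(workflow.app_id))
                    break
        except Exception:
            # a malformed draft graph should not break the listing; skip it
            continue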


@ -49,7 +49,6 @@ class CompletionConversationQuery(BaseConversationQuery):
class ChatConversationQuery(BaseConversationQuery):
message_count_gte: int | None = Field(default=None, ge=1, description="Minimum message count")
sort_by: Literal["created_at", "-created_at", "updated_at", "-updated_at"] = Field(
default="-updated_at", description="Sort field and direction"
)
@ -509,14 +508,6 @@ class ChatConversationApi(Resource):
.having(func.count(MessageAnnotation.id) == 0)
)
if args.message_count_gte and args.message_count_gte >= 1:
query = (
query.options(joinedload(Conversation.messages)) # type: ignore
.join(Message, Message.conversation_id == Conversation.id)
.group_by(Conversation.id)
.having(func.count(Message.id) >= args.message_count_gte)
)
if app_model.mode == AppMode.ADVANCED_CHAT:
query = query.where(Conversation.invoke_from != InvokeFrom.DEBUGGER)


@ -316,18 +316,16 @@ def validate_and_get_api_token(scope: str | None = None):
ApiToken.type == scope,
)
.values(last_used_at=current_time)
.returning(ApiToken)
)
stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
result = session.execute(update_stmt)
api_token = result.scalar_one_or_none()
api_token = session.scalar(stmt)
if hasattr(result, "rowcount") and result.rowcount > 0:
session.commit()
if not api_token:
stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
api_token = session.scalar(stmt)
if not api_token:
raise Unauthorized("Access token is invalid")
else:
session.commit()
raise Unauthorized("Access token is invalid")
return api_token


@ -1,3 +1,4 @@
import logging
import time
from collections.abc import Mapping, Sequence
from dataclasses import dataclass
@ -55,6 +56,7 @@ from models import Account, EndUser
from services.variable_truncator import BaseTruncator, DummyVariableTruncator, VariableTruncator
NodeExecutionId = NewType("NodeExecutionId", str)
logger = logging.getLogger(__name__)
@dataclass(slots=True)
@ -289,26 +291,30 @@ class WorkflowResponseConverter:
),
)
if event.node_type == NodeType.TOOL:
response.data.extras["icon"] = ToolManager.get_tool_icon(
tenant_id=self._application_generate_entity.app_config.tenant_id,
provider_type=ToolProviderType(event.provider_type),
provider_id=event.provider_id,
)
elif event.node_type == NodeType.DATASOURCE:
manager = PluginDatasourceManager()
provider_entity = manager.fetch_datasource_provider(
self._application_generate_entity.app_config.tenant_id,
event.provider_id,
)
response.data.extras["icon"] = provider_entity.declaration.identity.generate_datasource_icon_url(
self._application_generate_entity.app_config.tenant_id
)
elif event.node_type == NodeType.TRIGGER_PLUGIN:
response.data.extras["icon"] = TriggerManager.get_trigger_plugin_icon(
self._application_generate_entity.app_config.tenant_id,
event.provider_id,
)
try:
if event.node_type == NodeType.TOOL:
response.data.extras["icon"] = ToolManager.get_tool_icon(
tenant_id=self._application_generate_entity.app_config.tenant_id,
provider_type=ToolProviderType(event.provider_type),
provider_id=event.provider_id,
)
elif event.node_type == NodeType.DATASOURCE:
manager = PluginDatasourceManager()
provider_entity = manager.fetch_datasource_provider(
self._application_generate_entity.app_config.tenant_id,
event.provider_id,
)
response.data.extras["icon"] = provider_entity.declaration.identity.generate_datasource_icon_url(
self._application_generate_entity.app_config.tenant_id
)
elif event.node_type == NodeType.TRIGGER_PLUGIN:
response.data.extras["icon"] = TriggerManager.get_trigger_plugin_icon(
self._application_generate_entity.app_config.tenant_id,
event.provider_id,
)
except Exception:
# metadata fetch may fail, for example, the plugin daemon is down or plugin is uninstalled.
logger.warning("failed to fetch icon for %s", event.provider_id)
return response


@ -29,6 +29,7 @@ class SimpleModelProviderEntity(BaseModel):
provider: str
label: I18nObject
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
supported_model_types: list[ModelType]
@ -42,6 +43,7 @@ class SimpleModelProviderEntity(BaseModel):
provider=provider_entity.provider,
label=provider_entity.label,
icon_small=provider_entity.icon_small,
icon_small_dark=provider_entity.icon_small_dark,
icon_large=provider_entity.icon_large,
supported_model_types=provider_entity.supported_model_types,
)


@ -99,6 +99,7 @@ class SimpleProviderEntity(BaseModel):
provider: str
label: I18nObject
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
supported_model_types: Sequence[ModelType]
models: list[AIModelEntity] = []
@ -124,7 +125,6 @@ class ProviderEntity(BaseModel):
icon_small: I18nObject | None = None
icon_large: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large_dark: I18nObject | None = None
background: str | None = None
help: ProviderHelpEntity | None = None
supported_model_types: Sequence[ModelType]


@ -300,6 +300,14 @@ class ModelProviderFactory:
file_name = provider_schema.icon_small.zh_Hans
else:
file_name = provider_schema.icon_small.en_US
elif icon_type.lower() == "icon_small_dark":
if not provider_schema.icon_small_dark:
raise ValueError(f"Provider {provider} does not have small dark icon.")
if lang.lower() == "zh_hans":
file_name = provider_schema.icon_small_dark.zh_Hans
else:
file_name = provider_schema.icon_small_dark.en_US
else:
if not provider_schema.icon_large:
raise ValueError(f"Provider {provider} does not have large icon.")


@ -203,7 +203,7 @@ class WorkflowTool(Tool):
Resolve user object in both HTTP and worker contexts.
In HTTP context: dereference the current_user LocalProxy (can return Account or EndUser).
In worker context: load Account from database by user_id (only returns Account, never EndUser).
In worker context: load Account(knowledge pipeline) or EndUser(trigger) from database by user_id.
Returns:
Account | EndUser | None: The resolved user object, or None if resolution fails.
@ -224,24 +224,28 @@ class WorkflowTool(Tool):
logger.warning("Failed to resolve user from request context: %s", e)
return None
def _resolve_user_from_database(self, user_id: str) -> Account | None:
def _resolve_user_from_database(self, user_id: str) -> Account | EndUser | None:
"""
Resolve user from database (worker/Celery context).
"""
user_stmt = select(Account).where(Account.id == user_id)
user = db.session.scalar(user_stmt)
if not user:
return None
tenant_stmt = select(Tenant).where(Tenant.id == self.runtime.tenant_id)
tenant = db.session.scalar(tenant_stmt)
if not tenant:
return None
user.current_tenant = tenant
user_stmt = select(Account).where(Account.id == user_id)
user = db.session.scalar(user_stmt)
if user:
user.current_tenant = tenant
return user
return user
end_user_stmt = select(EndUser).where(EndUser.id == user_id, EndUser.tenant_id == tenant.id)
end_user = db.session.scalar(end_user_stmt)
if end_user:
return end_user
return None
def _get_workflow(self, app_id: str, version: str) -> Workflow:
"""

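Judging from the updated docstring and the two tests added near the end of this commit, a rough sketch of the lookup order the reworked _resolve_user_from_database follows, with names taken from the hunk above (tenant first, then Account, then EndUser):

    def _resolve_user_from_database(self, user_id: str) -> Account | EndUser | None:
        # without a tenant nothing can be resolved
        tenant = db.session.scalar(select(Tenant).where(Tenant.id == self.runtime.tenant_id))
        if not tenant:
            return None
        # prefer an Account (knowledge pipeline) and attach the tenant to it
        account = db.session.scalar(select(Account).where(Account.id == user_id))
        if account:
            account.current_tenant = tenant
            return account
        # fall back to an EndUser (trigger) scoped to the same tenant
        return db.session.scalar(
            select(EndUser).where(EndUser.id == user_id, EndUser.tenant_id == tenant.id)
        )

The new unit tests further down exercise exactly this order: one stubs the Account lookup to return nothing and expects the EndUser back, the other stubs the tenant lookup to return nothing and expects None.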

@ -10,6 +10,7 @@ from collections.abc import Sequence
from typing import Any, Literal
import sqlalchemy as sa
from redis.exceptions import LockNotOwnedError
from sqlalchemy import exists, func, select
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound
@ -1593,173 +1594,176 @@ class DocumentService:
db.session.add(dataset_process_rule)
db.session.flush()
lock_name = f"add_document_lock_dataset_id_{dataset.id}"
with redis_client.lock(lock_name, timeout=600):
assert dataset_process_rule
position = DocumentService.get_documents_position(dataset.id)
document_ids = []
duplicate_document_ids = []
if knowledge_config.data_source.info_list.data_source_type == "upload_file":
if not knowledge_config.data_source.info_list.file_info_list:
raise ValueError("File source info is required")
upload_file_list = knowledge_config.data_source.info_list.file_info_list.file_ids
for file_id in upload_file_list:
file = (
db.session.query(UploadFile)
.where(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id)
.first()
)
# raise error if file not found
if not file:
raise FileNotExistsError()
file_name = file.name
data_source_info: dict[str, str | bool] = {
"upload_file_id": file_id,
}
# check duplicate
if knowledge_config.duplicate:
document = (
db.session.query(Document)
.filter_by(
dataset_id=dataset.id,
tenant_id=current_user.current_tenant_id,
data_source_type="upload_file",
enabled=True,
name=file_name,
)
try:
with redis_client.lock(lock_name, timeout=600):
assert dataset_process_rule
position = DocumentService.get_documents_position(dataset.id)
document_ids = []
duplicate_document_ids = []
if knowledge_config.data_source.info_list.data_source_type == "upload_file":
if not knowledge_config.data_source.info_list.file_info_list:
raise ValueError("File source info is required")
upload_file_list = knowledge_config.data_source.info_list.file_info_list.file_ids
for file_id in upload_file_list:
file = (
db.session.query(UploadFile)
.where(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id)
.first()
)
if document:
document.dataset_process_rule_id = dataset_process_rule.id
document.updated_at = naive_utc_now()
document.created_from = created_from
document.doc_form = knowledge_config.doc_form
document.doc_language = knowledge_config.doc_language
document.data_source_info = json.dumps(data_source_info)
document.batch = batch
document.indexing_status = "waiting"
db.session.add(document)
documents.append(document)
duplicate_document_ids.append(document.id)
continue
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
file_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
elif knowledge_config.data_source.info_list.data_source_type == "notion_import":
notion_info_list = knowledge_config.data_source.info_list.notion_info_list # type: ignore
if not notion_info_list:
raise ValueError("No notion info list found.")
exist_page_ids = []
exist_document = {}
documents = (
db.session.query(Document)
.filter_by(
dataset_id=dataset.id,
tenant_id=current_user.current_tenant_id,
data_source_type="notion_import",
enabled=True,
)
.all()
)
if documents:
for document in documents:
data_source_info = json.loads(document.data_source_info)
exist_page_ids.append(data_source_info["notion_page_id"])
exist_document[data_source_info["notion_page_id"]] = document.id
for notion_info in notion_info_list:
workspace_id = notion_info.workspace_id
for page in notion_info.pages:
if page.page_id not in exist_page_ids:
data_source_info = {
"credential_id": notion_info.credential_id,
"notion_workspace_id": workspace_id,
"notion_page_id": page.page_id,
"notion_page_icon": page.page_icon.model_dump() if page.page_icon else None, # type: ignore
"type": page.type,
}
# Truncate page name to 255 characters to prevent DB field length errors
truncated_page_name = page.page_name[:255] if page.page_name else "nopagename"
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
truncated_page_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
else:
exist_document.pop(page.page_id)
# delete not selected documents
if len(exist_document) > 0:
clean_notion_document_task.delay(list(exist_document.values()), dataset.id)
elif knowledge_config.data_source.info_list.data_source_type == "website_crawl":
website_info = knowledge_config.data_source.info_list.website_info_list
if not website_info:
raise ValueError("No website info list found.")
urls = website_info.urls
for url in urls:
data_source_info = {
"url": url,
"provider": website_info.provider,
"job_id": website_info.job_id,
"only_main_content": website_info.only_main_content,
"mode": "crawl",
}
if len(url) > 255:
document_name = url[:200] + "..."
else:
document_name = url
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
document_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
db.session.commit()
# trigger async task
if document_ids:
DocumentIndexingTaskProxy(dataset.tenant_id, dataset.id, document_ids).delay()
if duplicate_document_ids:
duplicate_document_indexing_task.delay(dataset.id, duplicate_document_ids)
# raise error if file not found
if not file:
raise FileNotExistsError()
file_name = file.name
data_source_info: dict[str, str | bool] = {
"upload_file_id": file_id,
}
# check duplicate
if knowledge_config.duplicate:
document = (
db.session.query(Document)
.filter_by(
dataset_id=dataset.id,
tenant_id=current_user.current_tenant_id,
data_source_type="upload_file",
enabled=True,
name=file_name,
)
.first()
)
if document:
document.dataset_process_rule_id = dataset_process_rule.id
document.updated_at = naive_utc_now()
document.created_from = created_from
document.doc_form = knowledge_config.doc_form
document.doc_language = knowledge_config.doc_language
document.data_source_info = json.dumps(data_source_info)
document.batch = batch
document.indexing_status = "waiting"
db.session.add(document)
documents.append(document)
duplicate_document_ids.append(document.id)
continue
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
file_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
elif knowledge_config.data_source.info_list.data_source_type == "notion_import":
notion_info_list = knowledge_config.data_source.info_list.notion_info_list # type: ignore
if not notion_info_list:
raise ValueError("No notion info list found.")
exist_page_ids = []
exist_document = {}
documents = (
db.session.query(Document)
.filter_by(
dataset_id=dataset.id,
tenant_id=current_user.current_tenant_id,
data_source_type="notion_import",
enabled=True,
)
.all()
)
if documents:
for document in documents:
data_source_info = json.loads(document.data_source_info)
exist_page_ids.append(data_source_info["notion_page_id"])
exist_document[data_source_info["notion_page_id"]] = document.id
for notion_info in notion_info_list:
workspace_id = notion_info.workspace_id
for page in notion_info.pages:
if page.page_id not in exist_page_ids:
data_source_info = {
"credential_id": notion_info.credential_id,
"notion_workspace_id": workspace_id,
"notion_page_id": page.page_id,
"notion_page_icon": page.page_icon.model_dump() if page.page_icon else None, # type: ignore
"type": page.type,
}
# Truncate page name to 255 characters to prevent DB field length errors
truncated_page_name = page.page_name[:255] if page.page_name else "nopagename"
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
truncated_page_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
else:
exist_document.pop(page.page_id)
# delete not selected documents
if len(exist_document) > 0:
clean_notion_document_task.delay(list(exist_document.values()), dataset.id)
elif knowledge_config.data_source.info_list.data_source_type == "website_crawl":
website_info = knowledge_config.data_source.info_list.website_info_list
if not website_info:
raise ValueError("No website info list found.")
urls = website_info.urls
for url in urls:
data_source_info = {
"url": url,
"provider": website_info.provider,
"job_id": website_info.job_id,
"only_main_content": website_info.only_main_content,
"mode": "crawl",
}
if len(url) > 255:
document_name = url[:200] + "..."
else:
document_name = url
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
document_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
db.session.commit()
# trigger async task
if document_ids:
DocumentIndexingTaskProxy(dataset.tenant_id, dataset.id, document_ids).delay()
if duplicate_document_ids:
duplicate_document_indexing_task.delay(dataset.id, duplicate_document_ids)
except LockNotOwnedError:
pass
return documents, batch
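The same hardening recurs in the two segment-creation hunks later in this file: the body guarded by the Redis lock is wrapped in try/except LockNotOwnedError so that a lock which expires before release no longer raises. A rough sketch of the shape, using a hypothetical _run_under_lock helper purely for illustration; redis_client and LockNotOwnedError are the names imported above:

    from redis.exceptions import LockNotOwnedError

    def _run_under_lock(lock_name: str, body) -> None:
        try:
            with redis_client.lock(lock_name, timeout=600):
                body()  # create documents / segments while holding the lock
        except LockNotOwnedError:
            # the lock expired before it could be released; keep whatever work
            # was already committed instead of propagating the error
            pass

In save_document_with_dataset_id the function still returns (documents, batch) after the except branch, while add_segment and multi_create_segment simply fall through, so callers receive None when the lock is lost, which the new tests at the end of this commit assert.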
@ -2699,50 +2703,55 @@ class SegmentService:
# calc embedding use tokens
tokens = embedding_model.get_text_embedding_num_tokens(texts=[content])[0]
lock_name = f"add_segment_lock_document_id_{document.id}"
with redis_client.lock(lock_name, timeout=600):
max_position = (
db.session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == document.id)
.scalar()
)
segment_document = DocumentSegment(
tenant_id=current_user.current_tenant_id,
dataset_id=document.dataset_id,
document_id=document.id,
index_node_id=doc_id,
index_node_hash=segment_hash,
position=max_position + 1 if max_position else 1,
content=content,
word_count=len(content),
tokens=tokens,
status="completed",
indexing_at=naive_utc_now(),
completed_at=naive_utc_now(),
created_by=current_user.id,
)
if document.doc_form == "qa_model":
segment_document.word_count += len(args["answer"])
segment_document.answer = args["answer"]
try:
with redis_client.lock(lock_name, timeout=600):
max_position = (
db.session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == document.id)
.scalar()
)
segment_document = DocumentSegment(
tenant_id=current_user.current_tenant_id,
dataset_id=document.dataset_id,
document_id=document.id,
index_node_id=doc_id,
index_node_hash=segment_hash,
position=max_position + 1 if max_position else 1,
content=content,
word_count=len(content),
tokens=tokens,
status="completed",
indexing_at=naive_utc_now(),
completed_at=naive_utc_now(),
created_by=current_user.id,
)
if document.doc_form == "qa_model":
segment_document.word_count += len(args["answer"])
segment_document.answer = args["answer"]
db.session.add(segment_document)
# update document word count
assert document.word_count is not None
document.word_count += segment_document.word_count
db.session.add(document)
db.session.commit()
# save vector index
try:
VectorService.create_segments_vector([args["keywords"]], [segment_document], dataset, document.doc_form)
except Exception as e:
logger.exception("create segment index failed")
segment_document.enabled = False
segment_document.disabled_at = naive_utc_now()
segment_document.status = "error"
segment_document.error = str(e)
db.session.add(segment_document)
# update document word count
assert document.word_count is not None
document.word_count += segment_document.word_count
db.session.add(document)
db.session.commit()
segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_document.id).first()
return segment
# save vector index
try:
VectorService.create_segments_vector(
[args["keywords"]], [segment_document], dataset, document.doc_form
)
except Exception as e:
logger.exception("create segment index failed")
segment_document.enabled = False
segment_document.disabled_at = naive_utc_now()
segment_document.status = "error"
segment_document.error = str(e)
db.session.commit()
segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_document.id).first()
return segment
except LockNotOwnedError:
pass
@classmethod
def multi_create_segment(cls, segments: list, document: Document, dataset: Dataset):
@ -2751,84 +2760,89 @@ class SegmentService:
lock_name = f"multi_add_segment_lock_document_id_{document.id}"
increment_word_count = 0
with redis_client.lock(lock_name, timeout=600):
embedding_model = None
if dataset.indexing_technique == "high_quality":
model_manager = ModelManager()
embedding_model = model_manager.get_model_instance(
tenant_id=current_user.current_tenant_id,
provider=dataset.embedding_model_provider,
model_type=ModelType.TEXT_EMBEDDING,
model=dataset.embedding_model,
try:
with redis_client.lock(lock_name, timeout=600):
embedding_model = None
if dataset.indexing_technique == "high_quality":
model_manager = ModelManager()
embedding_model = model_manager.get_model_instance(
tenant_id=current_user.current_tenant_id,
provider=dataset.embedding_model_provider,
model_type=ModelType.TEXT_EMBEDDING,
model=dataset.embedding_model,
)
max_position = (
db.session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == document.id)
.scalar()
)
max_position = (
db.session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == document.id)
.scalar()
)
pre_segment_data_list = []
segment_data_list = []
keywords_list = []
position = max_position + 1 if max_position else 1
for segment_item in segments:
content = segment_item["content"]
doc_id = str(uuid.uuid4())
segment_hash = helper.generate_text_hash(content)
tokens = 0
if dataset.indexing_technique == "high_quality" and embedding_model:
# calc embedding use tokens
pre_segment_data_list = []
segment_data_list = []
keywords_list = []
position = max_position + 1 if max_position else 1
for segment_item in segments:
content = segment_item["content"]
doc_id = str(uuid.uuid4())
segment_hash = helper.generate_text_hash(content)
tokens = 0
if dataset.indexing_technique == "high_quality" and embedding_model:
# calc embedding use tokens
if document.doc_form == "qa_model":
tokens = embedding_model.get_text_embedding_num_tokens(
texts=[content + segment_item["answer"]]
)[0]
else:
tokens = embedding_model.get_text_embedding_num_tokens(texts=[content])[0]
segment_document = DocumentSegment(
tenant_id=current_user.current_tenant_id,
dataset_id=document.dataset_id,
document_id=document.id,
index_node_id=doc_id,
index_node_hash=segment_hash,
position=position,
content=content,
word_count=len(content),
tokens=tokens,
keywords=segment_item.get("keywords", []),
status="completed",
indexing_at=naive_utc_now(),
completed_at=naive_utc_now(),
created_by=current_user.id,
)
if document.doc_form == "qa_model":
tokens = embedding_model.get_text_embedding_num_tokens(
texts=[content + segment_item["answer"]]
)[0]
segment_document.answer = segment_item["answer"]
segment_document.word_count += len(segment_item["answer"])
increment_word_count += segment_document.word_count
db.session.add(segment_document)
segment_data_list.append(segment_document)
position += 1
pre_segment_data_list.append(segment_document)
if "keywords" in segment_item:
keywords_list.append(segment_item["keywords"])
else:
tokens = embedding_model.get_text_embedding_num_tokens(texts=[content])[0]
segment_document = DocumentSegment(
tenant_id=current_user.current_tenant_id,
dataset_id=document.dataset_id,
document_id=document.id,
index_node_id=doc_id,
index_node_hash=segment_hash,
position=position,
content=content,
word_count=len(content),
tokens=tokens,
keywords=segment_item.get("keywords", []),
status="completed",
indexing_at=naive_utc_now(),
completed_at=naive_utc_now(),
created_by=current_user.id,
)
if document.doc_form == "qa_model":
segment_document.answer = segment_item["answer"]
segment_document.word_count += len(segment_item["answer"])
increment_word_count += segment_document.word_count
db.session.add(segment_document)
segment_data_list.append(segment_document)
position += 1
pre_segment_data_list.append(segment_document)
if "keywords" in segment_item:
keywords_list.append(segment_item["keywords"])
else:
keywords_list.append(None)
# update document word count
assert document.word_count is not None
document.word_count += increment_word_count
db.session.add(document)
try:
# save vector index
VectorService.create_segments_vector(keywords_list, pre_segment_data_list, dataset, document.doc_form)
except Exception as e:
logger.exception("create segment index failed")
for segment_document in segment_data_list:
segment_document.enabled = False
segment_document.disabled_at = naive_utc_now()
segment_document.status = "error"
segment_document.error = str(e)
db.session.commit()
return segment_data_list
keywords_list.append(None)
# update document word count
assert document.word_count is not None
document.word_count += increment_word_count
db.session.add(document)
try:
# save vector index
VectorService.create_segments_vector(
keywords_list, pre_segment_data_list, dataset, document.doc_form
)
except Exception as e:
logger.exception("create segment index failed")
for segment_document in segment_data_list:
segment_document.enabled = False
segment_document.disabled_at = naive_utc_now()
segment_document.status = "error"
segment_document.error = str(e)
db.session.commit()
return segment_data_list
except LockNotOwnedError:
pass
@classmethod
def update_segment(cls, args: SegmentUpdateArgs, segment: DocumentSegment, document: Document, dataset: Dataset):


@ -69,6 +69,7 @@ class ProviderResponse(BaseModel):
label: I18nObject
description: I18nObject | None = None
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
background: str | None = None
help: ProviderHelpEntity | None = None
@ -92,6 +93,11 @@ class ProviderResponse(BaseModel):
self.icon_small = I18nObject(
en_US=f"{url_prefix}/icon_small/en_US", zh_Hans=f"{url_prefix}/icon_small/zh_Hans"
)
if self.icon_small_dark is not None:
self.icon_small_dark = I18nObject(
en_US=f"{url_prefix}/icon_small_dark/en_US",
zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans",
)
if self.icon_large is not None:
self.icon_large = I18nObject(
@ -109,6 +115,7 @@ class ProviderWithModelsResponse(BaseModel):
provider: str
label: I18nObject
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
status: CustomConfigurationStatus
models: list[ProviderModelWithStatusEntity]
@ -123,6 +130,11 @@ class ProviderWithModelsResponse(BaseModel):
en_US=f"{url_prefix}/icon_small/en_US", zh_Hans=f"{url_prefix}/icon_small/zh_Hans"
)
if self.icon_small_dark is not None:
self.icon_small_dark = I18nObject(
en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans"
)
if self.icon_large is not None:
self.icon_large = I18nObject(
en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"
@ -147,6 +159,11 @@ class SimpleProviderEntityResponse(SimpleProviderEntity):
en_US=f"{url_prefix}/icon_small/en_US", zh_Hans=f"{url_prefix}/icon_small/zh_Hans"
)
if self.icon_small_dark is not None:
self.icon_small_dark = I18nObject(
en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans"
)
if self.icon_large is not None:
self.icon_large = I18nObject(
en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"


@ -79,6 +79,7 @@ class ModelProviderService:
label=provider_configuration.provider.label,
description=provider_configuration.provider.description,
icon_small=provider_configuration.provider.icon_small,
icon_small_dark=provider_configuration.provider.icon_small_dark,
icon_large=provider_configuration.provider.icon_large,
background=provider_configuration.provider.background,
help=provider_configuration.provider.help,
@ -402,6 +403,7 @@ class ModelProviderService:
provider=provider,
label=first_model.provider.label,
icon_small=first_model.provider.icon_small,
icon_small_dark=first_model.provider.icon_small_dark,
icon_large=first_model.provider.icon_large,
status=CustomConfigurationStatus.ACTIVE,
models=[


@ -227,6 +227,7 @@ class TestModelProviderService:
mock_provider_entity.label = {"en_US": "OpenAI", "zh_Hans": "OpenAI"}
mock_provider_entity.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"}
mock_provider_entity.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity.icon_small_dark = None
mock_provider_entity.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity.background = "#FF6B6B"
mock_provider_entity.help = None
@ -300,6 +301,7 @@ class TestModelProviderService:
mock_provider_entity_llm.label = {"en_US": "OpenAI", "zh_Hans": "OpenAI"}
mock_provider_entity_llm.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"}
mock_provider_entity_llm.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity_llm.icon_small_dark = None
mock_provider_entity_llm.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity_llm.background = "#FF6B6B"
mock_provider_entity_llm.help = None
@ -313,6 +315,7 @@ class TestModelProviderService:
mock_provider_entity_embedding.label = {"en_US": "Cohere", "zh_Hans": "Cohere"}
mock_provider_entity_embedding.description = {"en_US": "Cohere provider", "zh_Hans": "Cohere 提供商"}
mock_provider_entity_embedding.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity_embedding.icon_small_dark = None
mock_provider_entity_embedding.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity_embedding.background = "#4ECDC4"
mock_provider_entity_embedding.help = None
@ -1023,6 +1026,7 @@ class TestModelProviderService:
provider="openai",
label={"en_US": "OpenAI", "zh_Hans": "OpenAI"},
icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"},
icon_small_dark=None,
icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"},
),
model="gpt-3.5-turbo",
@ -1040,6 +1044,7 @@ class TestModelProviderService:
provider="openai",
label={"en_US": "OpenAI", "zh_Hans": "OpenAI"},
icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"},
icon_small_dark=None,
icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"},
),
model="gpt-4",


@ -1,3 +1,5 @@
from types import SimpleNamespace
import pytest
from core.app.entities.app_invoke_entities import InvokeFrom
@ -214,3 +216,76 @@ def test_create_variable_message():
assert message.message.variable_name == var_name
assert message.message.variable_value == var_value
assert message.message.stream is False
def test_resolve_user_from_database_falls_back_to_end_user(monkeypatch: pytest.MonkeyPatch):
"""Ensure worker context can resolve EndUser when Account is missing."""
class StubSession:
def __init__(self, results: list):
self.results = results
def scalar(self, _stmt):
return self.results.pop(0)
tenant = SimpleNamespace(id="tenant_id")
end_user = SimpleNamespace(id="end_user_id", tenant_id="tenant_id")
db_stub = SimpleNamespace(session=StubSession([tenant, None, end_user]))
monkeypatch.setattr("core.tools.workflow_as_tool.tool.db", db_stub)
entity = ToolEntity(
identity=ToolIdentity(author="test", name="test tool", label=I18nObject(en_US="test tool"), provider="test"),
parameters=[],
description=None,
has_runtime_parameters=False,
)
runtime = ToolRuntime(tenant_id="tenant_id", invoke_from=InvokeFrom.SERVICE_API)
tool = WorkflowTool(
workflow_app_id="",
workflow_as_tool_id="",
version="1",
workflow_entities={},
workflow_call_depth=1,
entity=entity,
runtime=runtime,
)
resolved_user = tool._resolve_user_from_database(user_id=end_user.id)
assert resolved_user is end_user
def test_resolve_user_from_database_returns_none_when_no_tenant(monkeypatch: pytest.MonkeyPatch):
"""Return None if tenant cannot be found in worker context."""
class StubSession:
def __init__(self, results: list):
self.results = results
def scalar(self, _stmt):
return self.results.pop(0)
db_stub = SimpleNamespace(session=StubSession([None]))
monkeypatch.setattr("core.tools.workflow_as_tool.tool.db", db_stub)
entity = ToolEntity(
identity=ToolIdentity(author="test", name="test tool", label=I18nObject(en_US="test tool"), provider="test"),
parameters=[],
description=None,
has_runtime_parameters=False,
)
runtime = ToolRuntime(tenant_id="missing_tenant", invoke_from=InvokeFrom.SERVICE_API)
tool = WorkflowTool(
workflow_app_id="",
workflow_as_tool_id="",
version="1",
workflow_entities={},
workflow_call_depth=1,
entity=entity,
runtime=runtime,
)
resolved_user = tool._resolve_user_from_database(user_id="any")
assert resolved_user is None


@ -0,0 +1,177 @@
import types
from unittest.mock import Mock, create_autospec
import pytest
from redis.exceptions import LockNotOwnedError
from models.account import Account
from models.dataset import Dataset, Document
from services.dataset_service import DocumentService, SegmentService
class FakeLock:
"""Lock that always fails on enter with LockNotOwnedError."""
def __enter__(self):
raise LockNotOwnedError("simulated")
def __exit__(self, exc_type, exc, tb):
# Normal contextmanager signature; return False so exceptions propagate
return False
@pytest.fixture
def fake_current_user(monkeypatch):
user = create_autospec(Account, instance=True)
user.id = "user-1"
user.current_tenant_id = "tenant-1"
monkeypatch.setattr("services.dataset_service.current_user", user)
return user
@pytest.fixture
def fake_features(monkeypatch):
"""Features.billing.enabled == False to skip quota logic."""
features = types.SimpleNamespace(
billing=types.SimpleNamespace(enabled=False, subscription=types.SimpleNamespace(plan="ENTERPRISE")),
documents_upload_quota=types.SimpleNamespace(limit=10_000, size=0),
)
monkeypatch.setattr(
"services.dataset_service.FeatureService.get_features",
lambda tenant_id: features,
)
return features
@pytest.fixture
def fake_lock(monkeypatch):
"""Patch redis_client.lock to always raise LockNotOwnedError on enter."""
def _fake_lock(name, timeout=None, *args, **kwargs):
return FakeLock()
# DatasetService imports redis_client directly from extensions.ext_redis
monkeypatch.setattr("services.dataset_service.redis_client.lock", _fake_lock)
# ---------------------------------------------------------------------------
# 1. Knowledge Pipeline document creation (save_document_with_dataset_id)
# ---------------------------------------------------------------------------
def test_save_document_with_dataset_id_ignores_lock_not_owned(
monkeypatch,
fake_current_user,
fake_features,
fake_lock,
):
# Arrange
dataset = create_autospec(Dataset, instance=True)
dataset.id = "ds-1"
dataset.tenant_id = fake_current_user.current_tenant_id
dataset.data_source_type = "upload_file"
dataset.indexing_technique = "high_quality" # so we skip re-initialization branch
# Minimal knowledge_config stub that satisfies pre-lock code
info_list = types.SimpleNamespace(data_source_type="upload_file")
data_source = types.SimpleNamespace(info_list=info_list)
knowledge_config = types.SimpleNamespace(
doc_form="qa_model",
original_document_id=None, # go into "new document" branch
data_source=data_source,
indexing_technique="high_quality",
embedding_model=None,
embedding_model_provider=None,
retrieval_model=None,
process_rule=None,
duplicate=False,
doc_language="en",
)
account = fake_current_user
# Avoid touching real doc_form logic
monkeypatch.setattr("services.dataset_service.DatasetService.check_doc_form", lambda *a, **k: None)
# Avoid real DB interactions
monkeypatch.setattr("services.dataset_service.db", Mock())
# Act: this would hit the redis lock, whose __enter__ raises LockNotOwnedError.
# Our implementation should catch it and still return (documents, batch).
documents, batch = DocumentService.save_document_with_dataset_id(
dataset=dataset,
knowledge_config=knowledge_config,
account=account,
)
# Assert
# We mainly care that:
# - No exception is raised
# - The function returns a sensible tuple
assert isinstance(documents, list)
assert isinstance(batch, str)
# ---------------------------------------------------------------------------
# 2. Single-segment creation (add_segment)
# ---------------------------------------------------------------------------
def test_add_segment_ignores_lock_not_owned(
monkeypatch,
fake_current_user,
fake_lock,
):
# Arrange
dataset = create_autospec(Dataset, instance=True)
dataset.id = "ds-1"
dataset.tenant_id = fake_current_user.current_tenant_id
dataset.indexing_technique = "economy" # skip embedding/token calculation branch
document = create_autospec(Document, instance=True)
document.id = "doc-1"
document.dataset_id = dataset.id
document.word_count = 0
document.doc_form = "qa_model"
# Minimal args required by add_segment
args = {
"content": "question text",
"answer": "answer text",
"keywords": ["k1", "k2"],
}
# Avoid real DB operations
db_mock = Mock()
db_mock.session = Mock()
monkeypatch.setattr("services.dataset_service.db", db_mock)
monkeypatch.setattr("services.dataset_service.VectorService", Mock())
# Act
result = SegmentService.create_segment(args=args, document=document, dataset=dataset)
# Assert
# Under LockNotOwnedError except, add_segment should swallow the error and return None.
assert result is None
# ---------------------------------------------------------------------------
# 3. Multi-segment creation (multi_create_segment)
# ---------------------------------------------------------------------------
def test_multi_create_segment_ignores_lock_not_owned(
monkeypatch,
fake_current_user,
fake_lock,
):
# Arrange
dataset = create_autospec(Dataset, instance=True)
dataset.id = "ds-1"
dataset.tenant_id = fake_current_user.current_tenant_id
dataset.indexing_technique = "economy" # again, skip high_quality path
document = create_autospec(Document, instance=True)
document.id = "doc-1"
document.dataset_id = dataset.id
document.word_count = 0
document.doc_form = "qa_model"

File diff suppressed because it is too large


@ -233,7 +233,7 @@ NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false
# Database type, supported values are `postgresql` and `mysql`
DB_TYPE=postgresql
# For MySQL, only `root` user is supported for now
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
DB_HOST=db_postgres
@ -1076,24 +1076,10 @@ MAX_TREE_DEPTH=50
# ------------------------------
# Environment Variables for database Service
# ------------------------------
# The name of the default postgres user.
POSTGRES_USER=${DB_USERNAME}
# The password for the default postgres user.
POSTGRES_PASSWORD=${DB_PASSWORD}
# The name of the default postgres database.
POSTGRES_DB=${DB_DATABASE}
# Postgres data directory
PGDATA=/var/lib/postgresql/data/pgdata
# MySQL Default Configuration
# The name of the default mysql user.
MYSQL_USERNAME=${DB_USERNAME}
# The password for the default mysql user.
MYSQL_PASSWORD=${DB_PASSWORD}
# The name of the default mysql database.
MYSQL_DATABASE=${DB_DATABASE}
# MySQL data directory
MYSQL_HOST_VOLUME=./volumes/mysql/data
# ------------------------------


@ -139,9 +139,9 @@ services:
- postgresql
restart: always
environment:
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456}
POSTGRES_DB: ${POSTGRES_DB:-dify}
POSTGRES_USER: ${DB_USERNAME:-postgres}
POSTGRES_PASSWORD: ${DB_PASSWORD:-difyai123456}
POSTGRES_DB: ${DB_DATABASE:-dify}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
command: >
postgres -c 'max_connections=${POSTGRES_MAX_CONNECTIONS:-100}'
@ -161,7 +161,7 @@ services:
"-h",
"db_postgres",
"-U",
"${PGUSER:-postgres}",
"${DB_USERNAME:-postgres}",
"-d",
"${DB_DATABASE:-dify}",
]
@ -176,8 +176,8 @@ services:
- mysql
restart: always
environment:
MYSQL_ROOT_PASSWORD: ${MYSQL_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${MYSQL_DATABASE:-dify}
MYSQL_ROOT_PASSWORD: ${DB_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${DB_DATABASE:-dify}
command: >
--max_connections=1000
--innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M}
@ -193,7 +193,7 @@ services:
"ping",
"-u",
"root",
"-p${MYSQL_PASSWORD:-difyai123456}",
"-p${DB_PASSWORD:-difyai123456}",
]
interval: 1s
timeout: 3s


@ -9,8 +9,8 @@ services:
env_file:
- ./middleware.env
environment:
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456}
POSTGRES_DB: ${POSTGRES_DB:-dify}
POSTGRES_PASSWORD: ${DB_PASSWORD:-difyai123456}
POSTGRES_DB: ${DB_DATABASE:-dify}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
command: >
postgres -c 'max_connections=${POSTGRES_MAX_CONNECTIONS:-100}'
@ -32,9 +32,9 @@ services:
"-h",
"db_postgres",
"-U",
"${PGUSER:-postgres}",
"${DB_USERNAME:-postgres}",
"-d",
"${POSTGRES_DB:-dify}",
"${DB_DATABASE:-dify}",
]
interval: 1s
timeout: 3s
@ -48,8 +48,8 @@ services:
env_file:
- ./middleware.env
environment:
MYSQL_ROOT_PASSWORD: ${MYSQL_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${MYSQL_DATABASE:-dify}
MYSQL_ROOT_PASSWORD: ${DB_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${DB_DATABASE:-dify}
command: >
--max_connections=1000
--innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M}
@ -67,7 +67,7 @@ services:
"ping",
"-u",
"root",
"-p${MYSQL_PASSWORD:-difyai123456}",
"-p${DB_PASSWORD:-difyai123456}",
]
interval: 1s
timeout: 3s


@ -455,13 +455,7 @@ x-shared-env: &shared-api-worker-env
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
ALLOW_UNSAFE_DATA_SCHEME: ${ALLOW_UNSAFE_DATA_SCHEME:-false}
MAX_TREE_DEPTH: ${MAX_TREE_DEPTH:-50}
POSTGRES_USER: ${POSTGRES_USER:-${DB_USERNAME}}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-${DB_PASSWORD}}
POSTGRES_DB: ${POSTGRES_DB:-${DB_DATABASE}}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
MYSQL_USERNAME: ${MYSQL_USERNAME:-${DB_USERNAME}}
MYSQL_PASSWORD: ${MYSQL_PASSWORD:-${DB_PASSWORD}}
MYSQL_DATABASE: ${MYSQL_DATABASE:-${DB_DATABASE}}
MYSQL_HOST_VOLUME: ${MYSQL_HOST_VOLUME:-./volumes/mysql/data}
SANDBOX_API_KEY: ${SANDBOX_API_KEY:-dify-sandbox}
SANDBOX_GIN_MODE: ${SANDBOX_GIN_MODE:-release}
@ -774,9 +768,9 @@ services:
- postgresql
restart: always
environment:
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456}
POSTGRES_DB: ${POSTGRES_DB:-dify}
POSTGRES_USER: ${DB_USERNAME:-postgres}
POSTGRES_PASSWORD: ${DB_PASSWORD:-difyai123456}
POSTGRES_DB: ${DB_DATABASE:-dify}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
command: >
postgres -c 'max_connections=${POSTGRES_MAX_CONNECTIONS:-100}'
@ -796,7 +790,7 @@ services:
"-h",
"db_postgres",
"-U",
"${PGUSER:-postgres}",
"${DB_USERNAME:-postgres}",
"-d",
"${DB_DATABASE:-dify}",
]
@ -811,8 +805,8 @@ services:
- mysql
restart: always
environment:
MYSQL_ROOT_PASSWORD: ${MYSQL_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${MYSQL_DATABASE:-dify}
MYSQL_ROOT_PASSWORD: ${DB_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${DB_DATABASE:-dify}
command: >
--max_connections=1000
--innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M}
@ -828,7 +822,7 @@ services:
"ping",
"-u",
"root",
"-p${MYSQL_PASSWORD:-difyai123456}",
"-p${DB_PASSWORD:-difyai123456}",
]
interval: 1s
timeout: 3s


@ -4,6 +4,7 @@
# Database Configuration
# Database type, supported values are `postgresql` and `mysql`
DB_TYPE=postgresql
# For MySQL, only `root` user is supported for now
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
DB_HOST=db_postgres
@ -11,11 +12,6 @@ DB_PORT=5432
DB_DATABASE=dify
# PostgreSQL Configuration
POSTGRES_USER=${DB_USERNAME}
# The password for the default postgres user.
POSTGRES_PASSWORD=${DB_PASSWORD}
# The name of the default postgres database.
POSTGRES_DB=${DB_DATABASE}
# postgres data directory
PGDATA=/var/lib/postgresql/data/pgdata
PGDATA_HOST_VOLUME=./volumes/db/data
@ -65,11 +61,6 @@ POSTGRES_STATEMENT_TIMEOUT=0
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=0
# MySQL Configuration
MYSQL_USERNAME=${DB_USERNAME}
# MySQL password
MYSQL_PASSWORD=${DB_PASSWORD}
# MySQL database name
MYSQL_DATABASE=${DB_DATABASE}
# MySQL data directory host volume
MYSQL_HOST_VOLUME=./volumes/mysql/data


@ -8,7 +8,7 @@ const PluginList = async () => {
return (
<PluginPage
plugins={<PluginsPanel />}
marketplace={<Marketplace locale={locale} pluginTypeSwitchClassName='top-[60px]' searchBoxAutoAnimate={false} showSearchParams={false} />}
marketplace={<Marketplace locale={locale} pluginTypeSwitchClassName='top-[60px]' showSearchParams={false} />}
/>
)
}


@ -32,6 +32,7 @@ import { canFindTool } from '@/utils'
import { useAllBuiltInTools, useAllCustomTools, useAllMCPTools, useAllWorkflowTools } from '@/service/use-tools'
import type { ToolWithProvider } from '@/app/components/workflow/types'
import { useMittContextSelector } from '@/context/mitt-context'
import { DefaultToolIcon } from '@/app/components/base/icons/src/public/other'
type AgentToolWithMoreInfo = (AgentTool & { icon: any; collection?: Collection; use_end_user_credentials?: boolean; end_user_credential_type?: string }) | null
const AgentTools: FC = () => {
@ -383,10 +384,138 @@ const AgentTools: FC = () => {
</div>
))}
</div>
<div className='grid grid-cols-1 flex-wrap items-center justify-between gap-1 2xl:grid-cols-2'>
{tools.map((item: AgentTool & { icon: any; collection?: Collection }, index) => (
<div key={index}
className={cn(
'cursor group relative flex w-full items-center justify-between rounded-lg border-[0.5px] border-components-panel-border-subtle bg-components-panel-on-panel-item-bg p-1.5 pr-2 shadow-xs last-of-type:mb-0 hover:bg-components-panel-on-panel-item-bg-hover hover:shadow-sm',
isDeleting === index && 'border-state-destructive-border hover:bg-state-destructive-hover',
)}
>
<div className='flex w-0 grow items-center'>
{item.isDeleted && <DefaultToolIcon className='h-5 w-5' />}
{!item.isDeleted && (
<div className={cn((item.notAuthor || !item.enabled) && 'shrink-0 opacity-50')}>
{typeof item.icon === 'string' && <div className='h-5 w-5 rounded-md bg-cover bg-center' style={{ backgroundImage: `url(${item.icon})` }} />}
{typeof item.icon !== 'string' && <AppIcon className='rounded-md' size='xs' icon={item.icon?.content} background={item.icon?.background} />}
</div>
)}
<div
className={cn(
'system-xs-regular ml-1.5 flex w-0 grow items-center truncate',
(item.isDeleted || item.notAuthor || !item.enabled) ? 'opacity-50' : '',
)}
>
<span className='system-xs-medium pr-1.5 text-text-secondary'>{getProviderShowName(item)}</span>
<span className='text-text-tertiary'>{item.tool_label}</span>
{!item.isDeleted && (
<Tooltip
popupContent={
<div className='w-[180px]'>
<div className='mb-1.5 text-text-secondary'>{item.tool_name}</div>
<div className='mb-1.5 text-text-tertiary'>{t('tools.toolNameUsageTip')}</div>
<div className='cursor-pointer text-text-accent' onClick={() => copy(item.tool_name)}>{t('tools.copyToolName')}</div>
</div>
}
>
<div className='h-4 w-4'>
<div className='ml-0.5 hidden group-hover:inline-block'>
<RiInformation2Line className='h-4 w-4 text-text-tertiary' />
</div>
</div>
</Tooltip>
)}
</div>
</div>
<div className='ml-1 flex shrink-0 items-center'>
{item.isDeleted && (
<div className='mr-2 flex items-center'>
<Tooltip
popupContent={t('tools.toolRemoved')}
>
<div className='mr-1 cursor-pointer rounded-md p-1 hover:bg-black/5'>
<AlertTriangle className='h-4 w-4 text-[#F79009]' />
</div>
</Tooltip>
<div
className='cursor-pointer rounded-md p-1 text-text-tertiary hover:text-text-destructive'
onClick={() => {
const newModelConfig = produce(modelConfig, (draft) => {
draft.agentConfig.tools.splice(index, 1)
})
setModelConfig(newModelConfig)
formattingChangedDispatcher()
}}
onMouseOver={() => setIsDeleting(index)}
onMouseLeave={() => setIsDeleting(-1)}
>
<RiDeleteBinLine className='h-4 w-4' />
</div>
</div>
)}
{!item.isDeleted && (
<div className='mr-2 hidden items-center gap-1 group-hover:flex'>
{!item.notAuthor && (
<Tooltip
popupContent={t('tools.setBuiltInTools.infoAndSetting')}
needsDelay={false}
>
<div className='cursor-pointer rounded-md p-1 hover:bg-black/5' onClick={() => {
setCurrentTool(item)
setIsShowSettingTool(true)
}}>
<RiEqualizer2Line className='h-4 w-4 text-text-tertiary' />
</div>
</Tooltip>
)}
<div
className='cursor-pointer rounded-md p-1 text-text-tertiary hover:text-text-destructive'
onClick={() => {
const newModelConfig = produce(modelConfig, (draft) => {
draft.agentConfig.tools.splice(index, 1)
})
setModelConfig(newModelConfig)
formattingChangedDispatcher()
}}
onMouseOver={() => setIsDeleting(index)}
onMouseLeave={() => setIsDeleting(-1)}
>
<RiDeleteBinLine className='h-4 w-4' />
</div>
</div>
)}
<div className={cn(item.isDeleted && 'opacity-50')}>
{!item.notAuthor && (
<Switch
defaultValue={item.isDeleted ? false : item.enabled}
disabled={item.isDeleted}
size='md'
onChange={(enabled) => {
const newModelConfig = produce(modelConfig, (draft) => {
(draft.agentConfig.tools[index] as any).enabled = enabled
})
setModelConfig(newModelConfig)
formattingChangedDispatcher()
}} />
)}
{item.notAuthor && (
<Button variant='secondary' size='small' onClick={() => {
setCurrentTool(item)
setIsShowSettingTool(true)
}}>
{t('tools.notAuthorized')}
<Indicator className='ml-2' color='orange' />
</Button>
)}
</div>
</div>
</div>
))}
</div>
</div>
))}
</div>
</Panel >
</Panel>
{isShowSettingTool && (
<SettingBuiltInTool
toolName={currentTool?.tool_name as string}


@ -217,6 +217,7 @@ export type ModelProvider = {
url: TypeWithI18N
}
icon_small: TypeWithI18N
icon_small_dark?: TypeWithI18N
icon_large: TypeWithI18N
background?: string
supported_model_types: ModelTypeEnum[]
@ -255,6 +256,7 @@ export type Model = {
provider: string
icon_large: TypeWithI18N
icon_small: TypeWithI18N
icon_small_dark?: TypeWithI18N
label: TypeWithI18N
models: ModelItem[]
status: ModelStatusEnum


@ -6,8 +6,10 @@ import type {
import { useLanguage } from '../hooks'
import { Group } from '@/app/components/base/icons/src/vender/other'
import { OpenaiBlue, OpenaiTeal, OpenaiViolet, OpenaiYellow } from '@/app/components/base/icons/src/public/llm'
import cn from '@/utils/classnames'
import { renderI18nObject } from '@/i18n-config'
import { Theme } from '@/types/app'
import cn from '@/utils/classnames'
import useTheme from '@/hooks/use-theme'
type ModelIconProps = {
provider?: Model | ModelProvider
@ -23,6 +25,7 @@ const ModelIcon: FC<ModelIconProps> = ({
iconClassName,
isDeprecated = false,
}) => {
const { theme } = useTheme()
const language = useLanguage()
if (provider?.provider && ['openai', 'langgenius/openai/openai'].includes(provider.provider) && modelName?.startsWith('o'))
return <div className='flex items-center justify-center'><OpenaiYellow className={cn('h-5 w-5', className)} /></div>
@ -36,7 +39,16 @@ const ModelIcon: FC<ModelIconProps> = ({
if (provider?.icon_small) {
return (
<div className={cn('flex h-5 w-5 items-center justify-center', isDeprecated && 'opacity-50', className)}>
<img alt='model-icon' src={renderI18nObject(provider.icon_small, language)} className={iconClassName} />
<img
alt='model-icon'
src={renderI18nObject(
theme === Theme.dark && provider.icon_small_dark
? provider.icon_small_dark
: provider.icon_small,
language,
)}
className={iconClassName}
/>
</div>
)
}


@ -40,7 +40,12 @@ const ProviderIcon: FC<ProviderIconProps> = ({
<div className={cn('inline-flex items-center gap-2', className)}>
<img
alt='provider-icon'
src={renderI18nObject(provider.icon_small, language)}
src={renderI18nObject(
theme === Theme.dark && provider.icon_small_dark
? provider.icon_small_dark
: provider.icon_small,
language,
)}
className='h-6 w-6'
/>
<div className='system-md-semibold text-text-primary'>


@ -6,6 +6,8 @@ import { getLanguage } from '@/i18n-config/language'
import cn from '@/utils/classnames'
import { RiAlertFill } from '@remixicon/react'
import React from 'react'
import useTheme from '@/hooks/use-theme'
import { Theme } from '@/types/app'
import Partner from '../base/badges/partner'
import Verified from '../base/badges/verified'
import Icon from '../card/base/card-icon'
@ -50,7 +52,9 @@ const Card = ({
const locale = localeFromProps ? getLanguage(localeFromProps) : defaultLocale
const { t } = useMixedTranslation(localeFromProps)
const { categoriesMap } = useCategories(t, true)
const { category, type, name, org, label, brief, icon, verified, badges = [] } = payload
const { category, type, name, org, label, brief, icon, icon_dark, verified, badges = [] } = payload
const { theme } = useTheme()
const iconSrc = theme === Theme.dark && icon_dark ? icon_dark : icon
const getLocalizedText = (obj: Record<string, string> | undefined) =>
obj ? renderI18nObject(obj, locale) : ''
const isPartner = badges.includes('partner')
@ -71,7 +75,7 @@ const Card = ({
{!hideCornerMark && <CornerMark text={categoriesMap[type === 'bundle' ? type : category]?.label} />}
{/* Header */}
<div className="flex">
<Icon src={icon} installed={installed} installFailed={installFailed} />
<Icon src={iconSrc} installed={installed} installFailed={installFailed} />
<div className="ml-3 w-0 grow">
<div className="flex h-5 items-center">
<Title title={getLocalizedText(label)} />


@ -64,10 +64,12 @@ const InstallFromLocalPackage: React.FC<InstallFromLocalPackageProps> = ({
uniqueIdentifier,
} = result
const icon = await getIconUrl(manifest!.icon)
const iconDark = manifest.icon_dark ? await getIconUrl(manifest.icon_dark) : undefined
setUniqueIdentifier(uniqueIdentifier)
setManifest({
...manifest,
icon,
icon_dark: iconDark,
})
setStep(InstallStep.readyToInstall)
}, [getIconUrl])


@ -17,6 +17,7 @@ export const pluginManifestToCardPluginProps = (pluginManifest: PluginDeclaratio
brief: pluginManifest.description,
description: pluginManifest.description,
icon: pluginManifest.icon,
icon_dark: pluginManifest.icon_dark,
verified: pluginManifest.verified,
introduction: '',
repository: '',


@ -41,8 +41,6 @@ import { useInstalledPluginList } from '@/service/use-plugins'
import { debounce, noop } from 'lodash-es'
export type MarketplaceContextValue = {
intersected: boolean
setIntersected: (intersected: boolean) => void
searchPluginText: string
handleSearchPluginTextChange: (text: string) => void
filterPluginTags: string[]
@ -67,8 +65,6 @@ export type MarketplaceContextValue = {
}
export const MarketplaceContext = createContext<MarketplaceContextValue>({
intersected: true,
setIntersected: noop,
searchPluginText: '',
handleSearchPluginTextChange: noop,
filterPluginTags: [],
@ -121,7 +117,6 @@ export const MarketplaceContextProvider = ({
const hasValidTags = !!tagsFromSearchParams.length
const hasValidCategory = getValidCategoryKeys(searchParams?.category)
const categoryFromSearchParams = hasValidCategory || PLUGIN_TYPE_SEARCH_MAP.all
const [intersected, setIntersected] = useState(true)
const [searchPluginText, setSearchPluginText] = useState(queryFromSearchParams)
const searchPluginTextRef = useRef(searchPluginText)
const [filterPluginTags, setFilterPluginTags] = useState<string[]>(tagsFromSearchParams)
@ -300,8 +295,6 @@ export const MarketplaceContextProvider = ({
return (
<MarketplaceContext.Provider
value={{
intersected,
setIntersected,
searchPluginText,
handleSearchPluginTextChange,
filterPluginTags,


@ -259,34 +259,3 @@ export const useMarketplaceContainerScroll = (
}
}, [handleScroll])
}
export const useSearchBoxAutoAnimate = (searchBoxAutoAnimate?: boolean) => {
const [searchBoxCanAnimate, setSearchBoxCanAnimate] = useState(true)
const handleSearchBoxCanAnimateChange = useCallback(() => {
if (!searchBoxAutoAnimate) {
const clientWidth = document.documentElement.clientWidth
if (clientWidth < 1400)
setSearchBoxCanAnimate(false)
else
setSearchBoxCanAnimate(true)
}
}, [searchBoxAutoAnimate])
useEffect(() => {
handleSearchBoxCanAnimateChange()
}, [handleSearchBoxCanAnimateChange])
useEffect(() => {
window.addEventListener('resize', handleSearchBoxCanAnimateChange)
return () => {
window.removeEventListener('resize', handleSearchBoxCanAnimateChange)
}
}, [handleSearchBoxCanAnimateChange])
return {
searchBoxCanAnimate,
}
}

View File

@ -1,8 +1,6 @@
import { MarketplaceContextProvider } from './context'
import Description from './description'
import IntersectionLine from './intersection-line'
import SearchBoxWrapper from './search-box/search-box-wrapper'
import PluginTypeSwitch from './plugin-type-switch'
import StickySearchAndSwitchWrapper from './sticky-search-and-switch-wrapper'
import ListWrapper from './list/list-wrapper'
import type { MarketplaceCollection, SearchParams } from './types'
import type { Plugin } from '@/app/components/plugins/types'
@ -11,23 +9,19 @@ import { TanstackQueryInitializer } from '@/context/query-client'
type MarketplaceProps = {
locale: string
searchBoxAutoAnimate?: boolean
showInstallButton?: boolean
shouldExclude?: boolean
searchParams?: SearchParams
pluginTypeSwitchClassName?: string
intersectionContainerId?: string
scrollContainerId?: string
showSearchParams?: boolean
}
const Marketplace = async ({
locale,
searchBoxAutoAnimate = true,
showInstallButton = true,
shouldExclude,
searchParams,
pluginTypeSwitchClassName,
intersectionContainerId,
scrollContainerId,
showSearchParams = true,
}: MarketplaceProps) => {
@ -48,15 +42,9 @@ const Marketplace = async ({
showSearchParams={showSearchParams}
>
<Description locale={locale} />
<IntersectionLine intersectionContainerId={intersectionContainerId} />
<SearchBoxWrapper
<StickySearchAndSwitchWrapper
locale={locale}
searchBoxAutoAnimate={searchBoxAutoAnimate}
/>
<PluginTypeSwitch
locale={locale}
className={pluginTypeSwitchClassName}
searchBoxAutoAnimate={searchBoxAutoAnimate}
pluginTypeSwitchClassName={pluginTypeSwitchClassName}
showSearchParams={showSearchParams}
/>
<ListWrapper
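
With IntersectionLine and the separate SearchBoxWrapper/PluginTypeSwitch pair replaced by StickySearchAndSwitchWrapper, a caller of the Marketplace server component only wires the props that remain. A hypothetical usage under those assumptions:

// Hypothetical caller; the intersection/animation props removed above are no
// longer passed, and the sticky offset comes from pluginTypeSwitchClassName.
<Marketplace
  locale='en-US'
  pluginTypeSwitchClassName='top-[60px]'
  showSearchParams={false}
/>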

View File

@ -1,30 +0,0 @@
import { useEffect } from 'react'
import { useMarketplaceContext } from '@/app/components/plugins/marketplace/context'
export const useScrollIntersection = (
anchorRef: React.RefObject<HTMLDivElement | null>,
intersectionContainerId = 'marketplace-container',
) => {
const intersected = useMarketplaceContext(v => v.intersected)
const setIntersected = useMarketplaceContext(v => v.setIntersected)
useEffect(() => {
const container = document.getElementById(intersectionContainerId)
let observer: IntersectionObserver | undefined
if (container && anchorRef.current) {
observer = new IntersectionObserver((entries) => {
const isIntersecting = entries[0].isIntersecting
if (isIntersecting && !intersected)
setIntersected(true)
if (!isIntersecting && intersected)
setIntersected(false)
}, {
root: container,
})
observer.observe(anchorRef.current)
}
return () => observer?.disconnect()
}, [anchorRef, intersected, setIntersected, intersectionContainerId])
}

View File

@ -1,21 +0,0 @@
'use client'
import { useRef } from 'react'
import { useScrollIntersection } from './hooks'
type IntersectionLineProps = {
intersectionContainerId?: string
}
const IntersectionLine = ({
intersectionContainerId,
}: IntersectionLineProps) => {
const ref = useRef<HTMLDivElement>(null)
useScrollIntersection(ref, intersectionContainerId)
return (
<div ref={ref} className='mb-4 h-px shrink-0 bg-transparent'></div>
)
}
export default IntersectionLine

View File

@ -12,10 +12,7 @@ import {
import { useCallback, useEffect } from 'react'
import { PluginCategoryEnum } from '../types'
import { useMarketplaceContext } from './context'
import {
useMixedTranslation,
useSearchBoxAutoAnimate,
} from './hooks'
import { useMixedTranslation } from './hooks'
export const PLUGIN_TYPE_SEARCH_MAP = {
all: 'all',
@ -30,19 +27,16 @@ export const PLUGIN_TYPE_SEARCH_MAP = {
type PluginTypeSwitchProps = {
locale?: string
className?: string
searchBoxAutoAnimate?: boolean
showSearchParams?: boolean
}
const PluginTypeSwitch = ({
locale,
className,
searchBoxAutoAnimate,
showSearchParams,
}: PluginTypeSwitchProps) => {
const { t } = useMixedTranslation(locale)
const activePluginType = useMarketplaceContext(s => s.activePluginType)
const handleActivePluginTypeChange = useMarketplaceContext(s => s.handleActivePluginTypeChange)
const { searchBoxCanAnimate } = useSearchBoxAutoAnimate(searchBoxAutoAnimate)
const options = [
{
@ -105,7 +99,6 @@ const PluginTypeSwitch = ({
return (
<div className={cn(
'flex shrink-0 items-center justify-center space-x-2 bg-background-body py-3',
searchBoxCanAnimate && 'sticky top-[56px] z-10',
className,
)}>
{

View File

@ -1,36 +1,24 @@
'use client'
import { useMarketplaceContext } from '../context'
import {
useMixedTranslation,
useSearchBoxAutoAnimate,
} from '../hooks'
import { useMixedTranslation } from '../hooks'
import SearchBox from './index'
import cn from '@/utils/classnames'
type SearchBoxWrapperProps = {
locale?: string
searchBoxAutoAnimate?: boolean
}
const SearchBoxWrapper = ({
locale,
searchBoxAutoAnimate,
}: SearchBoxWrapperProps) => {
const { t } = useMixedTranslation(locale)
const intersected = useMarketplaceContext(v => v.intersected)
const searchPluginText = useMarketplaceContext(v => v.searchPluginText)
const handleSearchPluginTextChange = useMarketplaceContext(v => v.handleSearchPluginTextChange)
const filterPluginTags = useMarketplaceContext(v => v.filterPluginTags)
const handleFilterPluginTagsChange = useMarketplaceContext(v => v.handleFilterPluginTagsChange)
const { searchBoxCanAnimate } = useSearchBoxAutoAnimate(searchBoxAutoAnimate)
return (
<SearchBox
wrapperClassName={cn(
'z-[0] mx-auto w-[640px] shrink-0',
searchBoxCanAnimate && 'sticky top-3 z-[11]',
!intersected && searchBoxCanAnimate && 'w-[508px] transition-[width] duration-300',
)}
wrapperClassName='z-[11] mx-auto w-[640px] shrink-0'
inputClassName='w-full'
search={searchPluginText}
onSearchChange={handleSearchPluginTextChange}

View File

@ -0,0 +1,37 @@
'use client'
import SearchBoxWrapper from './search-box/search-box-wrapper'
import PluginTypeSwitch from './plugin-type-switch'
import cn from '@/utils/classnames'
type StickySearchAndSwitchWrapperProps = {
locale?: string
pluginTypeSwitchClassName?: string
showSearchParams?: boolean
}
const StickySearchAndSwitchWrapper = ({
locale,
pluginTypeSwitchClassName,
showSearchParams,
}: StickySearchAndSwitchWrapperProps) => {
const hasCustomTopClass = pluginTypeSwitchClassName?.includes('top-')
return (
<div
className={cn(
'mt-4 bg-background-body',
hasCustomTopClass && 'sticky z-10',
pluginTypeSwitchClassName,
)}
>
<SearchBoxWrapper locale={locale} />
<PluginTypeSwitch
locale={locale}
showSearchParams={showSearchParams}
/>
</div>
)
}
export default StickySearchAndSwitchWrapper
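
A usage note on the new wrapper: the outer div only becomes sticky when the caller's pluginTypeSwitchClassName contains a top-* utility, so the offset stays under the host page's control. A hypothetical usage:

// Hypothetical usage: supplying a top-* class opts the search box and type
// switch into sticky positioning below the host page header.
<StickySearchAndSwitchWrapper
  locale='en-US'
  pluginTypeSwitchClassName='top-[56px]'
  showSearchParams
/>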

View File

@ -28,9 +28,9 @@ import {
RiHardDrive3Line,
} from '@remixicon/react'
import { useBoolean } from 'ahooks'
import { useTheme } from 'next-themes'
import React, { useCallback, useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import useTheme from '@/hooks/use-theme'
import Verified from '../base/badges/verified'
import { AutoUpdateLine } from '../../base/icons/src/vender/system'
import DeprecationNotice from '../base/deprecation-notice'
@ -86,7 +86,7 @@ const DetailHeader = ({
alternative_plugin_id,
} = detail
const { author, category, name, label, description, icon, verified, tool } = detail.declaration || detail
const { author, category, name, label, description, icon, icon_dark, verified, tool } = detail.declaration || detail
const isTool = category === PluginCategoryEnum.tool
const providerBriefInfo = tool?.identity
const providerKey = `${plugin_id}/${providerBriefInfo?.name}`
@ -109,6 +109,11 @@ const DetailHeader = ({
return false
}, [isFromMarketplace, latest_version, version])
const iconFileName = theme === 'dark' && icon_dark ? icon_dark : icon
const iconSrc = iconFileName
? (iconFileName.startsWith('http') ? iconFileName : `${API_PREFIX}/workspaces/current/plugin/icon?tenant_id=${tenant_id}&filename=${iconFileName}`)
: ''
const detailUrl = useMemo(() => {
if (isFromGitHub)
return `https://github.com/${meta!.repo}`
@ -214,7 +219,7 @@ const DetailHeader = ({
<div className={cn('shrink-0 border-b border-divider-subtle bg-components-panel-bg p-4 pb-3', isReadmeView && 'border-b-0 bg-transparent p-0')}>
<div className="flex">
<div className={cn('overflow-hidden rounded-xl border border-components-panel-border-subtle', isReadmeView && 'bg-components-panel-bg')}>
<Icon src={icon.startsWith('http') ? icon : `${API_PREFIX}/workspaces/current/plugin/icon?tenant_id=${tenant_id}&filename=${icon}`} />
<Icon src={iconSrc} />
</div>
<div className="ml-3 w-0 grow">
<div className="flex h-5 items-center">
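
DetailHeader above derives iconSrc from the theme-appropriate file name and only routes relative names through the workspace icon endpoint. A standalone sketch of that resolution (buildPluginIconUrl is a hypothetical helper name, assuming API_PREFIX and tenant_id as used in the component):

// Hypothetical helper mirroring the iconSrc logic in DetailHeader/PluginItem:
// pick the themed file name, pass absolute URLs through, and route relative
// names to the workspace plugin icon endpoint.
const buildPluginIconUrl = (
  apiPrefix: string,
  tenantId: string,
  icon: string,
  iconDark?: string,
  theme?: string,
) => {
  const fileName = theme === 'dark' && iconDark ? iconDark : icon
  if (!fileName)
    return ''
  return fileName.startsWith('http')
    ? fileName
    : `${apiPrefix}/workspaces/current/plugin/icon?tenant_id=${tenantId}&filename=${fileName}`
}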

View File

@ -14,11 +14,11 @@ import {
RiHardDrive3Line,
RiLoginCircleLine,
} from '@remixicon/react'
import { useTheme } from 'next-themes'
import type { FC } from 'react'
import React, { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { gte } from 'semver'
import useTheme from '@/hooks/use-theme'
import Verified from '../base/badges/verified'
import Badge from '../../base/badge'
import { Github } from '../../base/icons/src/public/common'
@ -58,7 +58,7 @@ const PluginItem: FC<Props> = ({
status,
deprecated_reason,
} = plugin
const { category, author, name, label, description, icon, verified, meta: declarationMeta } = plugin.declaration
const { category, author, name, label, description, icon, icon_dark, verified, meta: declarationMeta } = plugin.declaration
const orgName = useMemo(() => {
return [PluginSource.github, PluginSource.marketplace].includes(source) ? author : ''
@ -84,6 +84,10 @@ const PluginItem: FC<Props> = ({
const title = getValueFromI18nObject(label)
const descriptionText = getValueFromI18nObject(description)
const { enable_marketplace } = useGlobalPublicStore(s => s.systemFeatures)
const iconFileName = theme === 'dark' && icon_dark ? icon_dark : icon
const iconSrc = iconFileName
? (iconFileName.startsWith('http') ? iconFileName : `${API_PREFIX}/workspaces/current/plugin/icon?tenant_id=${tenant_id}&filename=${iconFileName}`)
: ''
return (
<div
@ -105,7 +109,7 @@ const PluginItem: FC<Props> = ({
<div className='flex h-10 w-10 items-center justify-center overflow-hidden rounded-xl border-[1px] border-components-panel-border-subtle'>
<img
className='h-full w-full'
src={`${API_PREFIX}/workspaces/current/plugin/icon?tenant_id=${tenant_id}&filename=${icon}`}
src={iconSrc}
alt={`plugin-${plugin_unique_identifier}-logo`}
/>
</div>

View File

@ -71,6 +71,7 @@ export type PluginDeclaration = {
version: string
author: string
icon: string
icon_dark?: string
name: string
category: PluginCategoryEnum
label: Record<Locale, string>
@ -248,7 +249,7 @@ export type PluginInfoFromMarketPlace = {
}
export type Plugin = {
type: 'plugin' | 'bundle' | 'model' | 'extension' | 'tool' | 'agent_strategy'
type: 'plugin' | 'bundle' | 'model' | 'extension' | 'tool' | 'agent_strategy' | 'datasource' | 'trigger'
org: string
author?: string
name: string
@ -257,6 +258,7 @@ export type Plugin = {
latest_version: string
latest_package_identifier: string
icon: string
icon_dark?: string
verified: boolean
label: Record<Locale, string>
brief: Record<Locale, string>

View File

@ -49,6 +49,7 @@ export type Collection = {
author: string
description: TypeWithI18N
icon: string | Emoji
icon_dark?: string | Emoji
label: TypeWithI18N
type: CollectionType | string
team_credentials: Record<string, any>

View File

@ -1,6 +1,6 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import React, { useMemo } from 'react'
import type { ToolWithProvider } from '../../types'
import { BlockEnum } from '../../types'
import type { ToolDefaultValue } from '../types'
@ -10,9 +10,13 @@ import { useGetLanguage } from '@/context/i18n'
import BlockIcon from '../../block-icon'
import cn from '@/utils/classnames'
import { useTranslation } from 'react-i18next'
import useTheme from '@/hooks/use-theme'
import { Theme } from '@/types/app'
import { basePath } from '@/utils/var'
const normalizeProviderIcon = (icon: ToolWithProvider['icon']) => {
const normalizeProviderIcon = (icon?: ToolWithProvider['icon']) => {
if (!icon)
return icon
if (typeof icon === 'string' && basePath && icon.startsWith('/') && !icon.startsWith(`${basePath}/`))
return `${basePath}${icon}`
return icon
@ -36,6 +40,20 @@ const ToolItem: FC<Props> = ({
const { t } = useTranslation()
const language = useGetLanguage()
const { theme } = useTheme()
const normalizedIcon = useMemo<ToolWithProvider['icon']>(() => {
return normalizeProviderIcon(provider.icon) ?? provider.icon
}, [provider.icon])
const normalizedIconDark = useMemo(() => {
if (!provider.icon_dark)
return undefined
return normalizeProviderIcon(provider.icon_dark) ?? provider.icon_dark
}, [provider.icon_dark])
const providerIcon = useMemo(() => {
if (theme === Theme.dark && normalizedIconDark)
return normalizedIconDark
return normalizedIcon
}, [theme, normalizedIcon, normalizedIconDark])
return (
<Tooltip
@ -49,7 +67,7 @@ const ToolItem: FC<Props> = ({
size='md'
className='mb-2'
type={BlockEnum.Tool}
toolIcon={provider.icon}
toolIcon={providerIcon}
/>
<div className='mb-1 text-sm leading-5 text-text-primary'>{payload.label[language]}</div>
<div className='text-xs leading-[18px] text-text-secondary'>{payload.description[language]}</div>
@ -73,7 +91,8 @@ const ToolItem: FC<Props> = ({
provider_name: provider.name,
plugin_id: provider.plugin_id,
plugin_unique_identifier: provider.plugin_unique_identifier,
provider_icon: normalizeProviderIcon(provider.icon),
provider_icon: normalizedIcon,
provider_icon_dark: normalizedIconDark,
tool_name: payload.name,
tool_label: payload.label[language],
tool_description: payload.description[language],

View File

@ -14,11 +14,15 @@ import ActionItem from './action-item'
import BlockIcon from '../../block-icon'
import { useTranslation } from 'react-i18next'
import { useHover } from 'ahooks'
import useTheme from '@/hooks/use-theme'
import { Theme } from '@/types/app'
import McpToolNotSupportTooltip from '../../nodes/_base/components/mcp-tool-not-support-tooltip'
import { Mcp } from '@/app/components/base/icons/src/vender/other'
import { basePath } from '@/utils/var'
const normalizeProviderIcon = (icon: ToolWithProvider['icon']) => {
const normalizeProviderIcon = (icon?: ToolWithProvider['icon']) => {
if (!icon)
return icon
if (typeof icon === 'string' && basePath && icon.startsWith('/') && !icon.startsWith(`${basePath}/`))
return `${basePath}${icon}`
return icon
@ -59,6 +63,20 @@ const Tool: FC<Props> = ({
const isHovering = useHover(ref)
const isMCPTool = payload.type === CollectionType.mcp
const isShowCanNotChooseMCPTip = !canChooseMCPTool && isMCPTool
const { theme } = useTheme()
const normalizedIcon = useMemo<ToolWithProvider['icon']>(() => {
return normalizeProviderIcon(payload.icon) ?? payload.icon
}, [payload.icon])
const normalizedIconDark = useMemo(() => {
if (!payload.icon_dark)
return undefined
return normalizeProviderIcon(payload.icon_dark) ?? payload.icon_dark
}, [payload.icon_dark])
const providerIcon = useMemo<ToolWithProvider['icon']>(() => {
if (theme === Theme.dark && normalizedIconDark)
return normalizedIconDark
return normalizedIcon
}, [theme, normalizedIcon, normalizedIconDark])
const getIsDisabled = useCallback((tool: ToolType) => {
if (!selectedTools || !selectedTools.length) return false
return selectedTools.some(selectedTool => (selectedTool.provider_name === payload.name || selectedTool.provider_name === payload.id) && selectedTool.tool_name === tool.name)
@ -95,7 +113,8 @@ const Tool: FC<Props> = ({
provider_name: payload.name,
plugin_id: payload.plugin_id,
plugin_unique_identifier: payload.plugin_unique_identifier,
provider_icon: normalizeProviderIcon(payload.icon),
provider_icon: normalizedIcon,
provider_icon_dark: normalizedIconDark,
tool_name: tool.name,
tool_label: tool.label[language],
tool_description: tool.description[language],
@ -177,7 +196,8 @@ const Tool: FC<Props> = ({
provider_name: payload.name,
plugin_id: payload.plugin_id,
plugin_unique_identifier: payload.plugin_unique_identifier,
provider_icon: normalizeProviderIcon(payload.icon),
provider_icon: normalizedIcon,
provider_icon_dark: normalizedIconDark,
tool_name: tool.name,
tool_label: tool.label[language],
tool_description: tool.description[language],
@ -192,7 +212,7 @@ const Tool: FC<Props> = ({
<BlockIcon
className='shrink-0'
type={BlockEnum.Tool}
toolIcon={payload.icon}
toolIcon={providerIcon}
/>
<div className='ml-2 flex w-0 grow items-center text-sm text-text-primary'>
<span className='max-w-[250px] truncate'>{notShowProvider ? actions[0]?.label[language] : payload.label[language]}</span>

View File

@ -10,6 +10,17 @@ import BlockIcon from '@/app/components/workflow/block-icon'
import { BlockEnum } from '@/app/components/workflow/types'
import type { TriggerDefaultValue, TriggerWithProvider } from '@/app/components/workflow/block-selector/types'
import TriggerPluginActionItem from './action-item'
import { Theme } from '@/types/app'
import useTheme from '@/hooks/use-theme'
import { basePath } from '@/utils/var'
const normalizeProviderIcon = (icon?: TriggerWithProvider['icon']) => {
if (!icon)
return icon
if (typeof icon === 'string' && basePath && icon.startsWith('/') && !icon.startsWith(`${basePath}/`))
return `${basePath}${icon}`
return icon
}
type Props = {
className?: string
@ -26,6 +37,7 @@ const TriggerPluginItem: FC<Props> = ({
}) => {
const { t } = useTranslation()
const language = useGetLanguage()
const { theme } = useTheme()
const notShowProvider = payload.type === CollectionType.workflow
const actions = payload.events
const hasAction = !notShowProvider
@ -55,6 +67,23 @@ const TriggerPluginItem: FC<Props> = ({
return payload.author || ''
}, [payload.author, payload.type, t])
const normalizedIcon = useMemo<TriggerWithProvider['icon']>(() => {
return normalizeProviderIcon(payload.icon) ?? payload.icon
}, [payload.icon])
const normalizedIconDark = useMemo(() => {
if (!payload.icon_dark)
return undefined
return normalizeProviderIcon(payload.icon_dark) ?? payload.icon_dark
}, [payload.icon_dark])
const providerIcon = useMemo<TriggerWithProvider['icon']>(() => {
if (theme === Theme.dark && normalizedIconDark)
return normalizedIconDark
return normalizedIcon
}, [normalizedIcon, normalizedIconDark, theme])
const providerWithResolvedIcon = useMemo(() => ({
...payload,
icon: providerIcon,
}), [payload, providerIcon])
return (
<div
@ -99,7 +128,7 @@ const TriggerPluginItem: FC<Props> = ({
<BlockIcon
className='shrink-0'
type={BlockEnum.TriggerPlugin}
toolIcon={payload.icon}
toolIcon={providerIcon}
/>
<div className='ml-2 flex min-w-0 flex-1 items-center text-sm text-text-primary'>
<span className='max-w-[200px] truncate'>{notShowProvider ? actions[0]?.label[language] : payload.label[language]}</span>
@ -118,7 +147,7 @@ const TriggerPluginItem: FC<Props> = ({
actions.map(action => (
<TriggerPluginActionItem
key={action.name}
provider={payload}
provider={providerWithResolvedIcon}
payload={action}
onSelect={onSelect}
disabled={false}

View File

@ -61,6 +61,7 @@ export type ToolDefaultValue = PluginCommonDefaultValue & {
meta?: PluginMeta
plugin_id?: string
provider_icon?: Collection['icon']
provider_icon_dark?: Collection['icon']
plugin_unique_identifier?: string
}

View File

@ -15,6 +15,7 @@ import type { PluginTriggerNodeType } from '../nodes/trigger-plugin/types'
import type { ToolNodeType } from '../nodes/tool/types'
import type { DataSourceNodeType } from '../nodes/data-source/types'
import type { TriggerWithProvider } from '../block-selector/types'
import useTheme from '@/hooks/use-theme'
const isTriggerPluginNode = (data: Node['data']): data is PluginTriggerNodeType => data.type === BlockEnum.TriggerPlugin
@ -22,17 +23,30 @@ const isToolNode = (data: Node['data']): data is ToolNodeType => data.type === B
const isDataSourceNode = (data: Node['data']): data is DataSourceNodeType => data.type === BlockEnum.DataSource
type IconValue = ToolWithProvider['icon']
const resolveIconByTheme = (
currentTheme: string | undefined,
icon?: IconValue,
iconDark?: IconValue,
) => {
if (currentTheme === 'dark' && iconDark)
return iconDark
return icon
}
const findTriggerPluginIcon = (
identifiers: (string | undefined)[],
triggers: TriggerWithProvider[] | undefined,
currentTheme?: string,
) => {
const targetTriggers = triggers || []
for (const identifier of identifiers) {
if (!identifier)
continue
const matched = targetTriggers.find(trigger => trigger.id === identifier || canFindTool(trigger.id, identifier))
if (matched?.icon)
return matched.icon
if (matched)
return resolveIconByTheme(currentTheme, matched.icon, matched.icon_dark)
}
return undefined
}
@ -44,6 +58,7 @@ export const useToolIcon = (data?: Node['data']) => {
const { data: mcpTools } = useAllMCPTools()
const dataSourceList = useStore(s => s.dataSourceList)
const { data: triggerPlugins } = useAllTriggerPlugins()
const { theme } = useTheme()
const toolIcon = useMemo(() => {
if (!data)
@ -57,6 +72,7 @@ export const useToolIcon = (data?: Node['data']) => {
data.provider_name,
],
triggerPlugins,
theme,
)
if (icon)
return icon
@ -100,12 +116,16 @@ export const useToolIcon = (data?: Node['data']) => {
return true
return data.provider_name === toolWithProvider.name
})
if (matched?.icon)
return matched.icon
if (matched) {
const icon = resolveIconByTheme(theme, matched.icon, matched.icon_dark)
if (icon)
return icon
}
}
if (data.provider_icon)
return data.provider_icon
const fallbackIcon = resolveIconByTheme(theme, data.provider_icon, data.provider_icon_dark)
if (fallbackIcon)
return fallbackIcon
return ''
}
@ -114,7 +134,7 @@ export const useToolIcon = (data?: Node['data']) => {
return dataSourceList?.find(toolWithProvider => toolWithProvider.plugin_id === data.plugin_id)?.icon || ''
return ''
}, [data, dataSourceList, buildInTools, customTools, workflowTools, mcpTools, triggerPlugins])
}, [data, dataSourceList, buildInTools, customTools, workflowTools, mcpTools, triggerPlugins, theme])
return toolIcon
}
@ -126,6 +146,7 @@ export const useGetToolIcon = () => {
const { data: mcpTools } = useAllMCPTools()
const { data: triggerPlugins } = useAllTriggerPlugins()
const workflowStore = useWorkflowStore()
const { theme } = useTheme()
const getToolIcon = useCallback((data: Node['data']) => {
const {
@ -144,6 +165,7 @@ export const useGetToolIcon = () => {
data.provider_name,
],
triggerPlugins,
theme,
)
}
@ -182,12 +204,16 @@ export const useGetToolIcon = () => {
return true
return data.provider_name === toolWithProvider.name
})
if (matched?.icon)
return matched.icon
if (matched) {
const icon = resolveIconByTheme(theme, matched.icon, matched.icon_dark)
if (icon)
return icon
}
}
if (data.provider_icon)
return data.provider_icon
const fallbackIcon = resolveIconByTheme(theme, data.provider_icon, data.provider_icon_dark)
if (fallbackIcon)
return fallbackIcon
return undefined
}
@ -196,7 +222,7 @@ export const useGetToolIcon = () => {
return dataSourceList?.find(toolWithProvider => toolWithProvider.plugin_id === data.plugin_id)?.icon
return undefined
}, [workflowStore, triggerPlugins, buildInTools, customTools, workflowTools, mcpTools])
}, [workflowStore, triggerPlugins, buildInTools, customTools, workflowTools, mcpTools, theme])
return getToolIcon
}
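
Both hooks above now follow the same lookup order: the matched trigger or tool provider's icon pair first, then the node's own provider_icon fields, each filtered through resolveIconByTheme. A hypothetical condensed view of that fallback chain, reusing the IconValue alias and resolveIconByTheme helper defined in this file:

// Hypothetical condensation of the shared fallback order; "matched" is whatever
// provider the hook found in the trigger/tool lists for the current node.
const pickThemedToolIcon = (
  theme: string | undefined,
  matched: { icon?: IconValue, icon_dark?: IconValue } | undefined,
  data: { provider_icon?: IconValue, provider_icon_dark?: IconValue },
) => {
  if (matched) {
    const icon = resolveIconByTheme(theme, matched.icon, matched.icon_dark)
    if (icon)
      return icon
  }
  return resolveIconByTheme(theme, data.provider_icon, data.provider_icon_dark) ?? ''
}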

View File

@ -22,5 +22,6 @@ export type ToolNodeType = CommonNodeType & {
params?: Record<string, any>
plugin_id?: string
provider_icon?: Collection['icon']
provider_icon_dark?: Collection['icon_dark']
plugin_unique_identifier?: string
}

View File

@ -104,15 +104,15 @@
"mime": "^4.1.0",
"mitt": "^3.0.1",
"negotiator": "^1.0.0",
"next": "~15.5.6",
"next": "~15.5.7",
"next-pwa": "^5.6.0",
"next-themes": "^0.4.6",
"pinyin-pro": "^3.27.0",
"qrcode.react": "^4.2.0",
"qs": "^6.14.0",
"react": "19.1.1",
"react": "19.2.1",
"react-18-input-autosize": "^3.0.0",
"react-dom": "19.1.1",
"react-dom": "19.2.1",
"react-easy-crop": "^5.5.3",
"react-hook-form": "^7.65.0",
"react-hotkeys-hook": "^4.6.2",
@ -153,9 +153,9 @@
"@happy-dom/jest-environment": "^20.0.8",
"@mdx-js/loader": "^3.1.1",
"@mdx-js/react": "^3.1.1",
"@next/bundle-analyzer": "15.5.4",
"@next/eslint-plugin-next": "15.5.4",
"@next/mdx": "15.5.4",
"@next/bundle-analyzer": "15.5.7",
"@next/eslint-plugin-next": "15.5.7",
"@next/mdx": "15.5.7",
"@rgrove/parse-xml": "^4.2.0",
"@storybook/addon-docs": "9.1.13",
"@storybook/addon-links": "9.1.13",
@ -173,8 +173,8 @@
"@types/negotiator": "^0.6.4",
"@types/node": "18.15.0",
"@types/qs": "^6.14.0",
"@types/react": "~19.1.17",
"@types/react-dom": "~19.1.11",
"@types/react": "~19.2.7",
"@types/react-dom": "~19.2.3",
"@types/react-slider": "^1.3.6",
"@types/react-syntax-highlighter": "^15.5.13",
"@types/react-window": "^1.8.8",
@ -210,8 +210,8 @@
"uglify-js": "^3.19.3"
},
"resolutions": {
"@types/react": "~19.1.17",
"@types/react-dom": "~19.1.11",
"@types/react": "~19.2.7",
"@types/react-dom": "~19.2.3",
"string-width": "~4.2.3",
"@eslint/plugin-kit": "~0.3",
"canvas": "^3.2.0",

File diff suppressed because it is too large

View File

@ -25,6 +25,7 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg
author: provider.author,
description: provider.description,
icon: provider.icon || '',
icon_dark: provider.icon_dark || '',
label: provider.label,
type: CollectionType.trigger,
team_credentials: {},