Merge branch 'main' into feat/rag-2

twwu 2025-07-09 15:54:57 +08:00
commit 9e44f2b805
197 changed files with 8147 additions and 992 deletions

View File

@@ -90,23 +90,11 @@ class ChatMessageTextApi(Resource):
message_id = args.get("message_id", None)
text = args.get("text", None)
if (
app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}
and app_model.workflow
and app_model.workflow.features_dict
):
text_to_speech = app_model.workflow.features_dict.get("text_to_speech")
if text_to_speech is None:
raise ValueError("TTS is not enabled")
voice = args.get("voice") or text_to_speech.get("voice")
else:
try:
if app_model.app_model_config is None:
raise ValueError("AppModelConfig not found")
voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
except Exception:
voice = None
response = AudioService.transcript_tts(app_model=app_model, text=text, message_id=message_id, voice=voice)
voice = args.get("voice", None)
response = AudioService.transcript_tts(
app_model=app_model, text=text, voice=voice, message_id=message_id, is_draft=True
)
return response
except services.errors.app_model_config.AppModelConfigBrokenError:
logging.exception("App model config broken.")
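Note that this hunk, and the matching hunks in the explore, service-api, and web audio controllers below, drop the controller-side branching that read the voice from workflow features or the app model config; only the voice supplied with the request is forwarded, so any fallback has to happen inside AudioService. A minimal sketch of what such centralized resolution could look like, purely as an assumption about the service layer (not the actual AudioService code):

from typing import Optional

def resolve_voice(requested: Optional[str], features: dict) -> Optional[str]:
    # Hypothetical service-side helper: prefer the explicitly requested voice,
    # otherwise fall back to the app's configured text_to_speech voice,
    # otherwise return None so the provider default is used.
    if requested:
        return requested
    text_to_speech = features.get("text_to_speech") or {}
    return text_to_speech.get("voice")

print(resolve_voice(None, {"text_to_speech": {"enabled": True, "voice": "alloy"}}))  # alloy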

View File

@@ -18,7 +18,6 @@ from controllers.console.app.error import (
from controllers.console.explore.wraps import InstalledAppResource
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from models.model import AppMode
from services.audio_service import AudioService
from services.errors.audio import (
AudioTooLargeServiceError,
@@ -79,19 +78,9 @@ class ChatTextApi(InstalledAppResource):
message_id = args.get("message_id", None)
text = args.get("text", None)
if (
app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}
and app_model.workflow
and app_model.workflow.features_dict
):
text_to_speech = app_model.workflow.features_dict.get("text_to_speech")
voice = args.get("voice") or text_to_speech.get("voice")
else:
try:
voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
except Exception:
voice = None
response = AudioService.transcript_tts(app_model=app_model, message_id=message_id, voice=voice, text=text)
voice = args.get("voice", None)
response = AudioService.transcript_tts(app_model=app_model, text=text, voice=voice, message_id=message_id)
return response
except services.errors.app_model_config.AppModelConfigBrokenError:
logging.exception("App model config broken.")

View File

@@ -87,7 +87,5 @@ class PluginUploadFileApi(Resource):
except services.errors.file.UnsupportedFileTypeError:
raise UnsupportedFileTypeError()
return tool_file, 201
api.add_resource(PluginUploadFileApi, "/files/upload/for-plugin")

View File

@@ -20,7 +20,7 @@ from controllers.service_api.app.error import (
from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from models.model import App, AppMode, EndUser
from models.model import App, EndUser
from services.audio_service import AudioService
from services.errors.audio import (
AudioTooLargeServiceError,
@@ -78,20 +78,9 @@ class TextApi(Resource):
message_id = args.get("message_id", None)
text = args.get("text", None)
if (
app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}
and app_model.workflow
and app_model.workflow.features_dict
):
text_to_speech = app_model.workflow.features_dict.get("text_to_speech", {})
voice = args.get("voice") or text_to_speech.get("voice")
else:
try:
voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
except Exception:
voice = None
voice = args.get("voice", None)
response = AudioService.transcript_tts(
app_model=app_model, message_id=message_id, end_user=end_user.external_user_id, voice=voice, text=text
app_model=app_model, text=text, voice=voice, end_user=end_user.external_user_id, message_id=message_id
)
return response

View File

@@ -211,6 +211,9 @@ class DocumentAddByFileApi(DatasetApiResource):
if not dataset:
raise ValueError("Dataset does not exist.")
if dataset.provider == "external":
raise ValueError("External datasets are not supported.")
indexing_technique = args.get("indexing_technique") or dataset.indexing_technique
if not indexing_technique:
raise ValueError("indexing_technique is required.")
@@ -301,6 +304,9 @@ class DocumentUpdateByFileApi(DatasetApiResource):
if not dataset:
raise ValueError("Dataset does not exist.")
if dataset.provider == "external":
raise ValueError("External datasets are not supported.")
# indexing_technique is already set in dataset since this is an update
args["indexing_technique"] = dataset.indexing_technique

View File

@@ -19,7 +19,7 @@ from controllers.web.error import (
from controllers.web.wraps import WebApiResource
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from models.model import App, AppMode
from models.model import App
from services.audio_service import AudioService
from services.errors.audio import (
AudioTooLargeServiceError,
@@ -77,21 +77,9 @@ class TextApi(WebApiResource):
message_id = args.get("message_id", None)
text = args.get("text", None)
if (
app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}
and app_model.workflow
and app_model.workflow.features_dict
):
text_to_speech = app_model.workflow.features_dict.get("text_to_speech", {})
voice = args.get("voice") or text_to_speech.get("voice")
else:
try:
voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
except Exception:
voice = None
voice = args.get("voice", None)
response = AudioService.transcript_tts(
app_model=app_model, message_id=message_id, end_user=end_user.external_user_id, voice=voice, text=text
app_model=app_model, text=text, voice=voice, end_user=end_user.external_user_id, message_id=message_id
)
return response

View File

@@ -19,6 +19,7 @@ from core.app.entities.task_entities import (
from core.errors.error import QuotaExceededError
from core.model_runtime.errors.invoke import InvokeAuthorizationError, InvokeError
from core.moderation.output_moderation import ModerationRule, OutputModeration
from models.enums import MessageStatus
from models.model import Message
logger = logging.getLogger(__name__)
@@ -62,7 +63,7 @@ class BasedGenerateTaskPipeline:
return err
err_desc = self._error_to_desc(err)
message.status = "error"
message.status = MessageStatus.ERROR
message.error = err_desc
return err
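Switching from the bare "error" string to MessageStatus.ERROR keeps status values defined in one place. A minimal sketch of how such an enum is commonly declared (the real models.enums.MessageStatus may use different members):

from enum import Enum

class MessageStatus(str, Enum):  # illustrative members; the real enum may differ
    NORMAL = "normal"
    ERROR = "error"

# str-valued members keep old string comparisons working after the switch:
assert MessageStatus.ERROR == "error"
print(MessageStatus.ERROR.value)  # error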

View File

@@ -51,7 +51,7 @@ class File(BaseModel):
# It should be set to `ToolFile.id` when `transfer_method` is `tool_file`.
related_id: Optional[str] = None
filename: Optional[str] = None
extension: Optional[str] = Field(default=None, description="File extension, should contains dot")
extension: Optional[str] = Field(default=None, description="File extension, should contain dot")
mime_type: Optional[str] = None
size: int = -1

View File

@@ -1,67 +0,0 @@
import base64
import logging
import time
from typing import Optional
from configs import dify_config
from constants import IMAGE_EXTENSIONS
from core.helper.url_signer import UrlSigner
from extensions.ext_storage import storage
class UploadFileParser:
@classmethod
def get_image_data(cls, upload_file, force_url: bool = False) -> Optional[str]:
if not upload_file:
return None
if upload_file.extension not in IMAGE_EXTENSIONS:
return None
if dify_config.MULTIMODAL_SEND_FORMAT == "url" or force_url:
return cls.get_signed_temp_image_url(upload_file.id)
else:
# get image file base64
try:
data = storage.load(upload_file.key)
except FileNotFoundError:
logging.exception(f"File not found: {upload_file.key}")
return None
encoded_string = base64.b64encode(data).decode("utf-8")
return f"data:{upload_file.mime_type};base64,{encoded_string}"
@classmethod
def get_signed_temp_image_url(cls, upload_file_id) -> str:
"""
get signed url from upload file
:param upload_file_id: the id of UploadFile object
:return:
"""
base_url = dify_config.FILES_URL
image_preview_url = f"{base_url}/files/{upload_file_id}/image-preview"
return UrlSigner.get_signed_url(url=image_preview_url, sign_key=upload_file_id, prefix="image-preview")
@classmethod
def verify_image_file_signature(cls, upload_file_id: str, timestamp: str, nonce: str, sign: str) -> bool:
"""
verify signature
:param upload_file_id: file id
:param timestamp: timestamp
:param nonce: nonce
:param sign: signature
:return:
"""
result = UrlSigner.verify(
sign_key=upload_file_id, timestamp=timestamp, nonce=nonce, sign=sign, prefix="image-preview"
)
# verify signature
if not result:
return False
current_time = int(time.time())
return current_time - int(timestamp) <= dify_config.FILES_ACCESS_TIMEOUT

View File

@@ -28,7 +28,7 @@ class TemplateTransformer(ABC):
def extract_result_str_from_response(cls, response: str):
result = re.search(rf"{cls._result_tag}(.*){cls._result_tag}", response, re.DOTALL)
if not result:
raise ValueError("Failed to parse result")
raise ValueError(f"Failed to parse result: no result tag found in response. Response: {response[:200]}...")
return result.group(1)
@classmethod
@@ -38,16 +38,53 @@
:param response: response
:return:
"""
try:
result = json.loads(cls.extract_result_str_from_response(response))
except json.JSONDecodeError:
raise ValueError("failed to parse response")
result_str = cls.extract_result_str_from_response(response)
result = json.loads(result_str)
except json.JSONDecodeError as e:
raise ValueError(f"Failed to parse JSON response: {str(e)}. Response content: {result_str[:200]}...")
except ValueError as e:
# Re-raise ValueError from extract_result_str_from_response
raise e
except Exception as e:
raise ValueError(f"Unexpected error during response transformation: {str(e)}")
# Check if the result contains an error
if isinstance(result, dict) and "error" in result:
raise ValueError(f"JavaScript execution error: {result['error']}")
if not isinstance(result, dict):
raise ValueError("result must be a dict")
raise ValueError(f"Result must be a dict, got {type(result).__name__}")
if not all(isinstance(k, str) for k in result):
raise ValueError("result keys must be strings")
raise ValueError("Result keys must be strings")
# Post-process the result to convert scientific notation strings back to numbers
result = cls._post_process_result(result)
return result
@classmethod
def _post_process_result(cls, result: dict[Any, Any]) -> dict[Any, Any]:
"""
Post-process the result to convert scientific notation strings back to numbers
"""
def convert_scientific_notation(value):
if isinstance(value, str):
# Check if the string looks like scientific notation
if re.match(r"^-?\d+\.?\d*e[+-]\d+$", value, re.IGNORECASE):
try:
return float(value)
except ValueError:
pass
elif isinstance(value, dict):
return {k: convert_scientific_notation(v) for k, v in value.items()}
elif isinstance(value, list):
return [convert_scientific_notation(v) for v in value]
return value
return convert_scientific_notation(result) # type: ignore[no-any-return]
@classmethod
@abstractmethod
def get_runner_script(cls) -> str:
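The new _post_process_result walks nested dicts and lists and turns strings that look like scientific notation back into floats, compensating for large numbers being serialized as strings by the code runner. A small standalone check of the same regex and recursion:

import re

def convert_scientific_notation(value):
    # Mirrors the recursion shown above: strings matching scientific notation
    # become floats, containers are processed recursively, everything else is kept.
    if isinstance(value, str):
        if re.match(r"^-?\d+\.?\d*e[+-]\d+$", value, re.IGNORECASE):
            try:
                return float(value)
            except ValueError:
                pass
    elif isinstance(value, dict):
        return {k: convert_scientific_notation(v) for k, v in value.items()}
    elif isinstance(value, list):
        return [convert_scientific_notation(v) for v in value]
    return value

print(convert_scientific_notation({"big": "1.5e+21", "text": "2e", "nested": ["-3e-2"]}))
# {'big': 1.5e+21, 'text': '2e', 'nested': [-0.03]}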

View File

@@ -1,22 +0,0 @@
from collections import OrderedDict
from typing import Any
class LRUCache:
def __init__(self, capacity: int):
self.cache: OrderedDict[Any, Any] = OrderedDict()
self.capacity = capacity
def get(self, key: Any) -> Any:
if key not in self.cache:
return None
else:
self.cache.move_to_end(key) # move the key to the end of the OrderedDict
return self.cache[key]
def put(self, key: Any, value: Any) -> None:
if key in self.cache:
self.cache.move_to_end(key)
self.cache[key] = value
if len(self.cache) > self.capacity:
self.cache.popitem(last=False) # pop the first item
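This LRUCache helper is deleted by the commit. For reference, its behavior was the standard OrderedDict-based least-recently-used cache; a short demo of those semantics:

from collections import OrderedDict

# Re-stating the removed helper's semantics with a capacity of 2.
cache: OrderedDict[str, int] = OrderedDict()
CAPACITY = 2

def put(key: str, value: int) -> None:
    if key in cache:
        cache.move_to_end(key)
    cache[key] = value
    if len(cache) > CAPACITY:
        cache.popitem(last=False)  # evict the least recently used entry

put("a", 1)
put("b", 2)
cache.move_to_end("a")  # get("a") would move it to the end, making "b" the oldest
put("c", 3)
print(list(cache))  # ['a', 'c'] -- "b" was evicted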

View File

@@ -317,9 +317,10 @@ class IndexingRunner:
image_upload_file_ids = get_image_upload_file_ids(document.page_content)
for upload_file_id in image_upload_file_ids:
image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
if image_file is None:
continue
try:
if image_file:
storage.delete(image_file.key)
storage.delete(image_file.key)
except Exception:
logging.exception(
"Delete image_files failed while indexing_estimate, \

View File

@@ -23,6 +23,7 @@ from core.model_runtime.entities.message_entities import (
PromptMessage,
PromptMessageTool,
SystemPromptMessage,
TextPromptMessageContent,
)
from core.model_runtime.entities.model_entities import AIModelEntity, ParameterRule
@@ -170,10 +171,15 @@ def invoke_llm_with_structured_output(
system_fingerprint: Optional[str] = None
for event in llm_result:
if isinstance(event, LLMResultChunk):
prompt_messages = event.prompt_messages
system_fingerprint = event.system_fingerprint
if isinstance(event.delta.message.content, str):
result_text += event.delta.message.content
prompt_messages = event.prompt_messages
system_fingerprint = event.system_fingerprint
elif isinstance(event.delta.message.content, list):
for item in event.delta.message.content:
if isinstance(item, TextPromptMessageContent):
result_text += item.data
yield LLMResultChunkWithStructuredOutput(
model=model_schema.model,
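The loop above now also handles list-typed deltas, collecting text from TextPromptMessageContent items instead of only concatenating plain strings. A hedged, standalone sketch of that accumulation, with the content classes reduced to a dataclass:

from dataclasses import dataclass

@dataclass
class TextPart:
    # Stand-in for TextPromptMessageContent, which carries its text in `.data`.
    data: str

def accumulate(deltas: list) -> str:
    text = ""
    for content in deltas:
        if isinstance(content, str):
            text += content
        elif isinstance(content, list):
            for item in content:
                if isinstance(item, TextPart):
                    text += item.data
    return text

print(accumulate(["Hello ", [TextPart("wor"), TextPart("ld")]]))  # Hello world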

View File

@@ -53,6 +53,37 @@ class LLMUsage(ModelUsage):
latency=0.0,
)
@classmethod
def from_metadata(cls, metadata: dict) -> "LLMUsage":
"""
Create LLMUsage instance from metadata dictionary with default values.
Args:
metadata: Dictionary containing usage metadata
Returns:
LLMUsage instance with values from metadata or defaults
"""
total_tokens = metadata.get("total_tokens", 0)
completion_tokens = metadata.get("completion_tokens", 0)
if total_tokens > 0 and completion_tokens == 0:
completion_tokens = total_tokens
return cls(
prompt_tokens=metadata.get("prompt_tokens", 0),
completion_tokens=completion_tokens,
total_tokens=total_tokens,
prompt_unit_price=Decimal(str(metadata.get("prompt_unit_price", 0))),
completion_unit_price=Decimal(str(metadata.get("completion_unit_price", 0))),
total_price=Decimal(str(metadata.get("total_price", 0))),
currency=metadata.get("currency", "USD"),
prompt_price_unit=Decimal(str(metadata.get("prompt_price_unit", 0))),
completion_price_unit=Decimal(str(metadata.get("completion_price_unit", 0))),
prompt_price=Decimal(str(metadata.get("prompt_price", 0))),
completion_price=Decimal(str(metadata.get("completion_price", 0))),
latency=metadata.get("latency", 0.0),
)
def plus(self, other: "LLMUsage") -> "LLMUsage":
"""
Add two LLMUsage instances together.
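A usage note for from_metadata above: missing numeric fields fall back to zero, and when only total_tokens is reported the completion token count is assumed to equal it. Hypothetical calls (the import path for LLMUsage is not shown in this hunk):

# Assuming LLMUsage is imported from its defining entities module.
usage = LLMUsage.from_metadata({"total_tokens": 120, "prompt_tokens": 80})
# completion_tokens was absent (0), so it is backfilled with total_tokens:
#   usage.prompt_tokens == 80, usage.completion_tokens == 120, usage.total_tokens == 120

empty = LLMUsage.from_metadata({})
# all numeric fields default to 0, currency to "USD", latency to 0.0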

View File

@@ -0,0 +1,487 @@
import json
import logging
from collections.abc import Sequence
from typing import Optional
from urllib.parse import urljoin
from opentelemetry.trace import Status, StatusCode
from sqlalchemy.orm import Session, sessionmaker
from core.ops.aliyun_trace.data_exporter.traceclient import (
TraceClient,
convert_datetime_to_nanoseconds,
convert_to_span_id,
convert_to_trace_id,
generate_span_id,
)
from core.ops.aliyun_trace.entities.aliyun_trace_entity import SpanData
from core.ops.aliyun_trace.entities.semconv import (
GEN_AI_COMPLETION,
GEN_AI_FRAMEWORK,
GEN_AI_MODEL_NAME,
GEN_AI_PROMPT,
GEN_AI_PROMPT_TEMPLATE_TEMPLATE,
GEN_AI_PROMPT_TEMPLATE_VARIABLE,
GEN_AI_RESPONSE_FINISH_REASON,
GEN_AI_SESSION_ID,
GEN_AI_SPAN_KIND,
GEN_AI_SYSTEM,
GEN_AI_USAGE_INPUT_TOKENS,
GEN_AI_USAGE_OUTPUT_TOKENS,
GEN_AI_USAGE_TOTAL_TOKENS,
GEN_AI_USER_ID,
INPUT_VALUE,
OUTPUT_VALUE,
RETRIEVAL_DOCUMENT,
RETRIEVAL_QUERY,
TOOL_DESCRIPTION,
TOOL_NAME,
TOOL_PARAMETERS,
GenAISpanKind,
)
from core.ops.base_trace_instance import BaseTraceInstance
from core.ops.entities.config_entity import AliyunConfig
from core.ops.entities.trace_entity import (
BaseTraceInfo,
DatasetRetrievalTraceInfo,
GenerateNameTraceInfo,
MessageTraceInfo,
ModerationTraceInfo,
SuggestedQuestionTraceInfo,
ToolTraceInfo,
WorkflowTraceInfo,
)
from core.rag.models.document import Document
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.entities.workflow_node_execution import (
WorkflowNodeExecution,
WorkflowNodeExecutionMetadataKey,
WorkflowNodeExecutionStatus,
)
from core.workflow.nodes import NodeType
from models import Account, App, EndUser, TenantAccountJoin, WorkflowNodeExecutionTriggeredFrom, db
logger = logging.getLogger(__name__)
class AliyunDataTrace(BaseTraceInstance):
def __init__(
self,
aliyun_config: AliyunConfig,
):
super().__init__(aliyun_config)
base_url = aliyun_config.endpoint.rstrip("/")
endpoint = urljoin(base_url, f"adapt_{aliyun_config.license_key}/api/otlp/traces")
self.trace_client = TraceClient(service_name=aliyun_config.app_name, endpoint=endpoint)
def trace(self, trace_info: BaseTraceInfo):
if isinstance(trace_info, WorkflowTraceInfo):
self.workflow_trace(trace_info)
if isinstance(trace_info, MessageTraceInfo):
self.message_trace(trace_info)
if isinstance(trace_info, ModerationTraceInfo):
pass
if isinstance(trace_info, SuggestedQuestionTraceInfo):
self.suggested_question_trace(trace_info)
if isinstance(trace_info, DatasetRetrievalTraceInfo):
self.dataset_retrieval_trace(trace_info)
if isinstance(trace_info, ToolTraceInfo):
self.tool_trace(trace_info)
if isinstance(trace_info, GenerateNameTraceInfo):
pass
def api_check(self):
return self.trace_client.api_check()
def get_project_url(self):
try:
return self.trace_client.get_project_url()
except Exception as e:
logger.info(f"Aliyun get run url failed: {str(e)}", exc_info=True)
raise ValueError(f"Aliyun get run url failed: {str(e)}")
def workflow_trace(self, trace_info: WorkflowTraceInfo):
trace_id = convert_to_trace_id(trace_info.workflow_run_id)
workflow_span_id = convert_to_span_id(trace_info.workflow_run_id, "workflow")
self.add_workflow_span(trace_id, workflow_span_id, trace_info)
workflow_node_executions = self.get_workflow_node_executions(trace_info)
for node_execution in workflow_node_executions:
node_span = self.build_workflow_node_span(node_execution, trace_id, trace_info, workflow_span_id)
self.trace_client.add_span(node_span)
def message_trace(self, trace_info: MessageTraceInfo):
message_data = trace_info.message_data
if message_data is None:
return
message_id = trace_info.message_id
user_id = message_data.from_account_id
if message_data.from_end_user_id:
end_user_data: Optional[EndUser] = (
db.session.query(EndUser).filter(EndUser.id == message_data.from_end_user_id).first()
)
if end_user_data is not None:
user_id = end_user_data.session_id
status: Status = Status(StatusCode.OK)
if trace_info.error:
status = Status(StatusCode.ERROR, trace_info.error)
trace_id = convert_to_trace_id(message_id)
message_span_id = convert_to_span_id(message_id, "message")
message_span = SpanData(
trace_id=trace_id,
parent_span_id=None,
span_id=message_span_id,
name="message",
start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
attributes={
GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
GEN_AI_USER_ID: str(user_id),
GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value,
GEN_AI_FRAMEWORK: "dify",
INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
OUTPUT_VALUE: str(trace_info.outputs),
},
status=status,
)
self.trace_client.add_span(message_span)
app_model_config = getattr(trace_info.message_data, "app_model_config", {})
pre_prompt = getattr(app_model_config, "pre_prompt", "")
inputs_data = getattr(trace_info.message_data, "inputs", {})
llm_span = SpanData(
trace_id=trace_id,
parent_span_id=message_span_id,
span_id=convert_to_span_id(message_id, "llm"),
name="llm",
start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
attributes={
GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
GEN_AI_USER_ID: str(user_id),
GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value,
GEN_AI_FRAMEWORK: "dify",
GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name", ""),
GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider", ""),
GEN_AI_USAGE_INPUT_TOKENS: str(trace_info.message_tokens),
GEN_AI_USAGE_OUTPUT_TOKENS: str(trace_info.answer_tokens),
GEN_AI_USAGE_TOTAL_TOKENS: str(trace_info.total_tokens),
GEN_AI_PROMPT_TEMPLATE_VARIABLE: json.dumps(inputs_data, ensure_ascii=False),
GEN_AI_PROMPT_TEMPLATE_TEMPLATE: pre_prompt,
GEN_AI_PROMPT: json.dumps(trace_info.inputs, ensure_ascii=False),
GEN_AI_COMPLETION: str(trace_info.outputs),
INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
OUTPUT_VALUE: str(trace_info.outputs),
},
status=status,
)
self.trace_client.add_span(llm_span)
def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo):
if trace_info.message_data is None:
return
message_id = trace_info.message_id
documents_data = extract_retrieval_documents(trace_info.documents)
dataset_retrieval_span = SpanData(
trace_id=convert_to_trace_id(message_id),
parent_span_id=convert_to_span_id(message_id, "message"),
span_id=generate_span_id(),
name="dataset_retrieval",
start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
attributes={
GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value,
GEN_AI_FRAMEWORK: "dify",
RETRIEVAL_QUERY: str(trace_info.inputs),
RETRIEVAL_DOCUMENT: json.dumps(documents_data, ensure_ascii=False),
INPUT_VALUE: str(trace_info.inputs),
OUTPUT_VALUE: json.dumps(documents_data, ensure_ascii=False),
},
)
self.trace_client.add_span(dataset_retrieval_span)
def tool_trace(self, trace_info: ToolTraceInfo):
if trace_info.message_data is None:
return
message_id = trace_info.message_id
status: Status = Status(StatusCode.OK)
if trace_info.error:
status = Status(StatusCode.ERROR, trace_info.error)
tool_span = SpanData(
trace_id=convert_to_trace_id(message_id),
parent_span_id=convert_to_span_id(message_id, "message"),
span_id=generate_span_id(),
name=trace_info.tool_name,
start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
attributes={
GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value,
GEN_AI_FRAMEWORK: "dify",
TOOL_NAME: trace_info.tool_name,
TOOL_DESCRIPTION: json.dumps(trace_info.tool_config, ensure_ascii=False),
TOOL_PARAMETERS: json.dumps(trace_info.tool_inputs, ensure_ascii=False),
INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
OUTPUT_VALUE: str(trace_info.tool_outputs),
},
status=status,
)
self.trace_client.add_span(tool_span)
def get_workflow_node_executions(self, trace_info: WorkflowTraceInfo) -> Sequence[WorkflowNodeExecution]:
# through workflow_run_id get all_nodes_execution using repository
session_factory = sessionmaker(bind=db.engine)
# Find the app's creator account
with Session(db.engine, expire_on_commit=False) as session:
# Get the app to find its creator
app_id = trace_info.metadata.get("app_id")
if not app_id:
raise ValueError("No app_id found in trace_info metadata")
app = session.query(App).filter(App.id == app_id).first()
if not app:
raise ValueError(f"App with id {app_id} not found")
if not app.created_by:
raise ValueError(f"App with id {app_id} has no creator (created_by is None)")
service_account = session.query(Account).filter(Account.id == app.created_by).first()
if not service_account:
raise ValueError(f"Creator account with id {app.created_by} not found for app {app_id}")
current_tenant = (
session.query(TenantAccountJoin).filter_by(account_id=service_account.id, current=True).first()
)
if not current_tenant:
raise ValueError(f"Current tenant not found for account {service_account.id}")
service_account.set_tenant_id(current_tenant.tenant_id)
workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
session_factory=session_factory,
user=service_account,
app_id=trace_info.metadata.get("app_id"),
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
)
# Get all executions for this workflow run
workflow_node_executions = workflow_node_execution_repository.get_by_workflow_run(
workflow_run_id=trace_info.workflow_run_id
)
return workflow_node_executions
def build_workflow_node_span(
self, node_execution: WorkflowNodeExecution, trace_id: int, trace_info: WorkflowTraceInfo, workflow_span_id: int
):
try:
if node_execution.node_type == NodeType.LLM:
node_span = self.build_workflow_llm_span(trace_id, workflow_span_id, trace_info, node_execution)
elif node_execution.node_type == NodeType.KNOWLEDGE_RETRIEVAL:
node_span = self.build_workflow_retrieval_span(trace_id, workflow_span_id, trace_info, node_execution)
elif node_execution.node_type == NodeType.TOOL:
node_span = self.build_workflow_tool_span(trace_id, workflow_span_id, trace_info, node_execution)
else:
node_span = self.build_workflow_task_span(trace_id, workflow_span_id, trace_info, node_execution)
return node_span
except Exception:
return None
def get_workflow_node_status(self, node_execution: WorkflowNodeExecution) -> Status:
span_status: Status = Status(StatusCode.UNSET)
if node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED:
span_status = Status(StatusCode.OK)
elif node_execution.status in [WorkflowNodeExecutionStatus.FAILED, WorkflowNodeExecutionStatus.EXCEPTION]:
span_status = Status(StatusCode.ERROR, str(node_execution.error))
return span_status
def build_workflow_task_span(
self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
) -> SpanData:
return SpanData(
trace_id=trace_id,
parent_span_id=workflow_span_id,
span_id=convert_to_span_id(node_execution.id, "node"),
name=node_execution.title,
start_time=convert_datetime_to_nanoseconds(node_execution.created_at),
end_time=convert_datetime_to_nanoseconds(node_execution.finished_at),
attributes={
GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
GEN_AI_SPAN_KIND: GenAISpanKind.TASK.value,
GEN_AI_FRAMEWORK: "dify",
INPUT_VALUE: json.dumps(node_execution.inputs, ensure_ascii=False),
OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False),
},
status=self.get_workflow_node_status(node_execution),
)
def build_workflow_tool_span(
self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
) -> SpanData:
tool_des = {}
if node_execution.metadata:
tool_des = node_execution.metadata.get(WorkflowNodeExecutionMetadataKey.TOOL_INFO, {})
return SpanData(
trace_id=trace_id,
parent_span_id=workflow_span_id,
span_id=convert_to_span_id(node_execution.id, "node"),
name=node_execution.title,
start_time=convert_datetime_to_nanoseconds(node_execution.created_at),
end_time=convert_datetime_to_nanoseconds(node_execution.finished_at),
attributes={
GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value,
GEN_AI_FRAMEWORK: "dify",
TOOL_NAME: node_execution.title,
TOOL_DESCRIPTION: json.dumps(tool_des, ensure_ascii=False),
TOOL_PARAMETERS: json.dumps(node_execution.inputs if node_execution.inputs else {}, ensure_ascii=False),
INPUT_VALUE: json.dumps(node_execution.inputs if node_execution.inputs else {}, ensure_ascii=False),
OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False),
},
status=self.get_workflow_node_status(node_execution),
)
def build_workflow_retrieval_span(
self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
) -> SpanData:
input_value = ""
if node_execution.inputs:
input_value = str(node_execution.inputs.get("query", ""))
output_value = ""
if node_execution.outputs:
output_value = json.dumps(node_execution.outputs.get("result", []), ensure_ascii=False)
return SpanData(
trace_id=trace_id,
parent_span_id=workflow_span_id,
span_id=convert_to_span_id(node_execution.id, "node"),
name=node_execution.title,
start_time=convert_datetime_to_nanoseconds(node_execution.created_at),
end_time=convert_datetime_to_nanoseconds(node_execution.finished_at),
attributes={
GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value,
GEN_AI_FRAMEWORK: "dify",
RETRIEVAL_QUERY: input_value,
RETRIEVAL_DOCUMENT: output_value,
INPUT_VALUE: input_value,
OUTPUT_VALUE: output_value,
},
status=self.get_workflow_node_status(node_execution),
)
def build_workflow_llm_span(
self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
) -> SpanData:
process_data = node_execution.process_data or {}
outputs = node_execution.outputs or {}
usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {})
return SpanData(
trace_id=trace_id,
parent_span_id=workflow_span_id,
span_id=convert_to_span_id(node_execution.id, "node"),
name=node_execution.title,
start_time=convert_datetime_to_nanoseconds(node_execution.created_at),
end_time=convert_datetime_to_nanoseconds(node_execution.finished_at),
attributes={
GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value,
GEN_AI_FRAMEWORK: "dify",
GEN_AI_MODEL_NAME: process_data.get("model_name", ""),
GEN_AI_SYSTEM: process_data.get("model_provider", ""),
GEN_AI_USAGE_INPUT_TOKENS: str(usage_data.get("prompt_tokens", 0)),
GEN_AI_USAGE_OUTPUT_TOKENS: str(usage_data.get("completion_tokens", 0)),
GEN_AI_USAGE_TOTAL_TOKENS: str(usage_data.get("total_tokens", 0)),
GEN_AI_PROMPT: json.dumps(process_data.get("prompts", []), ensure_ascii=False),
GEN_AI_COMPLETION: str(outputs.get("text", "")),
GEN_AI_RESPONSE_FINISH_REASON: outputs.get("finish_reason", ""),
INPUT_VALUE: json.dumps(process_data.get("prompts", []), ensure_ascii=False),
OUTPUT_VALUE: str(outputs.get("text", "")),
},
status=self.get_workflow_node_status(node_execution),
)
def add_workflow_span(self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo):
message_span_id = None
if trace_info.message_id:
message_span_id = convert_to_span_id(trace_info.message_id, "message")
user_id = trace_info.metadata.get("user_id")
status: Status = Status(StatusCode.OK)
if trace_info.error:
status = Status(StatusCode.ERROR, trace_info.error)
if message_span_id: # chatflow
message_span = SpanData(
trace_id=trace_id,
parent_span_id=None,
span_id=message_span_id,
name="message",
start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
attributes={
GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
GEN_AI_USER_ID: str(user_id),
GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value,
GEN_AI_FRAMEWORK: "dify",
INPUT_VALUE: trace_info.workflow_run_inputs.get("sys.query", ""),
OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False),
},
status=status,
)
self.trace_client.add_span(message_span)
workflow_span = SpanData(
trace_id=trace_id,
parent_span_id=message_span_id,
span_id=workflow_span_id,
name="workflow",
start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
attributes={
GEN_AI_USER_ID: str(user_id),
GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value,
GEN_AI_FRAMEWORK: "dify",
INPUT_VALUE: json.dumps(trace_info.workflow_run_inputs, ensure_ascii=False),
OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False),
},
status=status,
)
self.trace_client.add_span(workflow_span)
def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo):
message_id = trace_info.message_id
status: Status = Status(StatusCode.OK)
if trace_info.error:
status = Status(StatusCode.ERROR, trace_info.error)
suggested_question_span = SpanData(
trace_id=convert_to_trace_id(message_id),
parent_span_id=convert_to_span_id(message_id, "message"),
span_id=convert_to_span_id(message_id, "suggested_question"),
name="suggested_question",
start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
attributes={
GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value,
GEN_AI_FRAMEWORK: "dify",
GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name", ""),
GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider", ""),
GEN_AI_PROMPT: json.dumps(trace_info.inputs, ensure_ascii=False),
GEN_AI_COMPLETION: json.dumps(trace_info.suggested_question, ensure_ascii=False),
INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
OUTPUT_VALUE: json.dumps(trace_info.suggested_question, ensure_ascii=False),
},
status=status,
)
self.trace_client.add_span(suggested_question_span)
def extract_retrieval_documents(documents: list[Document]):
documents_data = []
for document in documents:
document_data = {
"content": document.page_content,
"metadata": {
"dataset_id": document.metadata.get("dataset_id"),
"doc_id": document.metadata.get("doc_id"),
"document_id": document.metadata.get("document_id"),
},
"score": document.metadata.get("score"),
}
documents_data.append(document_data)
return documents_data
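extract_retrieval_documents flattens retrieval results into plain dicts before they are serialized into the RETRIEVAL_DOCUMENT attribute. A hypothetical call, assuming Document accepts page_content and metadata keyword arguments (its constructor is not shown in this diff):

docs = [Document(page_content="retrieved chunk",
                 metadata={"dataset_id": "ds-1", "doc_id": "d-1",
                           "document_id": "doc-1", "score": 0.87})]
print(extract_retrieval_documents(docs))
# [{'content': 'retrieved chunk',
#   'metadata': {'dataset_id': 'ds-1', 'doc_id': 'd-1', 'document_id': 'doc-1'},
#   'score': 0.87}]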

View File

@@ -0,0 +1,200 @@
import hashlib
import logging
import random
import socket
import threading
import uuid
from collections import deque
from collections.abc import Sequence
from datetime import datetime
from typing import Optional
import requests
from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.util.instrumentation import InstrumentationScope
from opentelemetry.semconv.resource import ResourceAttributes
from configs import dify_config
from core.ops.aliyun_trace.entities.aliyun_trace_entity import SpanData
INVALID_SPAN_ID = 0x0000000000000000
INVALID_TRACE_ID = 0x00000000000000000000000000000000
logger = logging.getLogger(__name__)
class TraceClient:
def __init__(
self,
service_name: str,
endpoint: str,
max_queue_size: int = 1000,
schedule_delay_sec: int = 5,
max_export_batch_size: int = 50,
):
self.endpoint = endpoint
self.resource = Resource(
attributes={
ResourceAttributes.SERVICE_NAME: service_name,
ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}",
ResourceAttributes.DEPLOYMENT_ENVIRONMENT: f"{dify_config.DEPLOY_ENV}-{dify_config.EDITION}",
ResourceAttributes.HOST_NAME: socket.gethostname(),
}
)
self.span_builder = SpanBuilder(self.resource)
self.exporter = OTLPSpanExporter(endpoint=endpoint)
self.max_queue_size = max_queue_size
self.schedule_delay_sec = schedule_delay_sec
self.max_export_batch_size = max_export_batch_size
self.queue: deque = deque(maxlen=max_queue_size)
self.condition = threading.Condition(threading.Lock())
self.done = False
self.worker_thread = threading.Thread(target=self._worker, daemon=True)
self.worker_thread.start()
self._spans_dropped = False
def export(self, spans: Sequence[ReadableSpan]):
self.exporter.export(spans)
def api_check(self):
try:
response = requests.head(self.endpoint, timeout=5)
if response.status_code == 405:
return True
else:
logger.debug(f"AliyunTrace API check failed: Unexpected status code: {response.status_code}")
return False
except requests.exceptions.RequestException as e:
logger.debug(f"AliyunTrace API check failed: {str(e)}")
raise ValueError(f"AliyunTrace API check failed: {str(e)}")
def get_project_url(self):
return "https://arms.console.aliyun.com/#/llm"
def add_span(self, span_data: SpanData):
if span_data is None:
return
span: ReadableSpan = self.span_builder.build_span(span_data)
with self.condition:
if len(self.queue) == self.max_queue_size:
if not self._spans_dropped:
logger.warning("Queue is full, likely spans will be dropped.")
self._spans_dropped = True
self.queue.appendleft(span)
if len(self.queue) >= self.max_export_batch_size:
self.condition.notify()
def _worker(self):
while not self.done:
with self.condition:
if len(self.queue) < self.max_export_batch_size and not self.done:
self.condition.wait(timeout=self.schedule_delay_sec)
self._export_batch()
def _export_batch(self):
spans_to_export: list[ReadableSpan] = []
with self.condition:
while len(spans_to_export) < self.max_export_batch_size and self.queue:
spans_to_export.append(self.queue.pop())
if spans_to_export:
try:
self.exporter.export(spans_to_export)
except Exception as e:
logger.debug(f"Error exporting spans: {e}")
def shutdown(self):
with self.condition:
self.done = True
self.condition.notify_all()
self.worker_thread.join()
self._export_batch()
self.exporter.shutdown()
class SpanBuilder:
def __init__(self, resource):
self.resource = resource
self.instrumentation_scope = InstrumentationScope(
__name__,
"",
None,
None,
)
def build_span(self, span_data: SpanData) -> ReadableSpan:
span_context = trace_api.SpanContext(
trace_id=span_data.trace_id,
span_id=span_data.span_id,
is_remote=False,
trace_flags=trace_api.TraceFlags(trace_api.TraceFlags.SAMPLED),
trace_state=None,
)
parent_span_context = None
if span_data.parent_span_id is not None:
parent_span_context = trace_api.SpanContext(
trace_id=span_data.trace_id,
span_id=span_data.parent_span_id,
is_remote=False,
trace_flags=trace_api.TraceFlags(trace_api.TraceFlags.SAMPLED),
trace_state=None,
)
span = ReadableSpan(
name=span_data.name,
context=span_context,
parent=parent_span_context,
resource=self.resource,
attributes=span_data.attributes,
events=span_data.events,
links=span_data.links,
kind=trace_api.SpanKind.INTERNAL,
status=span_data.status,
start_time=span_data.start_time,
end_time=span_data.end_time,
instrumentation_scope=self.instrumentation_scope,
)
return span
def generate_span_id() -> int:
span_id = random.getrandbits(64)
while span_id == INVALID_SPAN_ID:
span_id = random.getrandbits(64)
return span_id
def convert_to_trace_id(uuid_v4: Optional[str]) -> int:
try:
uuid_obj = uuid.UUID(uuid_v4)
return uuid_obj.int
except Exception as e:
raise ValueError(f"Invalid UUID input: {e}")
def convert_to_span_id(uuid_v4: Optional[str], span_type: str) -> int:
try:
uuid_obj = uuid.UUID(uuid_v4)
except Exception as e:
raise ValueError(f"Invalid UUID input: {e}")
combined_key = f"{uuid_obj.hex}-{span_type}"
hash_bytes = hashlib.sha256(combined_key.encode("utf-8")).digest()
span_id = int.from_bytes(hash_bytes[:8], byteorder="big", signed=False)
return span_id
def convert_datetime_to_nanoseconds(start_time_a: Optional[datetime]) -> Optional[int]:
if start_time_a is None:
return None
timestamp_in_seconds = start_time_a.timestamp()
timestamp_in_nanoseconds = int(timestamp_in_seconds * 1e9)
return timestamp_in_nanoseconds
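The id helpers above are deterministic: convert_to_trace_id reuses the UUID's 128-bit integer, and convert_to_span_id hashes "<uuid hex>-<span type>" with SHA-256 and keeps the first 8 bytes, so the same message id and span type always yield the same span id. This is what lets AliyunDataTrace parent its llm and dataset_retrieval spans under the message span without storing any state. A quick self-contained check of the same derivation:

import hashlib
import uuid

def span_id_for(uuid_v4: str, span_type: str) -> int:
    # Same derivation as convert_to_span_id above.
    combined = f"{uuid.UUID(uuid_v4).hex}-{span_type}"
    digest = hashlib.sha256(combined.encode("utf-8")).digest()
    return int.from_bytes(digest[:8], byteorder="big", signed=False)

msg_id = "3f9c2a52-7f44-4df7-9a52-0f2c6f7f1234"  # example value
assert span_id_for(msg_id, "message") == span_id_for(msg_id, "message")
assert span_id_for(msg_id, "message") != span_id_for(msg_id, "llm")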

View File

@@ -0,0 +1,21 @@
from collections.abc import Sequence
from typing import Optional
from opentelemetry import trace as trace_api
from opentelemetry.sdk.trace import Event, Status, StatusCode
from pydantic import BaseModel, Field
class SpanData(BaseModel):
model_config = {"arbitrary_types_allowed": True}
trace_id: int = Field(..., description="The unique identifier for the trace.")
parent_span_id: Optional[int] = Field(None, description="The ID of the parent span, if any.")
span_id: int = Field(..., description="The unique identifier for this span.")
name: str = Field(..., description="The name of the span.")
attributes: dict[str, str] = Field(default_factory=dict, description="Attributes associated with the span.")
events: Sequence[Event] = Field(default_factory=list, description="Events recorded in the span.")
links: Sequence[trace_api.Link] = Field(default_factory=list, description="Links to other spans.")
status: Status = Field(default=Status(StatusCode.UNSET), description="The status of the span.")
start_time: Optional[int] = Field(..., description="The start time of the span in nanoseconds.")
end_time: Optional[int] = Field(..., description="The end time of the span in nanoseconds.")
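SpanData is only a pydantic carrier for what SpanBuilder.build_span needs; trace_id, span_id, name, start_time, and end_time are the required fields. A minimal construction sketch with illustrative ids and nanosecond timestamps (in practice the ids come from the traceclient helpers):

# Assumes the imports shown above.
span = SpanData(
    trace_id=0x1234,                        # normally convert_to_trace_id(message_id)
    parent_span_id=None,
    span_id=0x5678,                         # normally convert_to_span_id(message_id, "message")
    name="message",
    attributes={"gen_ai.framework": "dify"},
    start_time=1_720_000_000_000_000_000,   # nanoseconds since epoch
    end_time=1_720_000_000_500_000_000,
)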

View File

@@ -0,0 +1,64 @@
from enum import Enum
# public
GEN_AI_SESSION_ID = "gen_ai.session.id"
GEN_AI_USER_ID = "gen_ai.user.id"
GEN_AI_USER_NAME = "gen_ai.user.name"
GEN_AI_SPAN_KIND = "gen_ai.span.kind"
GEN_AI_FRAMEWORK = "gen_ai.framework"
# Chain
INPUT_VALUE = "input.value"
OUTPUT_VALUE = "output.value"
# Retriever
RETRIEVAL_QUERY = "retrieval.query"
RETRIEVAL_DOCUMENT = "retrieval.document"
# LLM
GEN_AI_MODEL_NAME = "gen_ai.model_name"
GEN_AI_SYSTEM = "gen_ai.system"
GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens"
GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens"
GEN_AI_USAGE_TOTAL_TOKENS = "gen_ai.usage.total_tokens"
GEN_AI_PROMPT_TEMPLATE_TEMPLATE = "gen_ai.prompt_template.template"
GEN_AI_PROMPT_TEMPLATE_VARIABLE = "gen_ai.prompt_template.variable"
GEN_AI_PROMPT = "gen_ai.prompt"
GEN_AI_COMPLETION = "gen_ai.completion"
GEN_AI_RESPONSE_FINISH_REASON = "gen_ai.response.finish_reason"
# Tool
TOOL_NAME = "tool.name"
TOOL_DESCRIPTION = "tool.description"
TOOL_PARAMETERS = "tool.parameters"
class GenAISpanKind(Enum):
CHAIN = "CHAIN"
RETRIEVER = "RETRIEVER"
RERANKER = "RERANKER"
LLM = "LLM"
EMBEDDING = "EMBEDDING"
TOOL = "TOOL"
AGENT = "AGENT"
TASK = "TASK"

View File

@@ -0,0 +1,726 @@
import hashlib
import json
import logging
import os
from datetime import datetime, timedelta
from typing import Optional, Union, cast
from openinference.semconv.trace import OpenInferenceSpanKindValues, SpanAttributes
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter as GrpcOTLPSpanExporter
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as HttpOTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.sdk.trace.id_generator import RandomIdGenerator
from opentelemetry.trace import SpanContext, TraceFlags, TraceState
from core.ops.base_trace_instance import BaseTraceInstance
from core.ops.entities.config_entity import ArizeConfig, PhoenixConfig
from core.ops.entities.trace_entity import (
BaseTraceInfo,
DatasetRetrievalTraceInfo,
GenerateNameTraceInfo,
MessageTraceInfo,
ModerationTraceInfo,
SuggestedQuestionTraceInfo,
ToolTraceInfo,
TraceTaskName,
WorkflowTraceInfo,
)
from extensions.ext_database import db
from models.model import EndUser, MessageFile
from models.workflow import WorkflowNodeExecutionModel
logger = logging.getLogger(__name__)
def setup_tracer(arize_phoenix_config: ArizeConfig | PhoenixConfig) -> tuple[trace_sdk.Tracer, SimpleSpanProcessor]:
"""Configure OpenTelemetry tracer with OTLP exporter for Arize/Phoenix."""
try:
# Choose the appropriate exporter based on config type
exporter: Union[GrpcOTLPSpanExporter, HttpOTLPSpanExporter]
if isinstance(arize_phoenix_config, ArizeConfig):
arize_endpoint = f"{arize_phoenix_config.endpoint}/v1"
arize_headers = {
"api_key": arize_phoenix_config.api_key or "",
"space_id": arize_phoenix_config.space_id or "",
"authorization": f"Bearer {arize_phoenix_config.api_key or ''}",
}
exporter = GrpcOTLPSpanExporter(
endpoint=arize_endpoint,
headers=arize_headers,
timeout=30,
)
else:
phoenix_endpoint = f"{arize_phoenix_config.endpoint}/v1/traces"
phoenix_headers = {
"api_key": arize_phoenix_config.api_key or "",
"authorization": f"Bearer {arize_phoenix_config.api_key or ''}",
}
exporter = HttpOTLPSpanExporter(
endpoint=phoenix_endpoint,
headers=phoenix_headers,
timeout=30,
)
attributes = {
"openinference.project.name": arize_phoenix_config.project or "",
"model_id": arize_phoenix_config.project or "",
}
resource = Resource(attributes=attributes)
provider = trace_sdk.TracerProvider(resource=resource)
processor = SimpleSpanProcessor(
exporter,
)
provider.add_span_processor(processor)
# Create a named tracer instead of setting the global provider
tracer_name = f"arize_phoenix_tracer_{arize_phoenix_config.project}"
logger.info(f"[Arize/Phoenix] Created tracer with name: {tracer_name}")
return cast(trace_sdk.Tracer, provider.get_tracer(tracer_name)), processor
except Exception as e:
logger.error(f"[Arize/Phoenix] Failed to setup the tracer: {str(e)}", exc_info=True)
raise
def datetime_to_nanos(dt: Optional[datetime]) -> int:
"""Convert datetime to nanoseconds since epoch. If None, use current time."""
if dt is None:
dt = datetime.now()
return int(dt.timestamp() * 1_000_000_000)
def uuid_to_trace_id(string: Optional[str]) -> int:
"""Convert UUID string to a valid trace ID (16-byte integer)."""
if string is None:
string = ""
hash_object = hashlib.sha256(string.encode())
# Take the first 16 bytes (128 bits) of the hash
digest = hash_object.digest()[:16]
# Convert to integer (128 bits)
return int.from_bytes(digest, byteorder="big")
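# Illustration, not part of the committed file: the trace-id derivation above is
# deterministic and always fits in 128 bits, because only the first 16 bytes of a
# SHA-256 digest of the message id string are kept.
import hashlib as _demo_hashlib

def _demo_trace_id(s: str) -> int:
    return int.from_bytes(_demo_hashlib.sha256(s.encode()).digest()[:16], byteorder="big")

assert _demo_trace_id("message-1") == _demo_trace_id("message-1")  # stable across calls
assert _demo_trace_id("message-1").bit_length() <= 128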
class ArizePhoenixDataTrace(BaseTraceInstance):
def __init__(
self,
arize_phoenix_config: ArizeConfig | PhoenixConfig,
):
super().__init__(arize_phoenix_config)
import logging
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
self.arize_phoenix_config = arize_phoenix_config
self.tracer, self.processor = setup_tracer(arize_phoenix_config)
self.project = arize_phoenix_config.project
self.file_base_url = os.getenv("FILES_URL", "http://127.0.0.1:5001")
def trace(self, trace_info: BaseTraceInfo):
logger.info(f"[Arize/Phoenix] Trace: {trace_info}")
try:
if isinstance(trace_info, WorkflowTraceInfo):
self.workflow_trace(trace_info)
if isinstance(trace_info, MessageTraceInfo):
self.message_trace(trace_info)
if isinstance(trace_info, ModerationTraceInfo):
self.moderation_trace(trace_info)
if isinstance(trace_info, SuggestedQuestionTraceInfo):
self.suggested_question_trace(trace_info)
if isinstance(trace_info, DatasetRetrievalTraceInfo):
self.dataset_retrieval_trace(trace_info)
if isinstance(trace_info, ToolTraceInfo):
self.tool_trace(trace_info)
if isinstance(trace_info, GenerateNameTraceInfo):
self.generate_name_trace(trace_info)
except Exception as e:
logger.error(f"[Arize/Phoenix] Error in the trace: {str(e)}", exc_info=True)
raise
def workflow_trace(self, trace_info: WorkflowTraceInfo):
if trace_info.message_data is None:
return
workflow_metadata = {
"workflow_id": trace_info.workflow_run_id or "",
"message_id": trace_info.message_id or "",
"workflow_app_log_id": trace_info.workflow_app_log_id or "",
"status": trace_info.workflow_run_status or "",
"status_message": trace_info.error or "",
"level": "ERROR" if trace_info.error else "DEFAULT",
"total_tokens": trace_info.total_tokens or 0,
}
workflow_metadata.update(trace_info.metadata)
trace_id = uuid_to_trace_id(trace_info.message_id)
span_id = RandomIdGenerator().generate_span_id()
context = SpanContext(
trace_id=trace_id,
span_id=span_id,
is_remote=False,
trace_flags=TraceFlags(TraceFlags.SAMPLED),
trace_state=TraceState(),
)
workflow_span = self.tracer.start_span(
name=TraceTaskName.WORKFLOW_TRACE.value,
attributes={
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.workflow_run_inputs, ensure_ascii=False),
SpanAttributes.OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False),
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.CHAIN.value,
SpanAttributes.METADATA: json.dumps(workflow_metadata, ensure_ascii=False),
SpanAttributes.SESSION_ID: trace_info.conversation_id or "",
},
start_time=datetime_to_nanos(trace_info.start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
)
try:
# Process workflow nodes
for node_execution in self._get_workflow_nodes(trace_info.workflow_run_id):
created_at = node_execution.created_at or datetime.now()
elapsed_time = node_execution.elapsed_time
finished_at = created_at + timedelta(seconds=elapsed_time)
process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}
node_metadata = {
"node_id": node_execution.id,
"node_type": node_execution.node_type,
"node_status": node_execution.status,
"tenant_id": node_execution.tenant_id,
"app_id": node_execution.app_id,
"app_name": node_execution.title,
"status": node_execution.status,
"level": "ERROR" if node_execution.status != "succeeded" else "DEFAULT",
}
if node_execution.execution_metadata:
node_metadata.update(json.loads(node_execution.execution_metadata))
# Determine the correct span kind based on node type
span_kind = OpenInferenceSpanKindValues.CHAIN.value
if node_execution.node_type == "llm":
span_kind = OpenInferenceSpanKindValues.LLM.value
provider = process_data.get("model_provider")
model = process_data.get("model_name")
if provider:
node_metadata["ls_provider"] = provider
if model:
node_metadata["ls_model_name"] = model
outputs = json.loads(node_execution.outputs).get("usage", {})
usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {})
if usage_data:
node_metadata["total_tokens"] = usage_data.get("total_tokens", 0)
node_metadata["prompt_tokens"] = usage_data.get("prompt_tokens", 0)
node_metadata["completion_tokens"] = usage_data.get("completion_tokens", 0)
elif node_execution.node_type == "dataset_retrieval":
span_kind = OpenInferenceSpanKindValues.RETRIEVER.value
elif node_execution.node_type == "tool":
span_kind = OpenInferenceSpanKindValues.TOOL.value
else:
span_kind = OpenInferenceSpanKindValues.CHAIN.value
node_span = self.tracer.start_span(
name=node_execution.node_type,
attributes={
SpanAttributes.INPUT_VALUE: node_execution.inputs or "{}",
SpanAttributes.OUTPUT_VALUE: node_execution.outputs or "{}",
SpanAttributes.OPENINFERENCE_SPAN_KIND: span_kind,
SpanAttributes.METADATA: json.dumps(node_metadata, ensure_ascii=False),
SpanAttributes.SESSION_ID: trace_info.conversation_id or "",
},
start_time=datetime_to_nanos(created_at),
)
try:
if node_execution.node_type == "llm":
provider = process_data.get("model_provider")
model = process_data.get("model_name")
if provider:
node_span.set_attribute(SpanAttributes.LLM_PROVIDER, provider)
if model:
node_span.set_attribute(SpanAttributes.LLM_MODEL_NAME, model)
outputs = json.loads(node_execution.outputs).get("usage", {})
usage_data = (
process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {})
)
if usage_data:
node_span.set_attribute(
SpanAttributes.LLM_TOKEN_COUNT_TOTAL, usage_data.get("total_tokens", 0)
)
node_span.set_attribute(
SpanAttributes.LLM_TOKEN_COUNT_PROMPT, usage_data.get("prompt_tokens", 0)
)
node_span.set_attribute(
SpanAttributes.LLM_TOKEN_COUNT_COMPLETION, usage_data.get("completion_tokens", 0)
)
finally:
node_span.end(end_time=datetime_to_nanos(finished_at))
finally:
workflow_span.end(end_time=datetime_to_nanos(trace_info.end_time))
def message_trace(self, trace_info: MessageTraceInfo):
if trace_info.message_data is None:
return
file_list = cast(list[str], trace_info.file_list) or []
message_file_data: Optional[MessageFile] = trace_info.message_file_data
if message_file_data is not None:
file_url = f"{self.file_base_url}/{message_file_data.url}" if message_file_data else ""
file_list.append(file_url)
message_metadata = {
"message_id": trace_info.message_id or "",
"conversation_mode": str(trace_info.conversation_mode or ""),
"user_id": trace_info.message_data.from_account_id or "",
"file_list": json.dumps(file_list),
"status": trace_info.message_data.status or "",
"status_message": trace_info.error or "",
"level": "ERROR" if trace_info.error else "DEFAULT",
"total_tokens": trace_info.total_tokens or 0,
"prompt_tokens": trace_info.message_tokens or 0,
"completion_tokens": trace_info.answer_tokens or 0,
"ls_provider": trace_info.message_data.model_provider or "",
"ls_model_name": trace_info.message_data.model_id or "",
}
message_metadata.update(trace_info.metadata)
# Add end user data if available
if trace_info.message_data.from_end_user_id:
end_user_data: Optional[EndUser] = (
db.session.query(EndUser).filter(EndUser.id == trace_info.message_data.from_end_user_id).first()
)
if end_user_data is not None:
message_metadata["end_user_id"] = end_user_data.session_id
attributes = {
SpanAttributes.INPUT_VALUE: trace_info.message_data.query,
SpanAttributes.OUTPUT_VALUE: trace_info.message_data.answer,
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.CHAIN.value,
SpanAttributes.METADATA: json.dumps(message_metadata, ensure_ascii=False),
SpanAttributes.SESSION_ID: trace_info.message_data.conversation_id,
}
trace_id = uuid_to_trace_id(trace_info.message_id)
message_span_id = RandomIdGenerator().generate_span_id()
span_context = SpanContext(
trace_id=trace_id,
span_id=message_span_id,
is_remote=False,
trace_flags=TraceFlags(TraceFlags.SAMPLED),
trace_state=TraceState(),
)
message_span = self.tracer.start_span(
name=TraceTaskName.MESSAGE_TRACE.value,
attributes=attributes,
start_time=datetime_to_nanos(trace_info.start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
)
try:
if trace_info.error:
message_span.add_event(
"exception",
attributes={
"exception.message": trace_info.error,
"exception.type": "Error",
"exception.stacktrace": trace_info.error,
},
)
# Convert outputs to string based on type
if isinstance(trace_info.outputs, dict | list):
outputs_str = json.dumps(trace_info.outputs, ensure_ascii=False)
elif isinstance(trace_info.outputs, str):
outputs_str = trace_info.outputs
else:
outputs_str = str(trace_info.outputs)
llm_attributes = {
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.LLM.value,
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
SpanAttributes.OUTPUT_VALUE: outputs_str,
SpanAttributes.METADATA: json.dumps(message_metadata, ensure_ascii=False),
SpanAttributes.SESSION_ID: trace_info.message_data.conversation_id,
}
if isinstance(trace_info.inputs, list):
for i, msg in enumerate(trace_info.inputs):
if isinstance(msg, dict):
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.{i}.message.content"] = msg.get("text", "")
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.{i}.message.role"] = msg.get(
"role", "user"
)
# todo: handle assistant and tool role messages, as they don't always
# have a text field, but may have a tool_calls field instead
# e.g. 'tool_calls': [{'id': '98af3a29-b066-45a5-b4b1-46c74ddafc58',
# 'type': 'function', 'function': {'name': 'current_time', 'arguments': '{}'}}]}
elif isinstance(trace_info.inputs, dict):
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.0.message.content"] = json.dumps(trace_info.inputs)
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.0.message.role"] = "user"
elif isinstance(trace_info.inputs, str):
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.0.message.content"] = trace_info.inputs
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.0.message.role"] = "user"
if trace_info.total_tokens is not None and trace_info.total_tokens > 0:
llm_attributes[SpanAttributes.LLM_TOKEN_COUNT_TOTAL] = trace_info.total_tokens
if trace_info.message_tokens is not None and trace_info.message_tokens > 0:
llm_attributes[SpanAttributes.LLM_TOKEN_COUNT_PROMPT] = trace_info.message_tokens
if trace_info.answer_tokens is not None and trace_info.answer_tokens > 0:
llm_attributes[SpanAttributes.LLM_TOKEN_COUNT_COMPLETION] = trace_info.answer_tokens
if trace_info.message_data.model_id is not None:
llm_attributes[SpanAttributes.LLM_MODEL_NAME] = trace_info.message_data.model_id
if trace_info.message_data.model_provider is not None:
llm_attributes[SpanAttributes.LLM_PROVIDER] = trace_info.message_data.model_provider
if trace_info.message_data and trace_info.message_data.message_metadata:
metadata_dict = json.loads(trace_info.message_data.message_metadata)
if model_params := metadata_dict.get("model_parameters"):
llm_attributes[SpanAttributes.LLM_INVOCATION_PARAMETERS] = json.dumps(model_params)
llm_span = self.tracer.start_span(
name="llm",
attributes=llm_attributes,
start_time=datetime_to_nanos(trace_info.start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
)
try:
if trace_info.error:
llm_span.add_event(
"exception",
attributes={
"exception.message": trace_info.error,
"exception.type": "Error",
"exception.stacktrace": trace_info.error,
},
)
finally:
llm_span.end(end_time=datetime_to_nanos(trace_info.end_time))
finally:
message_span.end(end_time=datetime_to_nanos(trace_info.end_time))
def moderation_trace(self, trace_info: ModerationTraceInfo):
if trace_info.message_data is None:
return
metadata = {
"message_id": trace_info.message_id,
"tool_name": "moderation",
"status": trace_info.message_data.status,
"status_message": trace_info.message_data.error or "",
"level": "ERROR" if trace_info.message_data.error else "DEFAULT",
}
metadata.update(trace_info.metadata)
trace_id = uuid_to_trace_id(trace_info.message_id)
span_id = RandomIdGenerator().generate_span_id()
context = SpanContext(
trace_id=trace_id,
span_id=span_id,
is_remote=False,
trace_flags=TraceFlags(TraceFlags.SAMPLED),
trace_state=TraceState(),
)
span = self.tracer.start_span(
name=TraceTaskName.MODERATION_TRACE.value,
attributes={
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
SpanAttributes.OUTPUT_VALUE: json.dumps(
{
"action": trace_info.action,
"flagged": trace_info.flagged,
"preset_response": trace_info.preset_response,
"inputs": trace_info.inputs,
},
ensure_ascii=False,
),
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.CHAIN.value,
SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
},
start_time=datetime_to_nanos(trace_info.start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
)
try:
if trace_info.message_data.error:
span.add_event(
"exception",
attributes={
"exception.message": trace_info.message_data.error,
"exception.type": "Error",
"exception.stacktrace": trace_info.message_data.error,
},
)
finally:
span.end(end_time=datetime_to_nanos(trace_info.end_time))
def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo):
if trace_info.message_data is None:
return
start_time = trace_info.start_time or trace_info.message_data.created_at
end_time = trace_info.end_time or trace_info.message_data.updated_at
metadata = {
"message_id": trace_info.message_id,
"tool_name": "suggested_question",
"status": trace_info.status,
"status_message": trace_info.error or "",
"level": "ERROR" if trace_info.error else "DEFAULT",
"total_tokens": trace_info.total_tokens,
"ls_provider": trace_info.model_provider or "",
"ls_model_name": trace_info.model_id or "",
}
metadata.update(trace_info.metadata)
trace_id = uuid_to_trace_id(trace_info.message_id)
span_id = RandomIdGenerator().generate_span_id()
context = SpanContext(
trace_id=trace_id,
span_id=span_id,
is_remote=False,
trace_flags=TraceFlags(TraceFlags.SAMPLED),
trace_state=TraceState(),
)
span = self.tracer.start_span(
name=TraceTaskName.SUGGESTED_QUESTION_TRACE.value,
attributes={
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
SpanAttributes.OUTPUT_VALUE: json.dumps(trace_info.suggested_question, ensure_ascii=False),
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.CHAIN.value,
SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
},
start_time=datetime_to_nanos(start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
)
try:
if trace_info.error:
span.add_event(
"exception",
attributes={
"exception.message": trace_info.error,
"exception.type": "Error",
"exception.stacktrace": trace_info.error,
},
)
finally:
span.end(end_time=datetime_to_nanos(end_time))
def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo):
if trace_info.message_data is None:
return
start_time = trace_info.start_time or trace_info.message_data.created_at
end_time = trace_info.end_time or trace_info.message_data.updated_at
metadata = {
"message_id": trace_info.message_id,
"tool_name": "dataset_retrieval",
"status": trace_info.message_data.status,
"status_message": trace_info.message_data.error or "",
"level": "ERROR" if trace_info.message_data.error else "DEFAULT",
"ls_provider": trace_info.message_data.model_provider or "",
"ls_model_name": trace_info.message_data.model_id or "",
}
metadata.update(trace_info.metadata)
trace_id = uuid_to_trace_id(trace_info.message_id)
span_id = RandomIdGenerator().generate_span_id()
context = SpanContext(
trace_id=trace_id,
span_id=span_id,
is_remote=False,
trace_flags=TraceFlags(TraceFlags.SAMPLED),
trace_state=TraceState(),
)
span = self.tracer.start_span(
name=TraceTaskName.DATASET_RETRIEVAL_TRACE.value,
attributes={
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
SpanAttributes.OUTPUT_VALUE: json.dumps({"documents": trace_info.documents}, ensure_ascii=False),
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.RETRIEVER.value,
SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
"start_time": start_time.isoformat() if start_time else "",
"end_time": end_time.isoformat() if end_time else "",
},
start_time=datetime_to_nanos(start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
)
try:
if trace_info.message_data.error:
span.add_event(
"exception",
attributes={
"exception.message": trace_info.message_data.error,
"exception.type": "Error",
"exception.stacktrace": trace_info.message_data.error,
},
)
finally:
span.end(end_time=datetime_to_nanos(end_time))
def tool_trace(self, trace_info: ToolTraceInfo):
if trace_info.message_data is None:
logger.warning("[Arize/Phoenix] Message data is None, skipping tool trace.")
return
metadata = {
"message_id": trace_info.message_id,
"tool_config": json.dumps(trace_info.tool_config, ensure_ascii=False),
}
trace_id = uuid_to_trace_id(trace_info.message_id)
tool_span_id = RandomIdGenerator().generate_span_id()
logger.info(f"[Arize/Phoenix] Creating tool trace with trace_id: {trace_id}, span_id: {tool_span_id}")
# Create span context with the same trace_id as the parent
# todo: Create with the appropriate parent span context, so that the tool span is
# a child of the appropriate span (e.g. message span)
span_context = SpanContext(
trace_id=trace_id,
span_id=tool_span_id,
is_remote=False,
trace_flags=TraceFlags(TraceFlags.SAMPLED),
trace_state=TraceState(),
)
tool_params_str = (
json.dumps(trace_info.tool_parameters, ensure_ascii=False)
if isinstance(trace_info.tool_parameters, dict)
else str(trace_info.tool_parameters)
)
span = self.tracer.start_span(
name=trace_info.tool_name,
attributes={
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.tool_inputs, ensure_ascii=False),
SpanAttributes.OUTPUT_VALUE: trace_info.tool_outputs,
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.TOOL.value,
SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
SpanAttributes.TOOL_NAME: trace_info.tool_name,
SpanAttributes.TOOL_PARAMETERS: tool_params_str,
},
start_time=datetime_to_nanos(trace_info.start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
)
try:
if trace_info.error:
span.add_event(
"exception",
attributes={
"exception.message": trace_info.error,
"exception.type": "Error",
"exception.stacktrace": trace_info.error,
},
)
finally:
span.end(end_time=datetime_to_nanos(trace_info.end_time))
def generate_name_trace(self, trace_info: GenerateNameTraceInfo):
if trace_info.message_data is None:
return
metadata = {
"project_name": self.project,
"message_id": trace_info.message_id,
"status": trace_info.message_data.status,
"status_message": trace_info.message_data.error or "",
"level": "ERROR" if trace_info.message_data.error else "DEFAULT",
}
metadata.update(trace_info.metadata)
trace_id = uuid_to_trace_id(trace_info.message_id)
span_id = RandomIdGenerator().generate_span_id()
context = SpanContext(
trace_id=trace_id,
span_id=span_id,
is_remote=False,
trace_flags=TraceFlags(TraceFlags.SAMPLED),
trace_state=TraceState(),
)
span = self.tracer.start_span(
name=TraceTaskName.GENERATE_NAME_TRACE.value,
attributes={
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
SpanAttributes.OUTPUT_VALUE: json.dumps(trace_info.outputs, ensure_ascii=False),
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.CHAIN.value,
SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
SpanAttributes.SESSION_ID: trace_info.message_data.conversation_id,
"start_time": trace_info.start_time.isoformat() if trace_info.start_time else "",
"end_time": trace_info.end_time.isoformat() if trace_info.end_time else "",
},
start_time=datetime_to_nanos(trace_info.start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
)
try:
if trace_info.message_data.error:
span.add_event(
"exception",
attributes={
"exception.message": trace_info.message_data.error,
"exception.type": "Error",
"exception.stacktrace": trace_info.message_data.error,
},
)
finally:
span.end(end_time=datetime_to_nanos(trace_info.end_time))
def api_check(self):
try:
with self.tracer.start_span("api_check") as span:
span.set_attribute("test", "true")
return True
except Exception as e:
logger.info(f"[Arize/Phoenix] API check failed: {str(e)}", exc_info=True)
raise ValueError(f"[Arize/Phoenix] API check failed: {str(e)}")
def get_project_url(self):
try:
if self.arize_phoenix_config.endpoint == "https://otlp.arize.com":
return "https://app.arize.com/"
else:
return f"{self.arize_phoenix_config.endpoint}/projects/"
except Exception as e:
logger.info(f"[Arize/Phoenix] Get run url failed: {str(e)}", exc_info=True)
raise ValueError(f"[Arize/Phoenix] Get run url failed: {str(e)}")
def _get_workflow_nodes(self, workflow_run_id: str):
"""Helper method to get workflow nodes"""
workflow_nodes = (
db.session.query(
WorkflowNodeExecutionModel.id,
WorkflowNodeExecutionModel.tenant_id,
WorkflowNodeExecutionModel.app_id,
WorkflowNodeExecutionModel.title,
WorkflowNodeExecutionModel.node_type,
WorkflowNodeExecutionModel.status,
WorkflowNodeExecutionModel.inputs,
WorkflowNodeExecutionModel.outputs,
WorkflowNodeExecutionModel.created_at,
WorkflowNodeExecutionModel.elapsed_time,
WorkflowNodeExecutionModel.process_data,
WorkflowNodeExecutionModel.execution_metadata,
)
.filter(WorkflowNodeExecutionModel.workflow_run_id == workflow_run_id)
.all()
)
return workflow_nodes
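A minimal, self-contained sketch of the span-parenting pattern used throughout this file: a SpanContext is assembled from a deterministic trace id plus a fresh span id, wrapped in a NonRecordingSpan, and passed as the parent context so every child span lands in the same trace. The tracer setup and the uuid_to_trace_id helper below are illustrative assumptions, not this module's actual wiring.

# Minimal sketch (assumed wiring, not this module's actual setup): parent an
# OpenTelemetry span under a trace id derived deterministically from a message UUID.
import uuid

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.id_generator import RandomIdGenerator
from opentelemetry.trace import NonRecordingSpan, SpanContext, TraceFlags, TraceState


def uuid_to_trace_id(value: str) -> int:
    # Hypothetical helper: reuse the UUID's 128 bits as the OTel trace id.
    return uuid.UUID(value).int


tracer = TracerProvider().get_tracer("sketch")
message_id = str(uuid.uuid4())
parent_context = SpanContext(
    trace_id=uuid_to_trace_id(message_id),
    span_id=RandomIdGenerator().generate_span_id(),
    is_remote=False,
    trace_flags=TraceFlags(TraceFlags.SAMPLED),
    trace_state=TraceState(),
)
# Any span started with this context shares the message-derived trace id.
with tracer.start_as_current_span(
    "llm", context=trace.set_span_in_context(NonRecordingSpan(parent_context))
) as span:
    span.set_attribute("llm.model_name", "example-model")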

View File

@@ -2,20 +2,92 @@ from enum import StrEnum
from pydantic import BaseModel, ValidationInfo, field_validator
from core.ops.utils import validate_project_name, validate_url, validate_url_with_path
class TracingProviderEnum(StrEnum):
ARIZE = "arize"
PHOENIX = "phoenix"
LANGFUSE = "langfuse"
LANGSMITH = "langsmith"
OPIK = "opik"
WEAVE = "weave"
ALIYUN = "aliyun"
class BaseTracingConfig(BaseModel):
"""
Base model class for tracing
Base model class for tracing configurations
"""
...
@classmethod
def validate_endpoint_url(cls, v: str, default_url: str) -> str:
"""
Common endpoint URL validation logic
Args:
v: URL value to validate
default_url: Default URL to use if input is None or empty
Returns:
Validated and normalized URL
"""
return validate_url(v, default_url)
@classmethod
def validate_project_field(cls, v: str, default_name: str) -> str:
"""
Common project name validation logic
Args:
v: Project name to validate
default_name: Default name to use if input is None or empty
Returns:
Validated project name
"""
return validate_project_name(v, default_name)
class ArizeConfig(BaseTracingConfig):
"""
Model class for Arize tracing config.
"""
api_key: str | None = None
space_id: str | None = None
project: str | None = None
endpoint: str = "https://otlp.arize.com"
@field_validator("project")
@classmethod
def project_validator(cls, v, info: ValidationInfo):
return cls.validate_project_field(v, "default")
@field_validator("endpoint")
@classmethod
def endpoint_validator(cls, v, info: ValidationInfo):
return cls.validate_endpoint_url(v, "https://otlp.arize.com")
class PhoenixConfig(BaseTracingConfig):
"""
Model class for Phoenix tracing config.
"""
api_key: str | None = None
project: str | None = None
endpoint: str = "https://app.phoenix.arize.com"
@field_validator("project")
@classmethod
def project_validator(cls, v, info: ValidationInfo):
return cls.validate_project_field(v, "default")
@field_validator("endpoint")
@classmethod
def endpoint_validator(cls, v, info: ValidationInfo):
return cls.validate_endpoint_url(v, "https://app.phoenix.arize.com")
class LangfuseConfig(BaseTracingConfig):
@@ -29,13 +101,8 @@ class LangfuseConfig(BaseTracingConfig):
@field_validator("host")
@classmethod
def set_value(cls, v, info: ValidationInfo):
if v is None or v == "":
v = "https://api.langfuse.com"
if not v.startswith("https://") and not v.startswith("http://"):
raise ValueError("host must start with https:// or http://")
return v
def host_validator(cls, v, info: ValidationInfo):
return cls.validate_endpoint_url(v, "https://api.langfuse.com")
class LangSmithConfig(BaseTracingConfig):
@@ -49,13 +116,9 @@ class LangSmithConfig(BaseTracingConfig):
@field_validator("endpoint")
@classmethod
def set_value(cls, v, info: ValidationInfo):
if v is None or v == "":
v = "https://api.smith.langchain.com"
if not v.startswith("https://"):
raise ValueError("endpoint must start with https://")
return v
def endpoint_validator(cls, v, info: ValidationInfo):
# LangSmith only allows HTTPS
return validate_url(v, "https://api.smith.langchain.com", allowed_schemes=("https",))
class OpikConfig(BaseTracingConfig):
@@ -71,22 +134,12 @@ class OpikConfig(BaseTracingConfig):
@field_validator("project")
@classmethod
def project_validator(cls, v, info: ValidationInfo):
if v is None or v == "":
v = "Default Project"
return v
return cls.validate_project_field(v, "Default Project")
@field_validator("url")
@classmethod
def url_validator(cls, v, info: ValidationInfo):
if v is None or v == "":
v = "https://www.comet.com/opik/api/"
if not v.startswith(("https://", "http://")):
raise ValueError("url must start with https:// or http://")
if not v.endswith("/api/"):
raise ValueError("url should ends with /api/")
return v
return validate_url_with_path(v, "https://www.comet.com/opik/api/", required_suffix="/api/")
class WeaveConfig(BaseTracingConfig):
@@ -102,22 +155,44 @@ class WeaveConfig(BaseTracingConfig):
@field_validator("endpoint")
@classmethod
def set_value(cls, v, info: ValidationInfo):
if v is None or v == "":
v = "https://trace.wandb.ai"
if not v.startswith("https://"):
raise ValueError("endpoint must start with https://")
return v
def endpoint_validator(cls, v, info: ValidationInfo):
# Weave only allows HTTPS for endpoint
return validate_url(v, "https://trace.wandb.ai", allowed_schemes=("https",))
@field_validator("host")
@classmethod
def validate_host(cls, v, info: ValidationInfo):
if v is not None and v != "":
if not v.startswith(("https://", "http://")):
raise ValueError("host must start with https:// or http://")
def host_validator(cls, v, info: ValidationInfo):
if v is not None and v.strip() != "":
return validate_url(v, v, allowed_schemes=("https", "http"))
return v
class AliyunConfig(BaseTracingConfig):
"""
Model class for Aliyun tracing config.
"""
app_name: str = "dify_app"
license_key: str
endpoint: str
@field_validator("app_name")
@classmethod
def app_name_validator(cls, v, info: ValidationInfo):
return cls.validate_project_field(v, "dify_app")
@field_validator("license_key")
@classmethod
def license_key_validator(cls, v, info: ValidationInfo):
if not v or v.strip() == "":
raise ValueError("License key cannot be empty")
return v
@field_validator("endpoint")
@classmethod
def endpoint_validator(cls, v, info: ValidationInfo):
return cls.validate_endpoint_url(v, "https://tracing-analysis-dc-hz.aliyuncs.com")
OPS_FILE_PATH = "ops_trace/"
OPS_TRACE_FAILED_KEY = "FAILED_OPS_TRACE"
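The validator refactor above centralizes URL and project-name handling in BaseTracingConfig. A condensed, standalone sketch of that pattern follows; ExampleTracingConfig and normalize_url are illustrative stand-ins, not the project's classes.

from urllib.parse import urlparse

from pydantic import BaseModel, field_validator


def normalize_url(value: str, default: str, schemes: tuple = ("https", "http")) -> str:
    # Shared helper in the spirit of validate_url: fall back to a default,
    # reject unexpected schemes, and keep only scheme + host.
    if not value or not value.strip():
        return default
    parsed = urlparse(value)
    if parsed.scheme not in schemes:
        raise ValueError(f"URL scheme must be one of: {', '.join(schemes)}")
    return f"{parsed.scheme}://{parsed.netloc}"


class ExampleTracingConfig(BaseModel):
    project: str | None = None
    endpoint: str = "https://otlp.example.com"

    @field_validator("project")
    @classmethod
    def project_validator(cls, v):
        return v.strip() if v and v.strip() else "default"

    @field_validator("endpoint")
    @classmethod
    def endpoint_validator(cls, v):
        return normalize_url(v, "https://otlp.example.com")


config = ExampleTracingConfig(project=" ", endpoint="https://host.example.com/api/v1")
print(config.project, config.endpoint)  # default https://host.example.com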

View File

@@ -32,6 +32,7 @@ from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.nodes.enums import NodeType
from extensions.ext_database import db
from models import EndUser, WorkflowNodeExecutionTriggeredFrom
from models.enums import MessageStatus
logger = logging.getLogger(__name__)
@@ -180,12 +181,9 @@ class LangFuseDataTrace(BaseTraceInstance):
prompt_tokens = 0
completion_tokens = 0
try:
if outputs.get("usage"):
prompt_tokens = outputs.get("usage", {}).get("prompt_tokens", 0)
completion_tokens = outputs.get("usage", {}).get("completion_tokens", 0)
else:
prompt_tokens = process_data.get("usage", {}).get("prompt_tokens", 0)
completion_tokens = process_data.get("usage", {}).get("completion_tokens", 0)
usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {})
prompt_tokens = usage_data.get("prompt_tokens", 0)
completion_tokens = usage_data.get("completion_tokens", 0)
except Exception:
logger.error("Failed to extract usage", exc_info=True)
@@ -293,7 +291,7 @@ class LangFuseDataTrace(BaseTraceInstance):
input=trace_info.inputs,
output=message_data.answer,
metadata=metadata,
level=(LevelEnum.DEFAULT if message_data.status != "error" else LevelEnum.ERROR),
level=(LevelEnum.DEFAULT if message_data.status != MessageStatus.ERROR else LevelEnum.ERROR),
status_message=message_data.error or "",
usage=generation_usage,
)
@@ -339,7 +337,7 @@ class LangFuseDataTrace(BaseTraceInstance):
start_time=trace_info.start_time,
end_time=trace_info.end_time,
metadata=trace_info.metadata,
level=(LevelEnum.DEFAULT if message_data.status != "error" else LevelEnum.ERROR),
level=(LevelEnum.DEFAULT if message_data.status != MessageStatus.ERROR else LevelEnum.ERROR),
status_message=message_data.error or "",
usage=generation_usage,
)

View File

@@ -206,12 +206,9 @@ class LangSmithDataTrace(BaseTraceInstance):
prompt_tokens = 0
completion_tokens = 0
try:
if outputs.get("usage"):
prompt_tokens = outputs.get("usage", {}).get("prompt_tokens", 0)
completion_tokens = outputs.get("usage", {}).get("completion_tokens", 0)
else:
prompt_tokens = process_data.get("usage", {}).get("prompt_tokens", 0)
completion_tokens = process_data.get("usage", {}).get("completion_tokens", 0)
usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {})
prompt_tokens = usage_data.get("prompt_tokens", 0)
completion_tokens = usage_data.get("completion_tokens", 0)
except Exception:
logger.error("Failed to extract usage", exc_info=True)

View File

@@ -222,10 +222,10 @@ class OpikDataTrace(BaseTraceInstance):
)
try:
if outputs.get("usage"):
total_tokens = outputs["usage"].get("total_tokens", 0)
prompt_tokens = outputs["usage"].get("prompt_tokens", 0)
completion_tokens = outputs["usage"].get("completion_tokens", 0)
usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {})
total_tokens = usage_data.get("total_tokens", 0)
prompt_tokens = usage_data.get("prompt_tokens", 0)
completion_tokens = usage_data.get("completion_tokens", 0)
except Exception:
logger.error("Failed to extract usage", exc_info=True)

View File

@@ -84,6 +84,36 @@ class OpsTraceProviderConfigMap(dict[str, dict[str, Any]]):
"other_keys": ["project", "entity", "endpoint", "host"],
"trace_instance": WeaveDataTrace,
}
case TracingProviderEnum.ARIZE:
from core.ops.arize_phoenix_trace.arize_phoenix_trace import ArizePhoenixDataTrace
from core.ops.entities.config_entity import ArizeConfig
return {
"config_class": ArizeConfig,
"secret_keys": ["api_key", "space_id"],
"other_keys": ["project", "endpoint"],
"trace_instance": ArizePhoenixDataTrace,
}
case TracingProviderEnum.PHOENIX:
from core.ops.arize_phoenix_trace.arize_phoenix_trace import ArizePhoenixDataTrace
from core.ops.entities.config_entity import PhoenixConfig
return {
"config_class": PhoenixConfig,
"secret_keys": ["api_key"],
"other_keys": ["project", "endpoint"],
"trace_instance": ArizePhoenixDataTrace,
}
case TracingProviderEnum.ALIYUN:
from core.ops.aliyun_trace.aliyun_trace import AliyunDataTrace
from core.ops.entities.config_entity import AliyunConfig
return {
"config_class": AliyunConfig,
"secret_keys": ["license_key"],
"other_keys": ["endpoint", "app_name"],
"trace_instance": AliyunDataTrace,
}
case _:
raise KeyError(f"Unsupported tracing provider: {provider}")

View File

@@ -1,6 +1,7 @@
from contextlib import contextmanager
from datetime import datetime
from typing import Optional, Union
from urllib.parse import urlparse
from extensions.ext_database import db
from models.model import Message
@@ -60,3 +61,83 @@ def generate_dotted_order(
return current_segment
return f"{parent_dotted_order}.{current_segment}"
def validate_url(url: str, default_url: str, allowed_schemes: tuple = ("https", "http")) -> str:
"""
Validate and normalize URL with proper error handling
Args:
url: The URL to validate
default_url: Default URL to use if input is None or empty
allowed_schemes: Tuple of allowed URL schemes (default: https, http)
Returns:
Normalized URL string
Raises:
ValueError: If URL format is invalid or scheme not allowed
"""
if not url or url.strip() == "":
return default_url
# Parse URL to validate format
parsed = urlparse(url)
# Check if scheme is allowed
if parsed.scheme not in allowed_schemes:
raise ValueError(f"URL scheme must be one of: {', '.join(allowed_schemes)}")
# Reconstruct URL with only scheme, netloc (removing path, query, fragment)
normalized_url = f"{parsed.scheme}://{parsed.netloc}"
return normalized_url
def validate_url_with_path(url: str, default_url: str, required_suffix: str | None = None) -> str:
"""
Validate URL that may include path components
Args:
url: The URL to validate
default_url: Default URL to use if input is None or empty
required_suffix: Optional suffix that URL must end with
Returns:
Validated URL string
Raises:
ValueError: If URL format is invalid or doesn't match required suffix
"""
if not url or url.strip() == "":
return default_url
# Parse URL to validate format
parsed = urlparse(url)
# Check if scheme is allowed
if parsed.scheme not in ("https", "http"):
raise ValueError("URL must start with https:// or http://")
# Check required suffix if specified
if required_suffix and not url.endswith(required_suffix):
raise ValueError(f"URL should end with {required_suffix}")
return url
def validate_project_name(project: str, default_name: str) -> str:
"""
Validate and normalize project name
Args:
project: Project name to validate
default_name: Default name to use if input is None or empty
Returns:
Normalized project name
"""
if not project or project.strip() == "":
return default_name
return project.strip()
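A short usage sketch for the helpers added above, importing them from core.ops.utils as the config module in this commit does:

from core.ops.utils import validate_project_name, validate_url, validate_url_with_path

print(validate_url("", "https://api.smith.langchain.com"))  # empty input -> default URL
print(validate_url("https://host.example.com/extra/path?q=1", "https://fallback.example"))  # path/query stripped
try:
    validate_url("http://host.example.com", "https://fallback.example", allowed_schemes=("https",))
except ValueError as e:
    print(e)  # http rejected when only https is allowed
print(validate_url_with_path("https://www.comet.com/opik/api/", "https://www.comet.com/opik/api/", required_suffix="/api/"))
print(validate_project_name("  my-project  ", "default"))  # -> my-project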

View File

@@ -1,7 +1,6 @@
"""Document loader helpers."""
import concurrent.futures
from pathlib import Path
from typing import NamedTuple, Optional, cast
@@ -16,7 +15,7 @@ class FileEncoding(NamedTuple):
"""The language of the file."""
def detect_file_encodings(file_path: str, timeout: int = 5) -> list[FileEncoding]:
def detect_file_encodings(file_path: str, timeout: int = 5, sample_size: int = 1024 * 1024) -> list[FileEncoding]:
"""Try to detect the file encoding.
Returns a list of `FileEncoding` tuples with the detected encodings ordered
@@ -25,11 +24,16 @@ def detect_file_encodings(file_path: str, timeout: int = 5) -> list[FileEncoding
Args:
file_path: The path to the file to detect the encoding for.
timeout: The timeout in seconds for the encoding detection.
sample_size: The number of bytes to read for encoding detection. Default is 1MB.
For large files, reading only a sample is sufficient and prevents timeout.
"""
import chardet
def read_and_detect(file_path: str) -> list[dict]:
rawdata = Path(file_path).read_bytes()
with open(file_path, "rb") as f:
# Read only a sample of the file for encoding detection
# This prevents timeout on large files while still providing accurate encoding detection
rawdata = f.read(sample_size)
return cast(list[dict], chardet.detect_all(rawdata))
with concurrent.futures.ThreadPoolExecutor() as executor:
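A standalone sketch of the sampling idea behind the new sample_size parameter, assuming only the chardet dependency already used here: the encoding is detected from the first N bytes instead of the whole file, which keeps detection fast on large inputs.

import chardet

SAMPLE_SIZE = 1024 * 1024  # 1 MB, mirroring the new default


def detect_encodings_from_sample(file_path: str, sample_size: int = SAMPLE_SIZE) -> list[dict]:
    # Read only the first sample_size bytes; chardet.detect_all returns
    # candidate encodings ordered by confidence.
    with open(file_path, "rb") as f:
        raw = f.read(sample_size)
    return chardet.detect_all(raw)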

View File

@@ -36,8 +36,12 @@ class TextExtractor(BaseExtractor):
break
except UnicodeDecodeError:
continue
else:
raise RuntimeError(
f"Decode failed: {self._file_path}, all detected encodings failed. Original error: {e}"
)
else:
raise RuntimeError(f"Error loading {self._file_path}") from e
raise RuntimeError(f"Decode failed: {self._file_path}, specified encoding failed. Original error: {e}")
except Exception as e:
raise RuntimeError(f"Error loading {self._file_path}") from e

View File

@@ -31,6 +31,14 @@ class TTSTool(BuiltinTool):
model_type=ModelType.TTS,
model=model,
)
if not voice:
voices = model_instance.get_tts_voices()
if voices:
voice = voices[0].get("value")
if not voice:
raise ValueError("Sorry, no voice available.")
else:
raise ValueError("Sorry, no voice available.")
tts = model_instance.invoke_tts(
content_text=tool_parameters.get("text"), # type: ignore
user=user_id,

View File

@@ -103,7 +103,7 @@ class GraphEngine:
call_depth: int,
graph: Graph,
graph_config: Mapping[str, Any],
variable_pool: VariablePool,
graph_runtime_state: GraphRuntimeState,
max_execution_steps: int,
max_execution_time: int,
thread_pool_id: Optional[str] = None,
@@ -140,7 +140,7 @@ class GraphEngine:
call_depth=call_depth,
)
self.graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
self.graph_runtime_state = graph_runtime_state
self.max_execution_steps = max_execution_steps
self.max_execution_time = max_execution_time
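A standalone sketch of the dependency-injection change above, using illustrative stand-in classes rather than the real GraphEngine/GraphRuntimeState: callers now construct the runtime state once and hand it to the engine, instead of the engine building its own from a variable pool. The same pattern shows up in the iteration node, loop node, and WorkflowEntry changes later in this commit.

import time
from dataclasses import dataclass, field


@dataclass
class RuntimeState:
    # Illustrative stand-in for GraphRuntimeState.
    variable_pool: dict
    start_at: float
    outputs: dict = field(default_factory=dict)


class Engine:
    # Illustrative stand-in for GraphEngine: state is injected, not created here.
    def __init__(self, graph_runtime_state: RuntimeState) -> None:
        self.graph_runtime_state = graph_runtime_state


state = RuntimeState(variable_pool={"sys.query": "hello"}, start_at=time.perf_counter())
engine = Engine(graph_runtime_state=state)
print(engine.graph_runtime_state.start_at > 0)  # True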

View File

@@ -1,4 +1,5 @@
import json
import uuid
from collections.abc import Generator, Mapping, Sequence
from typing import Any, Optional, cast
@@ -13,7 +14,7 @@ from core.model_runtime.entities.model_entities import AIModelEntity, ModelType
from core.plugin.impl.exc import PluginDaemonClientSideError
from core.plugin.impl.plugin import PluginInstaller
from core.provider_manager import ProviderManager
from core.tools.entities.tool_entities import ToolParameter, ToolProviderType
from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter, ToolProviderType
from core.tools.tool_manager import ToolManager
from core.variables.segments import StringSegment
from core.workflow.entities.node_entities import NodeRunResult
@@ -102,6 +103,32 @@ class AgentNode(ToolNode):
try:
# convert tool messages
agent_thoughts: list = []
thought_log_message = ToolInvokeMessage(
type=ToolInvokeMessage.MessageType.LOG,
message=ToolInvokeMessage.LogMessage(
id=str(uuid.uuid4()),
label=f"Agent Strategy: {cast(AgentNodeData, self.node_data).agent_strategy_name}",
parent_id=None,
error=None,
status=ToolInvokeMessage.LogMessage.LogStatus.START,
data={
"strategy": cast(AgentNodeData, self.node_data).agent_strategy_name,
"parameters": parameters_for_log,
"thought_process": "Agent strategy execution started",
},
metadata={
"icon": self.agent_strategy_icon,
"agent_strategy": cast(AgentNodeData, self.node_data).agent_strategy_name,
},
),
)
def enhanced_message_stream():
yield thought_log_message
yield from message_stream
yield from self._transform_message(
message_stream,
@@ -110,6 +137,7 @@ class AgentNode(ToolNode):
"agent_strategy": cast(AgentNodeData, self.node_data).agent_strategy_name,
},
parameters_for_log,
agent_thoughts,
)
except PluginDaemonClientSideError as e:
yield RunCompletedEvent(
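A standalone sketch of the stream-wrapping trick used for the strategy log above: yield one synthetic "started" entry first, then pass the original generator through unchanged. Names below are illustrative, not the node's real types.

from collections.abc import Generator, Iterable


def with_leading_log(first_message: str, messages: Iterable[str]) -> Generator[str, None, None]:
    # Emit a synthetic "strategy started" entry, then the original stream untouched.
    yield first_message
    yield from messages


for item in with_leading_log("Agent Strategy: example (started)", ["thought", "tool call", "final answer"]):
    print(item)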

View File

@@ -8,6 +8,7 @@ from typing import Any, Literal
from urllib.parse import urlencode, urlparse
import httpx
from json_repair import repair_json
from configs import dify_config
from core.file import file_manager
@@ -178,7 +179,8 @@ class Executor:
raise RequestBodyError("json body type should have exactly one item")
json_string = self.variable_pool.convert_template(data[0].value).text
try:
json_object = json.loads(json_string, strict=False)
repaired = repair_json(json_string)
json_object = json.loads(repaired, strict=False)
except json.JSONDecodeError as e:
raise RequestBodyError(f"Failed to parse JSON: {json_string}") from e
self.json = json_object
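A quick illustration of the repair-then-parse step added above: json_repair.repair_json fixes common template/LLM artifacts such as trailing commas, so json.loads succeeds where it previously raised. The sample input is illustrative.

import json

from json_repair import repair_json

broken = '{"name": "Dify", "tags": ["rag", "workflow"],}'  # trailing comma breaks json.loads
repaired = repair_json(broken)
print(json.loads(repaired, strict=False))  # {'name': 'Dify', 'tags': ['rag', 'workflow']}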

View File

@@ -1,5 +1,6 @@
import contextvars
import logging
import time
import uuid
from collections.abc import Generator, Mapping, Sequence
from concurrent.futures import Future, wait
@@ -133,8 +134,11 @@ class IterationNode(BaseNode[IterationNodeData]):
variable_pool.add([self.node_id, "item"], iterator_list_value[0])
# init graph engine
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.graph_engine.graph_engine import GraphEngine, GraphEngineThreadPool
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
graph_engine = GraphEngine(
tenant_id=self.tenant_id,
app_id=self.app_id,
@@ -146,7 +150,7 @@ class IterationNode(BaseNode[IterationNodeData]):
call_depth=self.workflow_call_depth,
graph=iteration_graph,
graph_config=graph_config,
variable_pool=variable_pool,
graph_runtime_state=graph_runtime_state,
max_execution_steps=dify_config.WORKFLOW_MAX_EXECUTION_STEPS,
max_execution_time=dify_config.WORKFLOW_MAX_EXECUTION_TIME,
thread_pool_id=self.thread_pool_id,

View File

@@ -221,15 +221,6 @@ class LLMNode(BaseNode[LLMNodeData]):
jinja2_variables=self.node_data.prompt_config.jinja2_variables,
)
process_data = {
"model_mode": model_config.mode,
"prompts": PromptMessageUtil.prompt_messages_to_prompt_for_saving(
model_mode=model_config.mode, prompt_messages=prompt_messages
),
"model_provider": model_config.provider,
"model_name": model_config.model,
}
# handle invoke result
generator = self._invoke_llm(
node_data_model=self.node_data.model,
@@ -253,6 +244,17 @@ class LLMNode(BaseNode[LLMNodeData]):
elif isinstance(event, LLMStructuredOutput):
structured_output = event
process_data = {
"model_mode": model_config.mode,
"prompts": PromptMessageUtil.prompt_messages_to_prompt_for_saving(
model_mode=model_config.mode, prompt_messages=prompt_messages
),
"usage": jsonable_encoder(usage),
"finish_reason": finish_reason,
"model_provider": model_config.provider,
"model_name": model_config.model,
}
outputs = {"text": result_text, "usage": jsonable_encoder(usage), "finish_reason": finish_reason}
if structured_output:
outputs["structured_output"] = structured_output.structured_output

View File

@@ -1,5 +1,6 @@
import json
import logging
import time
from collections.abc import Generator, Mapping, Sequence
from datetime import UTC, datetime
from typing import TYPE_CHECKING, Any, Literal, cast
@@ -101,8 +102,11 @@ class LoopNode(BaseNode[LoopNodeData]):
loop_variable_selectors[loop_variable.label] = variable_selector
inputs[loop_variable.label] = processed_segment.value
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.graph_engine.graph_engine import GraphEngine
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
graph_engine = GraphEngine(
tenant_id=self.tenant_id,
app_id=self.app_id,
@@ -114,7 +118,7 @@ class LoopNode(BaseNode[LoopNodeData]):
call_depth=self.workflow_call_depth,
graph=loop_graph,
graph_config=self.graph_config,
variable_pool=variable_pool,
graph_runtime_state=graph_runtime_state,
max_execution_steps=dify_config.WORKFLOW_MAX_EXECUTION_STEPS,
max_execution_time=dify_config.WORKFLOW_MAX_EXECUTION_TIME,
thread_pool_id=self.thread_pool_id,

View File

@@ -253,7 +253,12 @@ class ParameterExtractorNode(BaseNode):
status=WorkflowNodeExecutionStatus.SUCCEEDED,
inputs=inputs,
process_data=process_data,
outputs={"__is_success": 1 if not error else 0, "__reason": error, **result},
outputs={
"__is_success": 1 if not error else 0,
"__reason": error,
"__usage": jsonable_encoder(usage),
**result,
},
metadata={
WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: usage.total_tokens,
WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: usage.total_price,

View File

@@ -145,7 +145,11 @@ class QuestionClassifierNode(LLMNode):
"model_provider": model_config.provider,
"model_name": model_config.model,
}
outputs = {"class_name": category_name, "class_id": category_id}
outputs = {
"class_name": category_name,
"class_id": category_id,
"usage": jsonable_encoder(usage),
}
return NodeRunResult(
status=WorkflowNodeExecutionStatus.SUCCEEDED,

View File

@@ -1,11 +1,12 @@
from collections.abc import Generator, Mapping, Sequence
from typing import Any, cast
from typing import Any, Optional, cast
from sqlalchemy import select
from sqlalchemy.orm import Session
from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler
from core.file import File, FileTransferMethod
from core.model_runtime.entities.llm_entities import LLMUsage
from core.plugin.impl.exc import PluginDaemonClientSideError
from core.plugin.impl.plugin import PluginInstaller
from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter
@@ -190,6 +191,7 @@ class ToolNode(BaseNode[ToolNodeData]):
messages: Generator[ToolInvokeMessage, None, None],
tool_info: Mapping[str, Any],
parameters_for_log: dict[str, Any],
agent_thoughts: Optional[list] = None,
) -> Generator:
"""
Convert ToolInvokeMessages into tuple[plain_text, files]
@@ -208,7 +210,7 @@ class ToolNode(BaseNode[ToolNodeData]):
agent_logs: list[AgentLogEvent] = []
agent_execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] = {}
llm_usage: LLMUsage | None = None
variables: dict[str, Any] = {}
for message in message_stream:
@@ -276,9 +278,10 @@ class ToolNode(BaseNode[ToolNodeData]):
elif message.type == ToolInvokeMessage.MessageType.JSON:
assert isinstance(message.message, ToolInvokeMessage.JsonMessage)
if self.node_type == NodeType.AGENT:
msg_metadata = message.message.json_object.pop("execution_metadata", {})
msg_metadata: dict[str, Any] = message.message.json_object.pop("execution_metadata", {})
llm_usage = LLMUsage.from_metadata(msg_metadata)
agent_execution_metadata = {
key: value
WorkflowNodeExecutionMetadataKey(key): value
for key, value in msg_metadata.items()
if key in WorkflowNodeExecutionMetadataKey.__members__.values()
}
@@ -366,17 +369,42 @@ class ToolNode(BaseNode[ToolNodeData]):
agent_logs.append(agent_log)
yield agent_log
# Add agent_logs to outputs['json'] to ensure frontend can access thinking process
json_output: dict[str, Any] = {}
if json:
if isinstance(json, list) and len(json) == 1:
# If json is a list with only one element, convert it to a dictionary
json_output = json[0] if isinstance(json[0], dict) else {"data": json[0]}
elif isinstance(json, list):
# If json is a list with multiple elements, create a dictionary containing all data
json_output = {"data": json}
if agent_logs:
# Add agent_logs to json output
json_output["agent_logs"] = [
{
"id": log.id,
"parent_id": log.parent_id,
"error": log.error,
"status": log.status,
"data": log.data,
"label": log.label,
"metadata": log.metadata,
"node_id": log.node_id,
}
for log in agent_logs
]
yield RunCompletedEvent(
run_result=NodeRunResult(
status=WorkflowNodeExecutionStatus.SUCCEEDED,
outputs={"text": text, "files": ArrayFileSegment(value=files), "json": json, **variables},
outputs={"text": text, "files": ArrayFileSegment(value=files), "json": json_output, **variables},
metadata={
**agent_execution_metadata,
WorkflowNodeExecutionMetadataKey.TOOL_INFO: tool_info,
WorkflowNodeExecutionMetadataKey.AGENT_LOG: agent_logs,
},
inputs=parameters_for_log,
llm_usage=llm_usage,
)
)
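A standalone restatement of the json normalization above, so the resulting shape of outputs["json"] is easy to see at a glance. normalize_json_output is an illustrative helper, not project code.

from typing import Any


def normalize_json_output(json_items: list, agent_logs: list[dict]) -> dict[str, Any]:
    # Mirror of the branching above: one dict element passes through, anything
    # else is wrapped under "data", and agent logs are attached for the frontend.
    json_output: dict[str, Any] = {}
    if json_items:
        if len(json_items) == 1:
            json_output = json_items[0] if isinstance(json_items[0], dict) else {"data": json_items[0]}
        else:
            json_output = {"data": json_items}
    if agent_logs:
        json_output["agent_logs"] = agent_logs
    return json_output


print(normalize_json_output([{"answer": 42}], []))  # {'answer': 42}
print(normalize_json_output([1, 2, 3], [{"label": "step 1"}]))  # {'data': [1, 2, 3], 'agent_logs': [{'label': 'step 1'}]}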

View File

@@ -69,6 +69,7 @@ class WorkflowEntry:
raise ValueError("Max workflow call depth {} reached.".format(workflow_call_max_depth))
# init workflow run state
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
self.graph_engine = GraphEngine(
tenant_id=tenant_id,
app_id=app_id,
@@ -80,7 +81,7 @@ class WorkflowEntry:
call_depth=call_depth,
graph=graph,
graph_config=graph_config,
variable_pool=variable_pool,
graph_runtime_state=graph_runtime_state,
max_execution_steps=dify_config.WORKFLOW_MAX_EXECUTION_STEPS,
max_execution_time=dify_config.WORKFLOW_MAX_EXECUTION_TIME,
thread_pool_id=thread_pool_id,

View File

@@ -17,6 +17,7 @@ class EnvironmentVariableField(fields.Raw):
"name": value.name,
"value": encrypter.obfuscated_token(value.value),
"value_type": value.value_type.value,
"description": value.description,
}
if isinstance(value, Variable):
return {
@@ -24,6 +25,7 @@
"name": value.name,
"value": value.value,
"value_type": value.value_type.value,
"description": value.description,
}
if isinstance(value, dict):
value_type = value.get("value_type")

View File

@@ -23,3 +23,12 @@ class DraftVariableType(StrEnum):
NODE = "node"
SYS = "sys"
CONVERSATION = "conversation"
class MessageStatus(StrEnum):
"""
Message Status Enum
"""
NORMAL = "normal"
ERROR = "error"

View File

@@ -4,6 +4,7 @@ version = "1.5.1"
requires-python = ">=3.11,<3.13"
dependencies = [
"arize-phoenix-otel~=0.9.2",
"authlib==1.3.1",
"azure-identity==1.16.1",
"beautifulsoup4==4.12.2",

View File

@@ -1,13 +1,17 @@
import io
import logging
import uuid
from collections.abc import Generator
from typing import Optional
from flask import Response, stream_with_context
from werkzeug.datastructures import FileStorage
from constants import AUDIO_EXTENSIONS
from core.model_manager import ModelManager
from core.model_runtime.entities.model_entities import ModelType
from extensions.ext_database import db
from models.enums import MessageStatus
from models.model import App, AppMode, AppModelConfig, Message
from services.errors.audio import (
AudioTooLargeServiceError,
@@ -16,6 +20,7 @@ from services.errors.audio import (
ProviderNotSupportTextToSpeechServiceError,
UnsupportedAudioTypeServiceError,
)
from services.workflow_service import WorkflowService
FILE_SIZE = 30
FILE_SIZE_LIMIT = FILE_SIZE * 1024 * 1024
@@ -74,35 +79,36 @@ class AudioService:
voice: Optional[str] = None,
end_user: Optional[str] = None,
message_id: Optional[str] = None,
is_draft: bool = False,
):
from collections.abc import Generator
from flask import Response, stream_with_context
from app import app
from extensions.ext_database import db
def invoke_tts(text_content: str, app_model: App, voice: Optional[str] = None):
def invoke_tts(text_content: str, app_model: App, voice: Optional[str] = None, is_draft: bool = False):
with app.app_context():
if app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}:
workflow = app_model.workflow
if workflow is None:
raise ValueError("TTS is not enabled")
if voice is None:
if app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}:
if is_draft:
workflow = WorkflowService().get_draft_workflow(app_model=app_model)
else:
workflow = app_model.workflow
if (
workflow is None
or "text_to_speech" not in workflow.features_dict
or not workflow.features_dict["text_to_speech"].get("enabled")
):
raise ValueError("TTS is not enabled")
features_dict = workflow.features_dict
if "text_to_speech" not in features_dict or not features_dict["text_to_speech"].get("enabled"):
raise ValueError("TTS is not enabled")
voice = workflow.features_dict["text_to_speech"].get("voice")
else:
if not is_draft:
if app_model.app_model_config is None:
raise ValueError("AppModelConfig not found")
text_to_speech_dict = app_model.app_model_config.text_to_speech_dict
voice = features_dict["text_to_speech"].get("voice") if voice is None else voice
else:
if app_model.app_model_config is None:
raise ValueError("AppModelConfig not found")
text_to_speech_dict = app_model.app_model_config.text_to_speech_dict
if not text_to_speech_dict.get("enabled"):
raise ValueError("TTS is not enabled")
if not text_to_speech_dict.get("enabled"):
raise ValueError("TTS is not enabled")
voice = text_to_speech_dict.get("voice") if voice is None else voice
voice = text_to_speech_dict.get("voice")
model_manager = ModelManager()
model_instance = model_manager.get_default_model_instance(
@@ -132,18 +138,18 @@ class AudioService:
message = db.session.query(Message).filter(Message.id == message_id).first()
if message is None:
return None
if message.answer == "" and message.status == "normal":
if message.answer == "" and message.status == MessageStatus.NORMAL:
return None
else:
response = invoke_tts(message.answer, app_model=app_model, voice=voice)
response = invoke_tts(text_content=message.answer, app_model=app_model, voice=voice, is_draft=is_draft)
if isinstance(response, Generator):
return Response(stream_with_context(response), content_type="audio/mpeg")
return response
else:
if text is None:
raise ValueError("Text is required")
response = invoke_tts(text, app_model, voice)
response = invoke_tts(text_content=text, app_model=app_model, voice=voice, is_draft=is_draft)
if isinstance(response, Generator):
return Response(stream_with_context(response), content_type="audio/mpeg")
return response
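A condensed, hypothetical restatement of the voice-resolution branching above (resolve_tts_voice is not project code): an explicitly requested voice wins, workflow-based apps read the text_to_speech feature from the draft or published workflow, classic apps read app_model_config, and TTS must be enabled in whichever source applies.

from typing import Optional


def resolve_tts_voice(
    features_dict: Optional[dict],
    text_to_speech_dict: Optional[dict],
    requested_voice: Optional[str] = None,
) -> Optional[str]:
    # Explicit voice from the request wins.
    if requested_voice:
        return requested_voice
    # Workflow apps supply features_dict; classic apps supply text_to_speech_dict.
    source = features_dict.get("text_to_speech") if features_dict is not None else text_to_speech_dict
    if not source or not source.get("enabled"):
        raise ValueError("TTS is not enabled")
    # May still be None; the caller decides how an unset voice is handled downstream.
    return source.get("voice")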

View File

@@ -34,6 +34,24 @@ class OpsService:
)
new_decrypt_tracing_config = OpsTraceManager.obfuscated_decrypt_token(tracing_provider, decrypt_tracing_config)
if tracing_provider == "arize" and (
"project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url")
):
try:
project_url = OpsTraceManager.get_trace_config_project_url(decrypt_tracing_config, tracing_provider)
new_decrypt_tracing_config.update({"project_url": project_url})
except Exception:
new_decrypt_tracing_config.update({"project_url": "https://app.arize.com/"})
if tracing_provider == "phoenix" and (
"project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url")
):
try:
project_url = OpsTraceManager.get_trace_config_project_url(decrypt_tracing_config, tracing_provider)
new_decrypt_tracing_config.update({"project_url": project_url})
except Exception:
new_decrypt_tracing_config.update({"project_url": "https://app.phoenix.arize.com/projects/"})
if tracing_provider == "langfuse" and (
"project_key" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_key")
):
@@ -76,6 +94,16 @@ class OpsService:
new_decrypt_tracing_config.update({"project_url": project_url})
except Exception:
new_decrypt_tracing_config.update({"project_url": "https://wandb.ai/"})
if tracing_provider == "aliyun" and (
"project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url")
):
try:
project_url = OpsTraceManager.get_trace_config_project_url(decrypt_tracing_config, tracing_provider)
new_decrypt_tracing_config.update({"project_url": project_url})
except Exception:
new_decrypt_tracing_config.update({"project_url": "https://arms.console.aliyun.com/"})
trace_config_data.tracing_config = new_decrypt_tracing_config
return trace_config_data.to_dict()
@@ -107,7 +135,9 @@ class OpsService:
return {"error": "Invalid Credentials"}
# get project url
if tracing_provider == "langfuse":
if tracing_provider in ("arize", "phoenix"):
project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider)
elif tracing_provider == "langfuse":
project_key = OpsTraceManager.get_trace_config_project_key(tracing_config, tracing_provider)
project_url = "{host}/project/{key}".format(host=tracing_config.get("host"), key=project_key)
elif tracing_provider in ("langsmith", "opik"):

View File

@@ -1,6 +1,7 @@
import os
from flask import Flask
from packaging.version import Version
from yarl import URL
from configs.app_config import DifyConfig
@@ -40,6 +41,9 @@
assert config.WORKFLOW_PARALLEL_DEPTH_LIMIT == 3
# values from pyproject.toml
assert Version(config.project.version) >= Version("1.0.0")
# NOTE: If there is a `.env` file in your Workspace, this test might not succeed as expected.
# This is due to `pymilvus` loading all the variables from the `.env` file into `os.environ`.

View File

@@ -0,0 +1,194 @@
from unittest.mock import patch
from urllib.parse import parse_qs, urlparse
import pytest
from core.helper.url_signer import SignedUrlParams, UrlSigner
class TestUrlSigner:
"""Test cases for UrlSigner class"""
@patch("configs.dify_config.SECRET_KEY", "test-secret-key-12345")
def test_should_generate_signed_url_params(self):
"""Test generation of signed URL parameters with all required fields"""
sign_key = "test-sign-key"
prefix = "test-prefix"
params = UrlSigner.get_signed_url_params(sign_key, prefix)
# Verify the returned object and required fields
assert isinstance(params, SignedUrlParams)
assert params.sign_key == sign_key
assert params.timestamp is not None
assert params.nonce is not None
assert params.sign is not None
# Verify nonce format (32 character hex string)
assert len(params.nonce) == 32
assert all(c in "0123456789abcdef" for c in params.nonce)
@patch("configs.dify_config.SECRET_KEY", "test-secret-key-12345")
def test_should_generate_complete_signed_url(self):
"""Test generation of complete signed URL with query parameters"""
base_url = "https://example.com/api/test"
sign_key = "test-sign-key"
prefix = "test-prefix"
signed_url = UrlSigner.get_signed_url(base_url, sign_key, prefix)
# Parse URL and verify structure
parsed = urlparse(signed_url)
assert f"{parsed.scheme}://{parsed.netloc}{parsed.path}" == base_url
# Verify query parameters
query_params = parse_qs(parsed.query)
assert "timestamp" in query_params
assert "nonce" in query_params
assert "sign" in query_params
# Verify each parameter has exactly one value
assert len(query_params["timestamp"]) == 1
assert len(query_params["nonce"]) == 1
assert len(query_params["sign"]) == 1
# Verify parameter values are not empty
assert query_params["timestamp"][0]
assert query_params["nonce"][0]
assert query_params["sign"][0]
@patch("configs.dify_config.SECRET_KEY", "test-secret-key-12345")
def test_should_verify_valid_signature(self):
"""Test verification of valid signature"""
sign_key = "test-sign-key"
prefix = "test-prefix"
# Generate and verify signature
params = UrlSigner.get_signed_url_params(sign_key, prefix)
is_valid = UrlSigner.verify(
sign_key=sign_key, timestamp=params.timestamp, nonce=params.nonce, sign=params.sign, prefix=prefix
)
assert is_valid is True
@patch("configs.dify_config.SECRET_KEY", "test-secret-key-12345")
@pytest.mark.parametrize(
("field", "modifier"),
[
("sign_key", lambda _: "wrong-sign-key"),
("timestamp", lambda t: str(int(t) + 1000)),
("nonce", lambda _: "different-nonce-123456789012345"),
("prefix", lambda _: "wrong-prefix"),
("sign", lambda s: s + "tampered"),
],
)
def test_should_reject_invalid_signature_params(self, field, modifier):
"""Test signature verification rejects invalid parameters"""
sign_key = "test-sign-key"
prefix = "test-prefix"
# Generate valid signed parameters
params = UrlSigner.get_signed_url_params(sign_key, prefix)
# Prepare verification parameters
verify_params = {
"sign_key": sign_key,
"timestamp": params.timestamp,
"nonce": params.nonce,
"sign": params.sign,
"prefix": prefix,
}
# Modify the specific field
verify_params[field] = modifier(verify_params[field])
# Verify should fail
is_valid = UrlSigner.verify(**verify_params)
assert is_valid is False
@patch("configs.dify_config.SECRET_KEY", None)
def test_should_raise_error_without_secret_key(self):
"""Test that signing fails when SECRET_KEY is not configured"""
with pytest.raises(Exception) as exc_info:
UrlSigner.get_signed_url_params("key", "prefix")
assert "SECRET_KEY is not set" in str(exc_info.value)
@patch("configs.dify_config.SECRET_KEY", "test-secret-key-12345")
def test_should_generate_unique_signatures(self):
"""Test that different inputs produce different signatures"""
params1 = UrlSigner.get_signed_url_params("key1", "prefix1")
params2 = UrlSigner.get_signed_url_params("key2", "prefix2")
# Different inputs should produce different signatures
assert params1.sign != params2.sign
assert params1.nonce != params2.nonce
@patch("configs.dify_config.SECRET_KEY", "test-secret-key-12345")
def test_should_handle_special_characters(self):
"""Test handling of special characters in parameters"""
special_cases = [
"test with spaces",
"test/with/slashes",
"test中文字符",
]
for sign_key in special_cases:
params = UrlSigner.get_signed_url_params(sign_key, "prefix")
# Should generate valid signature and verify correctly
is_valid = UrlSigner.verify(
sign_key=sign_key, timestamp=params.timestamp, nonce=params.nonce, sign=params.sign, prefix="prefix"
)
assert is_valid is True
@patch("configs.dify_config.SECRET_KEY", "test-secret-key-12345")
def test_should_ensure_nonce_randomness(self):
"""Test that nonce is random for each generation - critical for security"""
sign_key = "test-sign-key"
prefix = "test-prefix"
# Generate multiple nonces
nonces = set()
for _ in range(5):
params = UrlSigner.get_signed_url_params(sign_key, prefix)
nonces.add(params.nonce)
# All nonces should be unique
assert len(nonces) == 5
@patch("configs.dify_config.SECRET_KEY", "test-secret-key-12345")
@patch("time.time", return_value=1234567890)
@patch("os.urandom", return_value=b"\xab\xcd\xef\x12\x34\x56\x78\x90\xab\xcd\xef\x12\x34\x56\x78\x90")
def test_should_produce_consistent_signatures(self, mock_urandom, mock_time):
"""Test that same inputs produce same signature - ensures deterministic behavior"""
sign_key = "test-sign-key"
prefix = "test-prefix"
# Generate signature multiple times with same inputs (time and nonce are mocked)
params1 = UrlSigner.get_signed_url_params(sign_key, prefix)
params2 = UrlSigner.get_signed_url_params(sign_key, prefix)
# With mocked time and random, should produce identical results
assert params1.timestamp == params2.timestamp
assert params1.nonce == params2.nonce
assert params1.sign == params2.sign
# Verify the signature is valid
assert UrlSigner.verify(
sign_key=sign_key, timestamp=params1.timestamp, nonce=params1.nonce, sign=params1.sign, prefix=prefix
)
@patch("configs.dify_config.SECRET_KEY", "test-secret-key-12345")
def test_should_handle_empty_strings(self):
"""Test handling of empty string parameters - common edge case"""
# Empty sign_key and prefix should still work
params = UrlSigner.get_signed_url_params("", "")
assert params.sign is not None
# Should verify correctly
is_valid = UrlSigner.verify(
sign_key="", timestamp=params.timestamp, nonce=params.nonce, sign=params.sign, prefix=""
)
assert is_valid is True
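A short usage sketch of the API these tests exercise, importing from core.helper.url_signer as the test module does and assuming dify_config.SECRET_KEY is configured:

from core.helper.url_signer import UrlSigner

params = UrlSigner.get_signed_url_params("file-123", "image-preview")
assert UrlSigner.verify(
    sign_key="file-123",
    timestamp=params.timestamp,
    nonce=params.nonce,
    sign=params.sign,
    prefix="image-preview",
)
signed_url = UrlSigner.get_signed_url("https://example.com/files/file-123", "file-123", "image-preview")
print(signed_url)  # .../files/file-123?timestamp=...&nonce=...&sign=...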

View File

@@ -0,0 +1 @@
# Unit tests for core ops module

View File

@@ -0,0 +1,385 @@
import pytest
from pydantic import ValidationError
from core.ops.entities.config_entity import (
AliyunConfig,
ArizeConfig,
LangfuseConfig,
LangSmithConfig,
OpikConfig,
PhoenixConfig,
TracingProviderEnum,
WeaveConfig,
)
class TestTracingProviderEnum:
"""Test cases for TracingProviderEnum"""
def test_enum_values(self):
"""Test that all expected enum values are present"""
assert TracingProviderEnum.ARIZE == "arize"
assert TracingProviderEnum.PHOENIX == "phoenix"
assert TracingProviderEnum.LANGFUSE == "langfuse"
assert TracingProviderEnum.LANGSMITH == "langsmith"
assert TracingProviderEnum.OPIK == "opik"
assert TracingProviderEnum.WEAVE == "weave"
assert TracingProviderEnum.ALIYUN == "aliyun"
class TestArizeConfig:
"""Test cases for ArizeConfig"""
def test_valid_config(self):
"""Test valid Arize configuration"""
config = ArizeConfig(
api_key="test_key", space_id="test_space", project="test_project", endpoint="https://custom.arize.com"
)
assert config.api_key == "test_key"
assert config.space_id == "test_space"
assert config.project == "test_project"
assert config.endpoint == "https://custom.arize.com"
def test_default_values(self):
"""Test default values are set correctly"""
config = ArizeConfig()
assert config.api_key is None
assert config.space_id is None
assert config.project is None
assert config.endpoint == "https://otlp.arize.com"
def test_project_validation_empty(self):
"""Test project validation with empty value"""
config = ArizeConfig(project="")
assert config.project == "default"
def test_project_validation_none(self):
"""Test project validation with None value"""
config = ArizeConfig(project=None)
assert config.project == "default"
def test_endpoint_validation_empty(self):
"""Test endpoint validation with empty value"""
config = ArizeConfig(endpoint="")
assert config.endpoint == "https://otlp.arize.com"
def test_endpoint_validation_with_path(self):
"""Test endpoint validation normalizes URL by removing path"""
config = ArizeConfig(endpoint="https://custom.arize.com/api/v1")
assert config.endpoint == "https://custom.arize.com"
def test_endpoint_validation_invalid_scheme(self):
"""Test endpoint validation rejects invalid schemes"""
with pytest.raises(ValidationError, match="URL scheme must be one of"):
ArizeConfig(endpoint="ftp://invalid.com")
def test_endpoint_validation_no_scheme(self):
"""Test endpoint validation rejects URLs without scheme"""
with pytest.raises(ValidationError, match="URL scheme must be one of"):
ArizeConfig(endpoint="invalid.com")
class TestPhoenixConfig:
"""Test cases for PhoenixConfig"""
def test_valid_config(self):
"""Test valid Phoenix configuration"""
config = PhoenixConfig(api_key="test_key", project="test_project", endpoint="https://custom.phoenix.com")
assert config.api_key == "test_key"
assert config.project == "test_project"
assert config.endpoint == "https://custom.phoenix.com"
def test_default_values(self):
"""Test default values are set correctly"""
config = PhoenixConfig()
assert config.api_key is None
assert config.project is None
assert config.endpoint == "https://app.phoenix.arize.com"
def test_project_validation_empty(self):
"""Test project validation with empty value"""
config = PhoenixConfig(project="")
assert config.project == "default"
def test_endpoint_validation_with_path(self):
"""Test endpoint validation normalizes URL by removing path"""
config = PhoenixConfig(endpoint="https://custom.phoenix.com/api/v1")
assert config.endpoint == "https://custom.phoenix.com"
class TestLangfuseConfig:
"""Test cases for LangfuseConfig"""
def test_valid_config(self):
"""Test valid Langfuse configuration"""
config = LangfuseConfig(public_key="public_key", secret_key="secret_key", host="https://custom.langfuse.com")
assert config.public_key == "public_key"
assert config.secret_key == "secret_key"
assert config.host == "https://custom.langfuse.com"
def test_default_values(self):
"""Test default values are set correctly"""
config = LangfuseConfig(public_key="public", secret_key="secret")
assert config.host == "https://api.langfuse.com"
def test_missing_required_fields(self):
"""Test that required fields are enforced"""
with pytest.raises(ValidationError):
LangfuseConfig()
with pytest.raises(ValidationError):
LangfuseConfig(public_key="public")
with pytest.raises(ValidationError):
LangfuseConfig(secret_key="secret")
def test_host_validation_empty(self):
"""Test host validation with empty value"""
config = LangfuseConfig(public_key="public", secret_key="secret", host="")
assert config.host == "https://api.langfuse.com"
class TestLangSmithConfig:
"""Test cases for LangSmithConfig"""
def test_valid_config(self):
"""Test valid LangSmith configuration"""
config = LangSmithConfig(api_key="test_key", project="test_project", endpoint="https://custom.smith.com")
assert config.api_key == "test_key"
assert config.project == "test_project"
assert config.endpoint == "https://custom.smith.com"
def test_default_values(self):
"""Test default values are set correctly"""
config = LangSmithConfig(api_key="key", project="project")
assert config.endpoint == "https://api.smith.langchain.com"
def test_missing_required_fields(self):
"""Test that required fields are enforced"""
with pytest.raises(ValidationError):
LangSmithConfig()
with pytest.raises(ValidationError):
LangSmithConfig(api_key="key")
with pytest.raises(ValidationError):
LangSmithConfig(project="project")
def test_endpoint_validation_https_only(self):
"""Test endpoint validation only allows HTTPS"""
with pytest.raises(ValidationError, match="URL scheme must be one of"):
LangSmithConfig(api_key="key", project="project", endpoint="http://insecure.com")
class TestOpikConfig:
"""Test cases for OpikConfig"""
def test_valid_config(self):
"""Test valid Opik configuration"""
config = OpikConfig(
api_key="test_key",
project="test_project",
workspace="test_workspace",
url="https://custom.comet.com/opik/api/",
)
assert config.api_key == "test_key"
assert config.project == "test_project"
assert config.workspace == "test_workspace"
assert config.url == "https://custom.comet.com/opik/api/"
def test_default_values(self):
"""Test default values are set correctly"""
config = OpikConfig()
assert config.api_key is None
assert config.project is None
assert config.workspace is None
assert config.url == "https://www.comet.com/opik/api/"
def test_project_validation_empty(self):
"""Test project validation with empty value"""
config = OpikConfig(project="")
assert config.project == "Default Project"
def test_url_validation_empty(self):
"""Test URL validation with empty value"""
config = OpikConfig(url="")
assert config.url == "https://www.comet.com/opik/api/"
def test_url_validation_missing_suffix(self):
"""Test URL validation requires /api/ suffix"""
with pytest.raises(ValidationError, match="URL should end with /api/"):
OpikConfig(url="https://custom.comet.com/opik/")
def test_url_validation_invalid_scheme(self):
"""Test URL validation rejects invalid schemes"""
with pytest.raises(ValidationError, match="URL must start with https:// or http://"):
OpikConfig(url="ftp://custom.comet.com/opik/api/")
class TestWeaveConfig:
"""Test cases for WeaveConfig"""
def test_valid_config(self):
"""Test valid Weave configuration"""
config = WeaveConfig(
api_key="test_key",
entity="test_entity",
project="test_project",
endpoint="https://custom.wandb.ai",
host="https://custom.host.com",
)
assert config.api_key == "test_key"
assert config.entity == "test_entity"
assert config.project == "test_project"
assert config.endpoint == "https://custom.wandb.ai"
assert config.host == "https://custom.host.com"
def test_default_values(self):
"""Test default values are set correctly"""
config = WeaveConfig(api_key="key", project="project")
assert config.entity is None
assert config.endpoint == "https://trace.wandb.ai"
assert config.host is None
def test_missing_required_fields(self):
"""Test that required fields are enforced"""
with pytest.raises(ValidationError):
WeaveConfig()
with pytest.raises(ValidationError):
WeaveConfig(api_key="key")
with pytest.raises(ValidationError):
WeaveConfig(project="project")
def test_endpoint_validation_https_only(self):
"""Test endpoint validation only allows HTTPS"""
with pytest.raises(ValidationError, match="URL scheme must be one of"):
WeaveConfig(api_key="key", project="project", endpoint="http://insecure.wandb.ai")
def test_host_validation_optional(self):
"""Test host validation is optional but validates when provided"""
config = WeaveConfig(api_key="key", project="project", host=None)
assert config.host is None
config = WeaveConfig(api_key="key", project="project", host="")
assert config.host == ""
config = WeaveConfig(api_key="key", project="project", host="https://valid.host.com")
assert config.host == "https://valid.host.com"
def test_host_validation_invalid_scheme(self):
"""Test host validation rejects invalid schemes when provided"""
with pytest.raises(ValidationError, match="URL scheme must be one of"):
WeaveConfig(api_key="key", project="project", host="ftp://invalid.host.com")
class TestAliyunConfig:
"""Test cases for AliyunConfig"""
def test_valid_config(self):
"""Test valid Aliyun configuration"""
config = AliyunConfig(
app_name="test_app",
license_key="test_license_key",
endpoint="https://custom.tracing-analysis-dc-hz.aliyuncs.com",
)
assert config.app_name == "test_app"
assert config.license_key == "test_license_key"
assert config.endpoint == "https://custom.tracing-analysis-dc-hz.aliyuncs.com"
def test_default_values(self):
"""Test default values are set correctly"""
config = AliyunConfig(license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com")
assert config.app_name == "dify_app"
def test_missing_required_fields(self):
"""Test that required fields are enforced"""
with pytest.raises(ValidationError):
AliyunConfig()
with pytest.raises(ValidationError):
AliyunConfig(license_key="test_license")
with pytest.raises(ValidationError):
AliyunConfig(endpoint="https://tracing-analysis-dc-hz.aliyuncs.com")
def test_app_name_validation_empty(self):
"""Test app_name validation with empty value"""
config = AliyunConfig(
license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com", app_name=""
)
assert config.app_name == "dify_app"
def test_endpoint_validation_empty(self):
"""Test endpoint validation with empty value"""
config = AliyunConfig(license_key="test_license", endpoint="")
assert config.endpoint == "https://tracing-analysis-dc-hz.aliyuncs.com"
def test_endpoint_validation_with_path(self):
"""Test endpoint validation normalizes URL by removing path"""
config = AliyunConfig(
license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com/api/v1/traces"
)
assert config.endpoint == "https://tracing-analysis-dc-hz.aliyuncs.com"
def test_endpoint_validation_invalid_scheme(self):
"""Test endpoint validation rejects invalid schemes"""
with pytest.raises(ValidationError, match="URL scheme must be one of"):
AliyunConfig(license_key="test_license", endpoint="ftp://invalid.tracing-analysis-dc-hz.aliyuncs.com")
def test_endpoint_validation_no_scheme(self):
"""Test endpoint validation rejects URLs without scheme"""
with pytest.raises(ValidationError, match="URL scheme must be one of"):
AliyunConfig(license_key="test_license", endpoint="invalid.tracing-analysis-dc-hz.aliyuncs.com")
def test_license_key_required(self):
"""Test that license_key is required and cannot be empty"""
with pytest.raises(ValidationError):
AliyunConfig(license_key="", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com")
class TestConfigIntegration:
"""Integration tests for configuration classes"""
def test_all_configs_can_be_instantiated(self):
"""Test that all config classes can be instantiated with valid data"""
configs = [
ArizeConfig(api_key="key"),
PhoenixConfig(api_key="key"),
LangfuseConfig(public_key="public", secret_key="secret"),
LangSmithConfig(api_key="key", project="project"),
OpikConfig(api_key="key"),
WeaveConfig(api_key="key", project="project"),
AliyunConfig(license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com"),
]
for config in configs:
assert config is not None
def test_url_normalization_consistency(self):
"""Test that URL normalization works consistently across configs"""
# Test that paths are removed from endpoints
arize_config = ArizeConfig(endpoint="https://arize.com/api/v1/test")
phoenix_config = PhoenixConfig(endpoint="https://phoenix.com/api/v2/")
aliyun_config = AliyunConfig(
license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com/api/v1/traces"
)
assert arize_config.endpoint == "https://arize.com"
assert phoenix_config.endpoint == "https://phoenix.com"
assert aliyun_config.endpoint == "https://tracing-analysis-dc-hz.aliyuncs.com"
def test_project_default_values(self):
"""Test that project default values are set correctly"""
arize_config = ArizeConfig(project="")
phoenix_config = PhoenixConfig(project="")
opik_config = OpikConfig(project="")
aliyun_config = AliyunConfig(
license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com", app_name=""
)
assert arize_config.project == "default"
assert phoenix_config.project == "default"
assert opik_config.project == "Default Project"
assert aliyun_config.app_name == "dify_app"
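These cases pin down the Opik URL and project validators without showing them. A minimal sketch of a Pydantic v2 config that would satisfy the same expectations follows; the class and method names are illustrative stand-ins, not the actual core.ops implementation.
from pydantic import BaseModel, field_validator
class OpikLikeConfig(BaseModel):
    # Illustrative stand-in for OpikConfig, mirroring the behaviour the tests above expect.
    api_key: str | None = None
    project: str | None = None
    workspace: str | None = None
    url: str = "https://www.comet.com/opik/api/"
    @field_validator("project")
    @classmethod
    def _default_project(cls, v: str | None) -> str | None:
        if v is None:
            return v
        # Empty or whitespace-only project names fall back to a default.
        return v.strip() or "Default Project"
    @field_validator("url")
    @classmethod
    def _check_url(cls, v: str) -> str:
        if not v or not v.strip():
            return "https://www.comet.com/opik/api/"
        if not v.startswith(("https://", "http://")):
            raise ValueError("URL must start with https:// or http://")
        if not v.endswith("/api/"):
            raise ValueError("URL should end with /api/")
        return v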

View File

@ -0,0 +1,138 @@
import pytest
from core.ops.utils import validate_project_name, validate_url, validate_url_with_path
class TestValidateUrl:
"""Test cases for validate_url function"""
def test_valid_https_url(self):
"""Test valid HTTPS URL"""
result = validate_url("https://example.com", "https://default.com")
assert result == "https://example.com"
def test_valid_http_url(self):
"""Test valid HTTP URL"""
result = validate_url("http://example.com", "https://default.com")
assert result == "http://example.com"
def test_url_with_path_removed(self):
"""Test that URL path is removed during normalization"""
result = validate_url("https://example.com/api/v1/test", "https://default.com")
assert result == "https://example.com"
def test_url_with_query_removed(self):
"""Test that URL query parameters are removed"""
result = validate_url("https://example.com?param=value", "https://default.com")
assert result == "https://example.com"
def test_url_with_fragment_removed(self):
"""Test that URL fragments are removed"""
result = validate_url("https://example.com#section", "https://default.com")
assert result == "https://example.com"
def test_empty_url_returns_default(self):
"""Test empty URL returns default"""
result = validate_url("", "https://default.com")
assert result == "https://default.com"
def test_none_url_returns_default(self):
"""Test None URL returns default"""
result = validate_url(None, "https://default.com")
assert result == "https://default.com"
def test_whitespace_url_returns_default(self):
"""Test whitespace URL returns default"""
result = validate_url(" ", "https://default.com")
assert result == "https://default.com"
def test_invalid_scheme_raises_error(self):
"""Test invalid scheme raises ValueError"""
with pytest.raises(ValueError, match="URL scheme must be one of"):
validate_url("ftp://example.com", "https://default.com")
def test_no_scheme_raises_error(self):
"""Test URL without scheme raises ValueError"""
with pytest.raises(ValueError, match="URL scheme must be one of"):
validate_url("example.com", "https://default.com")
def test_custom_allowed_schemes(self):
"""Test custom allowed schemes"""
result = validate_url("https://example.com", "https://default.com", allowed_schemes=("https",))
assert result == "https://example.com"
with pytest.raises(ValueError, match="URL scheme must be one of"):
validate_url("http://example.com", "https://default.com", allowed_schemes=("https",))
class TestValidateUrlWithPath:
"""Test cases for validate_url_with_path function"""
def test_valid_url_with_path(self):
"""Test valid URL with path"""
result = validate_url_with_path("https://example.com/api/v1", "https://default.com")
assert result == "https://example.com/api/v1"
def test_valid_url_with_required_suffix(self):
"""Test valid URL with required suffix"""
result = validate_url_with_path("https://example.com/api/", "https://default.com", required_suffix="/api/")
assert result == "https://example.com/api/"
def test_url_without_required_suffix_raises_error(self):
"""Test URL without required suffix raises error"""
with pytest.raises(ValueError, match="URL should end with /api/"):
validate_url_with_path("https://example.com/api", "https://default.com", required_suffix="/api/")
def test_empty_url_returns_default(self):
"""Test empty URL returns default"""
result = validate_url_with_path("", "https://default.com")
assert result == "https://default.com"
def test_none_url_returns_default(self):
"""Test None URL returns default"""
result = validate_url_with_path(None, "https://default.com")
assert result == "https://default.com"
def test_invalid_scheme_raises_error(self):
"""Test invalid scheme raises ValueError"""
with pytest.raises(ValueError, match="URL must start with https:// or http://"):
validate_url_with_path("ftp://example.com", "https://default.com")
def test_no_scheme_raises_error(self):
"""Test URL without scheme raises ValueError"""
with pytest.raises(ValueError, match="URL must start with https:// or http://"):
validate_url_with_path("example.com", "https://default.com")
class TestValidateProjectName:
"""Test cases for validate_project_name function"""
def test_valid_project_name(self):
"""Test valid project name"""
result = validate_project_name("my-project", "default")
assert result == "my-project"
def test_empty_project_name_returns_default(self):
"""Test empty project name returns default"""
result = validate_project_name("", "default")
assert result == "default"
def test_none_project_name_returns_default(self):
"""Test None project name returns default"""
result = validate_project_name(None, "default")
assert result == "default"
def test_whitespace_project_name_returns_default(self):
"""Test whitespace project name returns default"""
result = validate_project_name(" ", "default")
assert result == "default"
def test_project_name_with_whitespace_trimmed(self):
"""Test project name with whitespace is trimmed"""
result = validate_project_name(" my-project ", "default")
assert result == "my-project"
def test_custom_default_name(self):
"""Test custom default name"""
result = validate_project_name("", "Custom Default")
assert result == "Custom Default"
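Taken together, these cases describe the normalisation contract of validate_url. A rough sketch of a function with the same behaviour, inferred from the tests rather than copied from core.ops.utils, looks like this:
from urllib.parse import urlparse
def validate_url_sketch(url: str | None, default_url: str, allowed_schemes: tuple[str, ...] = ("https", "http")) -> str:
    # Return the default when nothing usable was supplied.
    if url is None or not url.strip():
        return default_url
    parsed = urlparse(url.strip())
    if parsed.scheme not in allowed_schemes:
        raise ValueError(f"URL scheme must be one of {allowed_schemes}")
    # Keep only scheme and host: path, query and fragment are dropped.
    return f"{parsed.scheme}://{parsed.netloc}"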

View File

@ -1,3 +1,4 @@
import time
from unittest.mock import patch
import pytest
@ -19,6 +20,7 @@ from core.workflow.graph_engine.entities.event import (
NodeRunSucceededEvent,
)
from core.workflow.graph_engine.entities.graph import Graph
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.graph_engine.entities.runtime_route_state import RouteNodeState
from core.workflow.graph_engine.graph_engine import GraphEngine
from core.workflow.nodes.code.code_node import CodeNode
@ -172,6 +174,7 @@ def test_run_parallel_in_workflow(mock_close, mock_remove):
system_variables={SystemVariableKey.FILES: [], SystemVariableKey.USER_ID: "aaa"}, user_inputs={"query": "hi"}
)
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
graph_engine = GraphEngine(
tenant_id="111",
app_id="222",
@ -183,7 +186,7 @@ def test_run_parallel_in_workflow(mock_close, mock_remove):
invoke_from=InvokeFrom.WEB_APP,
call_depth=0,
graph=graph,
variable_pool=variable_pool,
graph_runtime_state=graph_runtime_state,
max_execution_steps=500,
max_execution_time=1200,
)
@ -299,6 +302,7 @@ def test_run_parallel_in_chatflow(mock_close, mock_remove):
user_inputs={},
)
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
graph_engine = GraphEngine(
tenant_id="111",
app_id="222",
@ -310,7 +314,7 @@ def test_run_parallel_in_chatflow(mock_close, mock_remove):
invoke_from=InvokeFrom.WEB_APP,
call_depth=0,
graph=graph,
variable_pool=variable_pool,
graph_runtime_state=graph_runtime_state,
max_execution_steps=500,
max_execution_time=1200,
)
@ -479,6 +483,7 @@ def test_run_branch(mock_close, mock_remove):
user_inputs={"uid": "takato"},
)
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
graph_engine = GraphEngine(
tenant_id="111",
app_id="222",
@ -490,7 +495,7 @@ def test_run_branch(mock_close, mock_remove):
invoke_from=InvokeFrom.WEB_APP,
call_depth=0,
graph=graph,
variable_pool=variable_pool,
graph_runtime_state=graph_runtime_state,
max_execution_steps=500,
max_execution_time=1200,
)
@ -813,6 +818,7 @@ def test_condition_parallel_correct_output(mock_close, mock_remove, app):
system_variables={SystemVariableKey.FILES: [], SystemVariableKey.USER_ID: "aaa"}, user_inputs={"query": "hi"}
)
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
graph_engine = GraphEngine(
tenant_id="111",
app_id="222",
@ -824,7 +830,7 @@ def test_condition_parallel_correct_output(mock_close, mock_remove, app):
invoke_from=InvokeFrom.WEB_APP,
call_depth=0,
graph=graph,
variable_pool=variable_pool,
graph_runtime_state=graph_runtime_state,
max_execution_steps=500,
max_execution_time=1200,
)

View File

@ -1,7 +1,9 @@
import time
from unittest.mock import patch
from core.app.entities.app_invoke_entities import InvokeFrom
from core.workflow.entities.node_entities import NodeRunResult, WorkflowNodeExecutionMetadataKey
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus
from core.workflow.enums import SystemVariableKey
from core.workflow.graph_engine.entities.event import (
@ -11,6 +13,7 @@ from core.workflow.graph_engine.entities.event import (
NodeRunStreamChunkEvent,
)
from core.workflow.graph_engine.entities.graph import Graph
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.graph_engine.graph_engine import GraphEngine
from core.workflow.nodes.event.event import RunCompletedEvent, RunStreamChunkEvent
from core.workflow.nodes.llm.node import LLMNode
@ -163,15 +166,16 @@ class ContinueOnErrorTestHelper:
def create_test_graph_engine(graph_config: dict, user_inputs: dict | None = None):
"""Helper method to create a graph engine instance for testing"""
graph = Graph.init(graph_config=graph_config)
variable_pool = {
"system_variables": {
variable_pool = VariablePool(
system_variables={
SystemVariableKey.QUERY: "clear",
SystemVariableKey.FILES: [],
SystemVariableKey.CONVERSATION_ID: "abababa",
SystemVariableKey.USER_ID: "aaa",
},
"user_inputs": user_inputs or {"uid": "takato"},
}
user_inputs=user_inputs or {"uid": "takato"},
)
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
return GraphEngine(
tenant_id="111",
@ -184,7 +188,7 @@ class ContinueOnErrorTestHelper:
invoke_from=InvokeFrom.WEB_APP,
call_depth=0,
graph=graph,
variable_pool=variable_pool,
graph_runtime_state=graph_runtime_state,
max_execution_steps=500,
max_execution_time=1200,
)

View File

@ -0,0 +1,74 @@
import base64
import binascii
import os
import pytest
from libs.password import compare_password, hash_password, valid_password
class TestValidPassword:
"""Test password format validation"""
def test_should_accept_valid_passwords(self):
"""Test accepting valid password formats"""
assert valid_password("password123") == "password123"
assert valid_password("test1234") == "test1234"
assert valid_password("Test123456") == "Test123456"
def test_should_reject_invalid_passwords(self):
"""Test rejecting invalid password formats"""
# Too short
with pytest.raises(ValueError) as exc_info:
valid_password("abc123")
assert "Password must contain letters and numbers" in str(exc_info.value)
# No numbers
with pytest.raises(ValueError):
valid_password("abcdefgh")
# No letters
with pytest.raises(ValueError):
valid_password("12345678")
# Empty
with pytest.raises(ValueError):
valid_password("")
class TestPasswordHashing:
"""Test password hashing and comparison"""
def setup_method(self):
"""Setup test data"""
self.password = "test123password"
self.salt = os.urandom(16)
self.salt_base64 = base64.b64encode(self.salt).decode()
password_hash = hash_password(self.password, self.salt)
self.password_hash_base64 = base64.b64encode(password_hash).decode()
def test_should_verify_correct_password(self):
"""Test correct password verification"""
result = compare_password(self.password, self.password_hash_base64, self.salt_base64)
assert result is True
def test_should_reject_wrong_password(self):
"""Test rejection of incorrect passwords"""
result = compare_password("wrongpassword", self.password_hash_base64, self.salt_base64)
assert result is False
def test_should_handle_invalid_base64(self):
"""Test handling of invalid base64 data"""
# Invalid base64 hash
with pytest.raises(binascii.Error):
compare_password(self.password, "invalid_base64!", self.salt_base64)
# Invalid base64 salt
with pytest.raises(binascii.Error):
compare_password(self.password, self.password_hash_base64, "invalid_base64!")
def test_should_be_case_sensitive(self):
"""Test password case sensitivity"""
result = compare_password(self.password.upper(), self.password_hash_base64, self.salt_base64)
assert result is False
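For reference, the round trip these tests exercise, hashing with a random salt and storing both values base64-encoded before comparing, looks roughly like this:
import base64
import os
from libs.password import compare_password, hash_password
salt = os.urandom(16)
password_hash = hash_password("test123password", salt)
# Hash and salt are persisted base64-encoded, exactly as the tests store them.
stored_hash = base64.b64encode(password_hash).decode()
stored_salt = base64.b64encode(salt).decode()
assert compare_password("test123password", stored_hash, stored_salt) is True
assert compare_password("wrongpassword", stored_hash, stored_salt) is False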

View File

@ -0,0 +1,465 @@
from decimal import Decimal
from unittest.mock import MagicMock, patch
import pytest
from core.llm_generator.output_parser.errors import OutputParserError
from core.llm_generator.output_parser.structured_output import invoke_llm_with_structured_output
from core.model_runtime.entities.llm_entities import (
LLMResult,
LLMResultChunk,
LLMResultChunkDelta,
LLMResultChunkWithStructuredOutput,
LLMResultWithStructuredOutput,
LLMUsage,
)
from core.model_runtime.entities.message_entities import (
AssistantPromptMessage,
SystemPromptMessage,
TextPromptMessageContent,
UserPromptMessage,
)
from core.model_runtime.entities.model_entities import AIModelEntity, ModelType
def create_mock_usage(prompt_tokens: int = 10, completion_tokens: int = 5) -> LLMUsage:
"""Create a mock LLMUsage with all required fields"""
return LLMUsage(
prompt_tokens=prompt_tokens,
prompt_unit_price=Decimal("0.001"),
prompt_price_unit=Decimal("1"),
prompt_price=Decimal(str(prompt_tokens)) * Decimal("0.001"),
completion_tokens=completion_tokens,
completion_unit_price=Decimal("0.002"),
completion_price_unit=Decimal("1"),
completion_price=Decimal(str(completion_tokens)) * Decimal("0.002"),
total_tokens=prompt_tokens + completion_tokens,
total_price=Decimal(str(prompt_tokens)) * Decimal("0.001") + Decimal(str(completion_tokens)) * Decimal("0.002"),
currency="USD",
latency=1.5,
)
def get_model_entity(provider: str, model_name: str, support_structure_output: bool = False) -> AIModelEntity:
"""Create a mock AIModelEntity for testing"""
model_schema = MagicMock()
model_schema.model = model_name
model_schema.provider = provider
model_schema.model_type = ModelType.LLM
model_schema.model_provider = provider
model_schema.model_name = model_name
model_schema.support_structure_output = support_structure_output
model_schema.parameter_rules = []
return model_schema
def get_model_instance() -> MagicMock:
"""Create a mock ModelInstance for testing"""
mock_instance = MagicMock()
mock_instance.provider = "openai"
mock_instance.credentials = {}
return mock_instance
def test_structured_output_parser():
"""Test cases for invoke_llm_with_structured_output function"""
testcases = [
# Test case 1: Model with native structured output support, non-streaming
{
"name": "native_structured_output_non_streaming",
"provider": "openai",
"model_name": "gpt-4o",
"support_structure_output": True,
"stream": False,
"json_schema": {"type": "object", "properties": {"name": {"type": "string"}}},
"expected_llm_response": LLMResult(
model="gpt-4o",
message=AssistantPromptMessage(content='{"name": "test"}'),
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
),
"expected_result_type": LLMResultWithStructuredOutput,
"should_raise": False,
},
# Test case 2: Model with native structured output support, streaming
{
"name": "native_structured_output_streaming",
"provider": "openai",
"model_name": "gpt-4o",
"support_structure_output": True,
"stream": True,
"json_schema": {"type": "object", "properties": {"name": {"type": "string"}}},
"expected_llm_response": [
LLMResultChunk(
model="gpt-4o",
prompt_messages=[UserPromptMessage(content="test")],
system_fingerprint="test",
delta=LLMResultChunkDelta(
index=0,
message=AssistantPromptMessage(content='{"name":'),
usage=create_mock_usage(prompt_tokens=10, completion_tokens=2),
),
),
LLMResultChunk(
model="gpt-4o",
prompt_messages=[UserPromptMessage(content="test")],
system_fingerprint="test",
delta=LLMResultChunkDelta(
index=0,
message=AssistantPromptMessage(content=' "test"}'),
usage=create_mock_usage(prompt_tokens=10, completion_tokens=3),
),
),
],
"expected_result_type": "generator",
"should_raise": False,
},
# Test case 3: Model without native structured output support, non-streaming
{
"name": "prompt_based_structured_output_non_streaming",
"provider": "anthropic",
"model_name": "claude-3-sonnet",
"support_structure_output": False,
"stream": False,
"json_schema": {"type": "object", "properties": {"answer": {"type": "string"}}},
"expected_llm_response": LLMResult(
model="claude-3-sonnet",
message=AssistantPromptMessage(content='{"answer": "test response"}'),
usage=create_mock_usage(prompt_tokens=15, completion_tokens=8),
),
"expected_result_type": LLMResultWithStructuredOutput,
"should_raise": False,
},
# Test case 4: Model without native structured output support, streaming
{
"name": "prompt_based_structured_output_streaming",
"provider": "anthropic",
"model_name": "claude-3-sonnet",
"support_structure_output": False,
"stream": True,
"json_schema": {"type": "object", "properties": {"answer": {"type": "string"}}},
"expected_llm_response": [
LLMResultChunk(
model="claude-3-sonnet",
prompt_messages=[UserPromptMessage(content="test")],
system_fingerprint="test",
delta=LLMResultChunkDelta(
index=0,
message=AssistantPromptMessage(content='{"answer": "test'),
usage=create_mock_usage(prompt_tokens=15, completion_tokens=3),
),
),
LLMResultChunk(
model="claude-3-sonnet",
prompt_messages=[UserPromptMessage(content="test")],
system_fingerprint="test",
delta=LLMResultChunkDelta(
index=0,
message=AssistantPromptMessage(content=' response"}'),
usage=create_mock_usage(prompt_tokens=15, completion_tokens=5),
),
),
],
"expected_result_type": "generator",
"should_raise": False,
},
# Test case 5: Streaming with list content
{
"name": "streaming_with_list_content",
"provider": "openai",
"model_name": "gpt-4o",
"support_structure_output": True,
"stream": True,
"json_schema": {"type": "object", "properties": {"data": {"type": "string"}}},
"expected_llm_response": [
LLMResultChunk(
model="gpt-4o",
prompt_messages=[UserPromptMessage(content="test")],
system_fingerprint="test",
delta=LLMResultChunkDelta(
index=0,
message=AssistantPromptMessage(
content=[
TextPromptMessageContent(data='{"data":'),
]
),
usage=create_mock_usage(prompt_tokens=10, completion_tokens=2),
),
),
LLMResultChunk(
model="gpt-4o",
prompt_messages=[UserPromptMessage(content="test")],
system_fingerprint="test",
delta=LLMResultChunkDelta(
index=0,
message=AssistantPromptMessage(
content=[
TextPromptMessageContent(data=' "value"}'),
]
),
usage=create_mock_usage(prompt_tokens=10, completion_tokens=3),
),
),
],
"expected_result_type": "generator",
"should_raise": False,
},
# Test case 6: Error case - non-string LLM response content (non-streaming)
{
"name": "error_non_string_content_non_streaming",
"provider": "openai",
"model_name": "gpt-4o",
"support_structure_output": True,
"stream": False,
"json_schema": {"type": "object", "properties": {"name": {"type": "string"}}},
"expected_llm_response": LLMResult(
model="gpt-4o",
message=AssistantPromptMessage(content=None), # Non-string content
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
),
"expected_result_type": None,
"should_raise": True,
"expected_error": OutputParserError,
},
# Test case 7: JSON repair scenario
{
"name": "json_repair_scenario",
"provider": "openai",
"model_name": "gpt-4o",
"support_structure_output": True,
"stream": False,
"json_schema": {"type": "object", "properties": {"name": {"type": "string"}}},
"expected_llm_response": LLMResult(
model="gpt-4o",
message=AssistantPromptMessage(content='{"name": "test"'), # Invalid JSON - missing closing brace
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
),
"expected_result_type": LLMResultWithStructuredOutput,
"should_raise": False,
},
# Test case 8: Model with parameter rules for response format
{
"name": "model_with_parameter_rules",
"provider": "openai",
"model_name": "gpt-4o",
"support_structure_output": True,
"stream": False,
"json_schema": {"type": "object", "properties": {"result": {"type": "string"}}},
"parameter_rules": [
MagicMock(name="response_format", options=["json_schema"], required=False),
],
"expected_llm_response": LLMResult(
model="gpt-4o",
message=AssistantPromptMessage(content='{"result": "success"}'),
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
),
"expected_result_type": LLMResultWithStructuredOutput,
"should_raise": False,
},
# Test case 9: Model without native support but with JSON response format rules
{
"name": "non_native_with_json_rules",
"provider": "anthropic",
"model_name": "claude-3-sonnet",
"support_structure_output": False,
"stream": False,
"json_schema": {"type": "object", "properties": {"output": {"type": "string"}}},
"parameter_rules": [
MagicMock(name="response_format", options=["JSON"], required=False),
],
"expected_llm_response": LLMResult(
model="claude-3-sonnet",
message=AssistantPromptMessage(content='{"output": "result"}'),
usage=create_mock_usage(prompt_tokens=15, completion_tokens=8),
),
"expected_result_type": LLMResultWithStructuredOutput,
"should_raise": False,
},
]
for case in testcases:
print(f"Running test case: {case['name']}")
# Setup model entity
model_schema = get_model_entity(case["provider"], case["model_name"], case["support_structure_output"])
# Add parameter rules if specified
if "parameter_rules" in case:
model_schema.parameter_rules = case["parameter_rules"]
# Setup model instance
model_instance = get_model_instance()
model_instance.invoke_llm.return_value = case["expected_llm_response"]
# Setup prompt messages
prompt_messages = [
SystemPromptMessage(content="You are a helpful assistant."),
UserPromptMessage(content="Generate a response according to the schema."),
]
if case["should_raise"]:
# Test error cases
with pytest.raises(case["expected_error"]): # noqa: PT012
if case["stream"]:
result_generator = invoke_llm_with_structured_output(
provider=case["provider"],
model_schema=model_schema,
model_instance=model_instance,
prompt_messages=prompt_messages,
json_schema=case["json_schema"],
stream=case["stream"],
)
# Consume the generator to trigger the error
list(result_generator)
else:
invoke_llm_with_structured_output(
provider=case["provider"],
model_schema=model_schema,
model_instance=model_instance,
prompt_messages=prompt_messages,
json_schema=case["json_schema"],
stream=case["stream"],
)
else:
# Test successful cases
with patch("core.llm_generator.output_parser.structured_output.json_repair.loads") as mock_json_repair:
# Configure json_repair mock for cases that need it
if case["name"] == "json_repair_scenario":
mock_json_repair.return_value = {"name": "test"}
result = invoke_llm_with_structured_output(
provider=case["provider"],
model_schema=model_schema,
model_instance=model_instance,
prompt_messages=prompt_messages,
json_schema=case["json_schema"],
stream=case["stream"],
model_parameters={"temperature": 0.7, "max_tokens": 100},
user="test_user",
)
if case["expected_result_type"] == "generator":
# Test streaming results
assert hasattr(result, "__iter__")
chunks = list(result)
assert len(chunks) > 0
# Verify all chunks are LLMResultChunkWithStructuredOutput
for chunk in chunks[:-1]: # All except last
assert isinstance(chunk, LLMResultChunkWithStructuredOutput)
assert chunk.model == case["model_name"]
# Last chunk should have structured output
last_chunk = chunks[-1]
assert isinstance(last_chunk, LLMResultChunkWithStructuredOutput)
assert last_chunk.structured_output is not None
assert isinstance(last_chunk.structured_output, dict)
else:
# Test non-streaming results
assert isinstance(result, case["expected_result_type"])
assert result.model == case["model_name"]
assert result.structured_output is not None
assert isinstance(result.structured_output, dict)
# Verify model_instance.invoke_llm was called with correct parameters
model_instance.invoke_llm.assert_called_once()
call_args = model_instance.invoke_llm.call_args
assert call_args.kwargs["stream"] == case["stream"]
assert call_args.kwargs["user"] == "test_user"
assert "temperature" in call_args.kwargs["model_parameters"]
assert "max_tokens" in call_args.kwargs["model_parameters"]
def test_parse_structured_output_edge_cases():
"""Test edge cases for structured output parsing"""
# Test case with list that contains dict (reasoning model scenario)
testcase_list_with_dict = {
"name": "list_with_dict_parsing",
"provider": "deepseek",
"model_name": "deepseek-r1",
"support_structure_output": False,
"stream": False,
"json_schema": {"type": "object", "properties": {"thought": {"type": "string"}}},
"expected_llm_response": LLMResult(
model="deepseek-r1",
message=AssistantPromptMessage(content='[{"thought": "reasoning process"}, "other content"]'),
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
),
"expected_result_type": LLMResultWithStructuredOutput,
"should_raise": False,
}
# Setup for list parsing test
model_schema = get_model_entity(
testcase_list_with_dict["provider"],
testcase_list_with_dict["model_name"],
testcase_list_with_dict["support_structure_output"],
)
model_instance = get_model_instance()
model_instance.invoke_llm.return_value = testcase_list_with_dict["expected_llm_response"]
prompt_messages = [UserPromptMessage(content="Test reasoning")]
with patch("core.llm_generator.output_parser.structured_output.json_repair.loads") as mock_json_repair:
# Mock json_repair to return a list with dict
mock_json_repair.return_value = [{"thought": "reasoning process"}, "other content"]
result = invoke_llm_with_structured_output(
provider=testcase_list_with_dict["provider"],
model_schema=model_schema,
model_instance=model_instance,
prompt_messages=prompt_messages,
json_schema=testcase_list_with_dict["json_schema"],
stream=testcase_list_with_dict["stream"],
)
assert isinstance(result, LLMResultWithStructuredOutput)
assert result.structured_output == {"thought": "reasoning process"}
def test_model_specific_schema_preparation():
"""Test schema preparation for different model types"""
# Test Gemini model
gemini_case = {
"provider": "google",
"model_name": "gemini-pro",
"support_structure_output": True,
"stream": False,
"json_schema": {"type": "object", "properties": {"result": {"type": "boolean"}}, "additionalProperties": False},
}
model_schema = get_model_entity(
gemini_case["provider"], gemini_case["model_name"], gemini_case["support_structure_output"]
)
model_instance = get_model_instance()
model_instance.invoke_llm.return_value = LLMResult(
model="gemini-pro",
message=AssistantPromptMessage(content='{"result": "true"}'),
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
)
prompt_messages = [UserPromptMessage(content="Test")]
result = invoke_llm_with_structured_output(
provider=gemini_case["provider"],
model_schema=model_schema,
model_instance=model_instance,
prompt_messages=prompt_messages,
json_schema=gemini_case["json_schema"],
stream=gemini_case["stream"],
)
assert isinstance(result, LLMResultWithStructuredOutput)
# Verify model_instance.invoke_llm was called and check the schema preparation
model_instance.invoke_llm.assert_called_once()
call_args = model_instance.invoke_llm.call_args
# For Gemini, the schema should not have additionalProperties and boolean should be converted to string
assert "json_schema" in call_args.kwargs["model_parameters"]

File diff suppressed because it is too large

View File

@ -826,6 +826,9 @@ MAX_ITERATIONS_NUM=99
# The timeout for the text generation in millisecond
TEXT_GENERATION_TIMEOUT_MS=60000
# Allow rendering unsafe URLs which have "data:" scheme.
ALLOW_UNSAFE_DATA_SCHEME=false
# ------------------------------
# Environment Variables for db Service
# ------------------------------
@ -958,7 +961,7 @@ NGINX_SSL_PROTOCOLS=TLSv1.1 TLSv1.2 TLSv1.3
# Nginx performance tuning
NGINX_WORKER_PROCESSES=auto
NGINX_CLIENT_MAX_BODY_SIZE=15M
NGINX_CLIENT_MAX_BODY_SIZE=100M
NGINX_KEEPALIVE_TIMEOUT=65
# Proxy settings

View File

@ -67,6 +67,7 @@ services:
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
CSP_WHITELIST: ${CSP_WHITELIST:-}
ALLOW_EMBED: ${ALLOW_EMBED:-false}
ALLOW_UNSAFE_DATA_SCHEME: ${ALLOW_UNSAFE_DATA_SCHEME:-false}
MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai}
MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace.dify.ai}
TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-}
@ -168,7 +169,7 @@ services:
PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-}
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-}
@ -265,7 +266,7 @@ services:
NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key}
NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.1 TLSv1.2 TLSv1.3}
NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto}
NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-15M}
NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M}
NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65}
NGINX_PROXY_READ_TIMEOUT: ${NGINX_PROXY_READ_TIMEOUT:-3600s}
NGINX_PROXY_SEND_TIMEOUT: ${NGINX_PROXY_SEND_TIMEOUT:-3600s}

View File

@ -364,6 +364,7 @@ x-shared-env: &shared-api-worker-env
MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10}
MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-99}
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
ALLOW_UNSAFE_DATA_SCHEME: ${ALLOW_UNSAFE_DATA_SCHEME:-false}
POSTGRES_USER: ${POSTGRES_USER:-${DB_USERNAME}}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-${DB_PASSWORD}}
POSTGRES_DB: ${POSTGRES_DB:-${DB_DATABASE}}
@ -420,7 +421,7 @@ x-shared-env: &shared-api-worker-env
NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key}
NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.1 TLSv1.2 TLSv1.3}
NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto}
NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-15M}
NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M}
NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65}
NGINX_PROXY_READ_TIMEOUT: ${NGINX_PROXY_READ_TIMEOUT:-3600s}
NGINX_PROXY_SEND_TIMEOUT: ${NGINX_PROXY_SEND_TIMEOUT:-3600s}
@ -582,6 +583,7 @@ services:
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
CSP_WHITELIST: ${CSP_WHITELIST:-}
ALLOW_EMBED: ${ALLOW_EMBED:-false}
ALLOW_UNSAFE_DATA_SCHEME: ${ALLOW_UNSAFE_DATA_SCHEME:-false}
MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai}
MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace.dify.ai}
TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-}
@ -683,7 +685,7 @@ services:
PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-}
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-}
@ -780,7 +782,7 @@ services:
NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key}
NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.1 TLSv1.2 TLSv1.3}
NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto}
NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-15M}
NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M}
NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65}
NGINX_PROXY_READ_TIMEOUT: ${NGINX_PROXY_READ_TIMEOUT:-3600s}
NGINX_PROXY_SEND_TIMEOUT: ${NGINX_PROXY_SEND_TIMEOUT:-3600s}

View File

@ -32,6 +32,9 @@ NEXT_PUBLIC_CSP_WHITELIST=
# Default is not allow to embed into iframe to prevent Clickjacking: https://owasp.org/www-community/attacks/Clickjacking
NEXT_PUBLIC_ALLOW_EMBED=
# Allow rendering unsafe URLs which have "data:" scheme.
NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME=false
# Github Access Token, used for invoking Github API
NEXT_PUBLIC_GITHUB_ACCESS_TOKEN=
# The maximum number of top-k value for RAG.

View File

@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next'
import { useBoolean } from 'ahooks'
import TracingIcon from './tracing-icon'
import ProviderPanel from './provider-panel'
import type { LangFuseConfig, LangSmithConfig, OpikConfig, WeaveConfig } from './type'
import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type'
import { TracingProvider } from './type'
import ProviderConfigModal from './provider-config-modal'
import Indicator from '@/app/components/header/indicator'
@ -23,11 +23,14 @@ export type PopupProps = {
onStatusChange: (enabled: boolean) => void
chosenProvider: TracingProvider | null
onChooseProvider: (provider: TracingProvider) => void
arizeConfig: ArizeConfig | null
phoenixConfig: PhoenixConfig | null
langSmithConfig: LangSmithConfig | null
langFuseConfig: LangFuseConfig | null
opikConfig: OpikConfig | null
weaveConfig: WeaveConfig | null
onConfigUpdated: (provider: TracingProvider, payload: LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig) => void
aliyunConfig: AliyunConfig | null
onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => void
onConfigRemoved: (provider: TracingProvider) => void
}
@ -38,10 +41,13 @@ const ConfigPopup: FC<PopupProps> = ({
onStatusChange,
chosenProvider,
onChooseProvider,
arizeConfig,
phoenixConfig,
langSmithConfig,
langFuseConfig,
opikConfig,
weaveConfig,
aliyunConfig,
onConfigUpdated,
onConfigRemoved,
}) => {
@ -65,7 +71,7 @@ const ConfigPopup: FC<PopupProps> = ({
}
}, [onChooseProvider])
const handleConfigUpdated = useCallback((payload: LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig) => {
const handleConfigUpdated = useCallback((payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => {
onConfigUpdated(currentProvider!, payload)
hideConfigModal()
}, [currentProvider, hideConfigModal, onConfigUpdated])
@ -75,8 +81,8 @@ const ConfigPopup: FC<PopupProps> = ({
hideConfigModal()
}, [currentProvider, hideConfigModal, onConfigRemoved])
const providerAllConfigured = langSmithConfig && langFuseConfig && opikConfig && weaveConfig
const providerAllNotConfigured = !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig
const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig && aliyunConfig
const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig && !aliyunConfig
const switchContent = (
<Switch
@ -86,6 +92,32 @@ const ConfigPopup: FC<PopupProps> = ({
disabled={providerAllNotConfigured}
/>
)
const arizePanel = (
<ProviderPanel
type={TracingProvider.arize}
readOnly={readOnly}
config={arizeConfig}
hasConfigured={!!arizeConfig}
onConfig={handleOnConfig(TracingProvider.arize)}
isChosen={chosenProvider === TracingProvider.arize}
onChoose={handleOnChoose(TracingProvider.arize)}
key="arize-provider-panel"
/>
)
const phoenixPanel = (
<ProviderPanel
type={TracingProvider.phoenix}
readOnly={readOnly}
config={phoenixConfig}
hasConfigured={!!phoenixConfig}
onConfig={handleOnConfig(TracingProvider.phoenix)}
isChosen={chosenProvider === TracingProvider.phoenix}
onChoose={handleOnChoose(TracingProvider.phoenix)}
key="phoenix-provider-panel"
/>
)
const langSmithPanel = (
<ProviderPanel
type={TracingProvider.langSmith}
@ -137,6 +169,19 @@ const ConfigPopup: FC<PopupProps> = ({
key="weave-provider-panel"
/>
)
const aliyunPanel = (
<ProviderPanel
type={TracingProvider.aliyun}
readOnly={readOnly}
config={aliyunConfig}
hasConfigured={!!aliyunConfig}
onConfig={handleOnConfig(TracingProvider.aliyun)}
isChosen={chosenProvider === TracingProvider.aliyun}
onChoose={handleOnChoose(TracingProvider.aliyun)}
key="aliyun-provider-panel"
/>
)
const configuredProviderPanel = () => {
const configuredPanels: JSX.Element[] = []
@ -152,12 +197,27 @@ const ConfigPopup: FC<PopupProps> = ({
if (weaveConfig)
configuredPanels.push(weavePanel)
if (arizeConfig)
configuredPanels.push(arizePanel)
if (phoenixConfig)
configuredPanels.push(phoenixPanel)
if (aliyunConfig)
configuredPanels.push(aliyunPanel)
return configuredPanels
}
const moreProviderPanel = () => {
const notConfiguredPanels: JSX.Element[] = []
if (!arizeConfig)
notConfiguredPanels.push(arizePanel)
if (!phoenixConfig)
notConfiguredPanels.push(phoenixPanel)
if (!langFuseConfig)
notConfiguredPanels.push(langfusePanel)
@ -170,16 +230,25 @@ const ConfigPopup: FC<PopupProps> = ({
if (!weaveConfig)
notConfiguredPanels.push(weavePanel)
if (!aliyunConfig)
notConfiguredPanels.push(aliyunPanel)
return notConfiguredPanels
}
const configuredProviderConfig = () => {
if (currentProvider === TracingProvider.arize)
return arizeConfig
if (currentProvider === TracingProvider.phoenix)
return phoenixConfig
if (currentProvider === TracingProvider.langSmith)
return langSmithConfig
if (currentProvider === TracingProvider.langfuse)
return langFuseConfig
if (currentProvider === TracingProvider.opik)
return opikConfig
if (currentProvider === TracingProvider.aliyun)
return aliyunConfig
return weaveConfig
}
@ -220,22 +289,25 @@ const ConfigPopup: FC<PopupProps> = ({
? (
<>
<div className='system-xs-medium-uppercase text-text-tertiary'>{t(`${I18N_PREFIX}.configProviderTitle.${providerAllConfigured ? 'configured' : 'notConfigured'}`)}</div>
<div className='mt-2 space-y-2'>
<div className='mt-2 max-h-96 space-y-2 overflow-y-auto'>
{langfusePanel}
{langSmithPanel}
{opikPanel}
{weavePanel}
{arizePanel}
{phoenixPanel}
{aliyunPanel}
</div>
</>
)
: (
<>
<div className='system-xs-medium-uppercase text-text-tertiary'>{t(`${I18N_PREFIX}.configProviderTitle.configured`)}</div>
<div className='mt-2 space-y-2'>
<div className='mt-2 max-h-40 space-y-2 overflow-y-auto'>
{configuredProviderPanel()}
</div>
<div className='system-xs-medium-uppercase mt-3 text-text-tertiary'>{t(`${I18N_PREFIX}.configProviderTitle.moreProvider`)}</div>
<div className='mt-2 space-y-2'>
<div className='mt-2 max-h-40 space-y-2 overflow-y-auto'>
{moreProviderPanel()}
</div>
</>

View File

@ -1,8 +1,11 @@
import { TracingProvider } from './type'
export const docURL = {
[TracingProvider.arize]: 'https://docs.arize.com/arize',
[TracingProvider.phoenix]: 'https://docs.arize.com/phoenix',
[TracingProvider.langSmith]: 'https://docs.smith.langchain.com/',
[TracingProvider.langfuse]: 'https://docs.langfuse.com',
[TracingProvider.opik]: 'https://www.comet.com/docs/opik/tracing/integrations/dify#setup-instructions',
[TracingProvider.weave]: 'https://weave-docs.wandb.ai/',
[TracingProvider.aliyun]: 'https://help.aliyun.com/zh/arms/tracing-analysis/untitled-document-1750672984680',
}

View File

@ -7,12 +7,12 @@ import {
import { useTranslation } from 'react-i18next'
import { usePathname } from 'next/navigation'
import { useBoolean } from 'ahooks'
import type { LangFuseConfig, LangSmithConfig, OpikConfig, WeaveConfig } from './type'
import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type'
import { TracingProvider } from './type'
import TracingIcon from './tracing-icon'
import ConfigButton from './config-button'
import cn from '@/utils/classnames'
import { LangfuseIcon, LangsmithIcon, OpikIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing'
import { AliyunIcon, ArizeIcon, LangfuseIcon, LangsmithIcon, OpikIcon, PhoenixIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing'
import Indicator from '@/app/components/header/indicator'
import { fetchTracingConfig as doFetchTracingConfig, fetchTracingStatus, updateTracingStatus } from '@/service/apps'
import type { TracingStatus } from '@/models/app'
@ -62,24 +62,33 @@ const Panel: FC = () => {
}
const inUseTracingProvider: TracingProvider | null = tracingStatus?.tracing_provider || null
const InUseProviderIcon
= inUseTracingProvider === TracingProvider.langSmith
? LangsmithIcon
: inUseTracingProvider === TracingProvider.langfuse
? LangfuseIcon
: inUseTracingProvider === TracingProvider.opik
? OpikIcon
: inUseTracingProvider === TracingProvider.weave
? WeaveIcon
: LangsmithIcon
const providerIconMap: Record<TracingProvider, React.FC<{ className?: string }>> = {
[TracingProvider.arize]: ArizeIcon,
[TracingProvider.phoenix]: PhoenixIcon,
[TracingProvider.langSmith]: LangsmithIcon,
[TracingProvider.langfuse]: LangfuseIcon,
[TracingProvider.opik]: OpikIcon,
[TracingProvider.weave]: WeaveIcon,
[TracingProvider.aliyun]: AliyunIcon,
}
const InUseProviderIcon = inUseTracingProvider ? providerIconMap[inUseTracingProvider] : undefined
const [arizeConfig, setArizeConfig] = useState<ArizeConfig | null>(null)
const [phoenixConfig, setPhoenixConfig] = useState<PhoenixConfig | null>(null)
const [langSmithConfig, setLangSmithConfig] = useState<LangSmithConfig | null>(null)
const [langFuseConfig, setLangFuseConfig] = useState<LangFuseConfig | null>(null)
const [opikConfig, setOpikConfig] = useState<OpikConfig | null>(null)
const [weaveConfig, setWeaveConfig] = useState<WeaveConfig | null>(null)
const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig)
const [aliyunConfig, setAliyunConfig] = useState<AliyunConfig | null>(null)
const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig)
const fetchTracingConfig = async () => {
const { tracing_config: arizeConfig, has_not_configured: arizeHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.arize })
if (!arizeHasNotConfig)
setArizeConfig(arizeConfig as ArizeConfig)
const { tracing_config: phoenixConfig, has_not_configured: phoenixHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.phoenix })
if (!phoenixHasNotConfig)
setPhoenixConfig(phoenixConfig as PhoenixConfig)
const { tracing_config: langSmithConfig, has_not_configured: langSmithHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.langSmith })
if (!langSmithHasNotConfig)
setLangSmithConfig(langSmithConfig as LangSmithConfig)
@ -92,12 +101,19 @@ const Panel: FC = () => {
const { tracing_config: weaveConfig, has_not_configured: weaveHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.weave })
if (!weaveHasNotConfig)
setWeaveConfig(weaveConfig as WeaveConfig)
const { tracing_config: aliyunConfig, has_not_configured: aliyunHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.aliyun })
if (!aliyunHasNotConfig)
setAliyunConfig(aliyunConfig as AliyunConfig)
}
const handleTracingConfigUpdated = async (provider: TracingProvider) => {
// call api to hide secret key value
const { tracing_config } = await doFetchTracingConfig({ appId, provider })
if (provider === TracingProvider.langSmith)
if (provider === TracingProvider.arize)
setArizeConfig(tracing_config as ArizeConfig)
else if (provider === TracingProvider.phoenix)
setPhoenixConfig(tracing_config as PhoenixConfig)
else if (provider === TracingProvider.langSmith)
setLangSmithConfig(tracing_config as LangSmithConfig)
else if (provider === TracingProvider.langfuse)
setLangFuseConfig(tracing_config as LangFuseConfig)
@ -105,10 +121,16 @@ const Panel: FC = () => {
setOpikConfig(tracing_config as OpikConfig)
else if (provider === TracingProvider.weave)
setWeaveConfig(tracing_config as WeaveConfig)
else if (provider === TracingProvider.aliyun)
setAliyunConfig(tracing_config as AliyunConfig)
}
const handleTracingConfigRemoved = (provider: TracingProvider) => {
if (provider === TracingProvider.langSmith)
if (provider === TracingProvider.arize)
setArizeConfig(null)
else if (provider === TracingProvider.phoenix)
setPhoenixConfig(null)
else if (provider === TracingProvider.langSmith)
setLangSmithConfig(null)
else if (provider === TracingProvider.langfuse)
setLangFuseConfig(null)
@ -116,6 +138,8 @@ const Panel: FC = () => {
setOpikConfig(null)
else if (provider === TracingProvider.weave)
setWeaveConfig(null)
else if (provider === TracingProvider.aliyun)
setAliyunConfig(null)
if (provider === inUseTracingProvider) {
handleTracingStatusChange({
enabled: false,
@ -170,10 +194,13 @@ const Panel: FC = () => {
onStatusChange={handleTracingEnabledChange}
chosenProvider={inUseTracingProvider}
onChooseProvider={handleChooseProvider}
arizeConfig={arizeConfig}
phoenixConfig={phoenixConfig}
langSmithConfig={langSmithConfig}
langFuseConfig={langFuseConfig}
opikConfig={opikConfig}
weaveConfig={weaveConfig}
aliyunConfig={aliyunConfig}
onConfigUpdated={handleTracingConfigUpdated}
onConfigRemoved={handleTracingConfigRemoved}
controlShowPopup={controlShowPopup}
@ -205,10 +232,13 @@ const Panel: FC = () => {
onStatusChange={handleTracingEnabledChange}
chosenProvider={inUseTracingProvider}
onChooseProvider={handleChooseProvider}
arizeConfig={arizeConfig}
phoenixConfig={phoenixConfig}
langSmithConfig={langSmithConfig}
langFuseConfig={langFuseConfig}
opikConfig={opikConfig}
weaveConfig={weaveConfig}
aliyunConfig={aliyunConfig}
onConfigUpdated={handleTracingConfigUpdated}
onConfigRemoved={handleTracingConfigRemoved}
controlShowPopup={controlShowPopup}

View File

@ -4,7 +4,7 @@ import React, { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useBoolean } from 'ahooks'
import Field from './field'
import type { LangFuseConfig, LangSmithConfig, OpikConfig, WeaveConfig } from './type'
import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type'
import { TracingProvider } from './type'
import { docURL } from './config'
import {
@ -22,15 +22,28 @@ import Divider from '@/app/components/base/divider'
type Props = {
appId: string
type: TracingProvider
payload?: LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | null
payload?: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | null
onRemoved: () => void
onCancel: () => void
onSaved: (payload: LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig) => void
onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => void
onChosen: (provider: TracingProvider) => void
}
const I18N_PREFIX = 'app.tracing.configProvider'
const arizeConfigTemplate = {
api_key: '',
space_id: '',
project: '',
endpoint: '',
}
const phoenixConfigTemplate = {
api_key: '',
project: '',
endpoint: '',
}
const langSmithConfigTemplate = {
api_key: '',
project: '',
@ -58,6 +71,12 @@ const weaveConfigTemplate = {
host: '',
}
const aliyunConfigTemplate = {
app_name: '',
license_key: '',
endpoint: '',
}
const ProviderConfigModal: FC<Props> = ({
appId,
type,
@ -71,11 +90,17 @@ const ProviderConfigModal: FC<Props> = ({
const isEdit = !!payload
const isAdd = !isEdit
const [isSaving, setIsSaving] = useState(false)
const [config, setConfig] = useState<LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig>((() => {
const [config, setConfig] = useState<ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig>((() => {
if (isEdit)
return payload
if (type === TracingProvider.langSmith)
if (type === TracingProvider.arize)
return arizeConfigTemplate
else if (type === TracingProvider.phoenix)
return phoenixConfigTemplate
else if (type === TracingProvider.langSmith)
return langSmithConfigTemplate
else if (type === TracingProvider.langfuse)
@ -84,6 +109,9 @@ const ProviderConfigModal: FC<Props> = ({
else if (type === TracingProvider.opik)
return opikConfigTemplate
else if (type === TracingProvider.aliyun)
return aliyunConfigTemplate
return weaveConfigTemplate
})())
const [isShowRemoveConfirm, {
@ -115,6 +143,24 @@ const ProviderConfigModal: FC<Props> = ({
const checkValid = useCallback(() => {
let errorMessage = ''
if (type === TracingProvider.arize) {
const postData = config as ArizeConfig
if (!postData.api_key)
errorMessage = t('common.errorMsg.fieldRequired', { field: 'API Key' })
if (!postData.space_id)
errorMessage = t('common.errorMsg.fieldRequired', { field: 'Space ID' })
if (!errorMessage && !postData.project)
errorMessage = t('common.errorMsg.fieldRequired', { field: t(`${I18N_PREFIX}.project`) })
}
if (type === TracingProvider.phoenix) {
const postData = config as PhoenixConfig
if (!postData.api_key)
errorMessage = t('common.errorMsg.fieldRequired', { field: 'API Key' })
if (!errorMessage && !postData.project)
errorMessage = t('common.errorMsg.fieldRequired', { field: t(`${I18N_PREFIX}.project`) })
}
if (type === TracingProvider.langSmith) {
const postData = config as LangSmithConfig
if (!postData.api_key)
@ -146,6 +192,16 @@ const ProviderConfigModal: FC<Props> = ({
errorMessage = t('common.errorMsg.fieldRequired', { field: t(`${I18N_PREFIX}.project`) })
}
if (type === TracingProvider.aliyun) {
const postData = config as AliyunConfig
if (!errorMessage && !postData.app_name)
errorMessage = t('common.errorMsg.fieldRequired', { field: 'App Name' })
if (!errorMessage && !postData.license_key)
errorMessage = t('common.errorMsg.fieldRequired', { field: 'License Key' })
if (!errorMessage && !postData.endpoint)
errorMessage = t('common.errorMsg.fieldRequired', { field: 'Endpoint' })
}
return errorMessage
}, [config, t, type])
const handleSave = useCallback(async () => {
@ -195,6 +251,93 @@ const ProviderConfigModal: FC<Props> = ({
</div>
<div className='space-y-4'>
{type === TracingProvider.arize && (
<>
<Field
label='API Key'
labelClassName='!text-sm'
isRequired
value={(config as ArizeConfig).api_key}
onChange={handleConfigChange('api_key')}
placeholder={t(`${I18N_PREFIX}.placeholder`, { key: 'API Key' })!}
/>
<Field
label='Space ID'
labelClassName='!text-sm'
isRequired
value={(config as ArizeConfig).space_id}
onChange={handleConfigChange('space_id')}
placeholder={t(`${I18N_PREFIX}.placeholder`, { key: 'Space ID' })!}
/>
<Field
label={t(`${I18N_PREFIX}.project`)!}
labelClassName='!text-sm'
isRequired
value={(config as ArizeConfig).project}
onChange={handleConfigChange('project')}
placeholder={t(`${I18N_PREFIX}.placeholder`, { key: t(`${I18N_PREFIX}.project`) })!}
/>
<Field
label='Endpoint'
labelClassName='!text-sm'
value={(config as ArizeConfig).endpoint}
onChange={handleConfigChange('endpoint')}
placeholder={'https://otlp.arize.com'}
/>
</>
)}
{type === TracingProvider.phoenix && (
<>
<Field
label='API Key'
labelClassName='!text-sm'
isRequired
value={(config as PhoenixConfig).api_key}
onChange={handleConfigChange('api_key')}
placeholder={t(`${I18N_PREFIX}.placeholder`, { key: 'API Key' })!}
/>
<Field
label={t(`${I18N_PREFIX}.project`)!}
labelClassName='!text-sm'
isRequired
value={(config as PhoenixConfig).project}
onChange={handleConfigChange('project')}
placeholder={t(`${I18N_PREFIX}.placeholder`, { key: t(`${I18N_PREFIX}.project`) })!}
/>
<Field
label='Endpoint'
labelClassName='!text-sm'
value={(config as PhoenixConfig).endpoint}
onChange={handleConfigChange('endpoint')}
placeholder={'https://app.phoenix.arize.com'}
/>
</>
)}
{type === TracingProvider.aliyun && (
<>
<Field
label='License Key'
labelClassName='!text-sm'
isRequired
value={(config as AliyunConfig).license_key}
onChange={handleConfigChange('license_key')}
placeholder={t(`${I18N_PREFIX}.placeholder`, { key: 'License Key' })!}
/>
<Field
label='Endpoint'
labelClassName='!text-sm'
value={(config as AliyunConfig).endpoint}
onChange={handleConfigChange('endpoint')}
placeholder={'https://tracing.arms.aliyuncs.com'}
/>
<Field
label='App Name'
labelClassName='!text-sm'
value={(config as AliyunConfig).app_name}
onChange={handleConfigChange('app_name')}
/>
</>
)}
{type === TracingProvider.weave && (
<>
<Field

View File

@ -7,7 +7,7 @@ import {
import { useTranslation } from 'react-i18next'
import { TracingProvider } from './type'
import cn from '@/utils/classnames'
import { LangfuseIconBig, LangsmithIconBig, OpikIconBig, WeaveIconBig } from '@/app/components/base/icons/src/public/tracing'
import { AliyunIconBig, ArizeIconBig, LangfuseIconBig, LangsmithIconBig, OpikIconBig, PhoenixIconBig, WeaveIconBig } from '@/app/components/base/icons/src/public/tracing'
import { Eye as View } from '@/app/components/base/icons/src/vender/solid/general'
const I18N_PREFIX = 'app.tracing'
@ -24,10 +24,13 @@ type Props = {
const getIcon = (type: TracingProvider) => {
return ({
[TracingProvider.arize]: ArizeIconBig,
[TracingProvider.phoenix]: PhoenixIconBig,
[TracingProvider.langSmith]: LangsmithIconBig,
[TracingProvider.langfuse]: LangfuseIconBig,
[TracingProvider.opik]: OpikIconBig,
[TracingProvider.weave]: WeaveIconBig,
[TracingProvider.aliyun]: AliyunIconBig,
})[type]
}

View File

@@ -1,8 +1,24 @@
export enum TracingProvider {
arize = 'arize',
phoenix = 'phoenix',
langSmith = 'langsmith',
langfuse = 'langfuse',
opik = 'opik',
weave = 'weave',
aliyun = 'aliyun',
}
export type ArizeConfig = {
api_key: string
space_id: string
project: string
endpoint: string
}
export type PhoenixConfig = {
api_key: string
project: string
endpoint: string
}
export type LangSmithConfig = {
@@ -31,3 +47,9 @@ export type WeaveConfig = {
endpoint: string
host: string
}
export type AliyunConfig = {
app_name: string
license_key: string
endpoint: string
}
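For orientation, an AliyunConfig value that would pass the required-field checks in the provider config modal above might look like the sketch below; every value is a placeholder, and only the endpoint mirrors the default shown in the form.

// Illustrative only — placeholder values, not real credentials.
const exampleAliyunConfig: AliyunConfig = {
  app_name: 'my-dify-app',
  license_key: 'xxxx-xxxx-xxxx-xxxx',
  endpoint: 'https://tracing.arms.aliyuncs.com', // same default as the form placeholder
}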

View File

@@ -339,7 +339,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
<div className='flex items-center gap-1 text-[10px] font-medium leading-[18px] text-text-tertiary'>
<div className='truncate' title={app.author_name}>{app.author_name}</div>
<div>·</div>
<div className='truncate'>{EditTimeText}</div>
<div className='truncate' title={EditTimeText}>{EditTimeText}</div>
</div>
</div>
<div className='flex h-5 w-5 shrink-0 items-center justify-center'>

View File

@@ -10,7 +10,6 @@ import {
RiFileCopy2Line,
RiFileDownloadLine,
RiFileUploadLine,
RiMoreLine,
} from '@remixicon/react'
import AppIcon from '../base/app-icon'
import SwitchAppModal from '../app/switch-app-modal'
@@ -35,7 +34,8 @@ import ContentDialog from '@/app/components/base/content-dialog'
import Button from '@/app/components/base/button'
import CardView from '@/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/cardView'
import Divider from '../base/divider'
import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '../base/portal-to-follow-elem'
import type { Operation } from './app-operations'
import AppOperations from './app-operations'
export type IAppInfoProps = {
expand: boolean
@@ -186,14 +186,58 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
const { isCurrentWorkspaceEditor } = useAppContext()
const [showMore, setShowMore] = useState(false)
const handleTriggerMore = useCallback(() => {
setShowMore(true)
}, [setShowMore])
if (!appDetail)
return null
const operations = [
{
id: 'edit',
title: t('app.editApp'),
icon: <RiEditLine />,
onClick: () => {
setOpen(false)
onDetailExpand?.(false)
setShowEditModal(true)
},
},
{
id: 'duplicate',
title: t('app.duplicate'),
icon: <RiFileCopy2Line />,
onClick: () => {
setOpen(false)
onDetailExpand?.(false)
setShowDuplicateModal(true)
},
},
{
id: 'export',
title: t('app.export'),
icon: <RiFileDownloadLine />,
onClick: exportCheck,
},
(appDetail.mode !== 'agent-chat' && (appDetail.mode === 'advanced-chat' || appDetail.mode === 'workflow')) ? {
id: 'import',
title: t('workflow.common.importDSL'),
icon: <RiFileUploadLine />,
onClick: () => {
setOpen(false)
onDetailExpand?.(false)
setShowImportDSLModal(true)
},
} : undefined,
(appDetail.mode !== 'agent-chat' && (appDetail.mode === 'completion' || appDetail.mode === 'chat')) ? {
id: 'switch',
title: t('app.switch'),
icon: <RiExchange2Line />,
onClick: () => {
setOpen(false)
onDetailExpand?.(false)
setShowSwitchModal(true)
},
} : undefined,
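    // entries above may resolve to undefined when the app mode does not offer them; the filter narrows back to Operation[]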
].filter((op): op is Operation => Boolean(op))
return (
<div>
{!onlyShowDetail && (
@@ -259,88 +303,10 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
<div className='system-xs-regular overflow-wrap-anywhere max-h-[105px] w-full max-w-full overflow-y-auto whitespace-normal break-words text-text-tertiary'>{appDetail.description}</div>
)}
{/* operations */}
<div className='flex flex-wrap items-center gap-1 self-stretch'>
<Button
size={'small'}
variant={'secondary'}
className='gap-[1px]'
onClick={() => {
setOpen(false)
onDetailExpand?.(false)
setShowEditModal(true)
}}
>
<RiEditLine className='h-3.5 w-3.5 text-components-button-secondary-text' />
<span className='system-xs-medium text-components-button-secondary-text'>{t('app.editApp')}</span>
</Button>
<Button
size={'small'}
variant={'secondary'}
className='gap-[1px]'
onClick={() => {
setOpen(false)
onDetailExpand?.(false)
setShowDuplicateModal(true)
}}>
<RiFileCopy2Line className='h-3.5 w-3.5 text-components-button-secondary-text' />
<span className='system-xs-medium text-components-button-secondary-text'>{t('app.duplicate')}</span>
</Button>
<Button
size={'small'}
variant={'secondary'}
className='gap-[1px]'
onClick={exportCheck}
>
<RiFileDownloadLine className='h-3.5 w-3.5 text-components-button-secondary-text' />
<span className='system-xs-medium text-components-button-secondary-text'>{t('app.export')}</span>
</Button>
{appDetail.mode !== 'agent-chat' && <PortalToFollowElem
open={showMore}
onOpenChange={setShowMore}
placement='bottom-end'
offset={{
mainAxis: 4,
}}>
<PortalToFollowElemTrigger onClick={handleTriggerMore}>
<Button
size={'small'}
variant={'secondary'}
className='gap-[1px]'
>
<RiMoreLine className='h-3.5 w-3.5 text-components-button-secondary-text' />
<span className='system-xs-medium text-components-button-secondary-text'>{t('common.operation.more')}</span>
</Button>
</PortalToFollowElemTrigger>
<PortalToFollowElemContent className='z-[21]'>
<div className='flex w-[264px] flex-col rounded-[12px] border-[0.5px] border-components-panel-border bg-components-panel-bg-blur p-1 shadow-lg backdrop-blur-[5px]'>
{
(appDetail.mode === 'advanced-chat' || appDetail.mode === 'workflow')
&& <div className='flex h-8 cursor-pointer items-center gap-x-1 rounded-lg p-1.5 hover:bg-state-base-hover'
onClick={() => {
setOpen(false)
onDetailExpand?.(false)
setShowImportDSLModal(true)
}}>
<RiFileUploadLine className='h-4 w-4 text-text-tertiary' />
<span className='system-md-regular text-text-secondary'>{t('workflow.common.importDSL')}</span>
</div>
}
{
(appDetail.mode === 'completion' || appDetail.mode === 'chat')
&& <div className='flex h-8 cursor-pointer items-center gap-x-1 rounded-lg p-1.5 hover:bg-state-base-hover'
onClick={() => {
setOpen(false)
onDetailExpand?.(false)
setShowSwitchModal(true)
}}>
<RiExchange2Line className='h-4 w-4 text-text-tertiary' />
<span className='system-md-regular text-text-secondary'>{t('app.switch')}</span>
</div>
}
</div>
</PortalToFollowElemContent>
</PortalToFollowElem>}
</div>
<AppOperations
gap={4}
operations={operations}
/>
</div>
<div className='flex flex-1'>
<CardView

View File

@@ -0,0 +1,145 @@
import type { ReactElement } from 'react'
import { cloneElement, useCallback } from 'react'
import { useEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '../base/portal-to-follow-elem'
import { RiMoreLine } from '@remixicon/react'
export type Operation = {
id: string; title: string; icon: ReactElement; onClick: () => void
}
const AppOperations = ({ operations, gap }: {
operations: Operation[]
gap: number
}) => {
const { t } = useTranslation()
  const [visibleOperations, setVisibleOperations] = useState<Operation[]>([])
const [moreOperations, setMoreOperations] = useState<Operation[]>([])
const [showMore, setShowMore] = useState(false)
const navRef = useRef<HTMLDivElement>(null)
const handleTriggerMore = useCallback(() => {
setShowMore(true)
}, [setShowMore])
useEffect(() => {
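    // Measure the hidden copy of the toolbar: accumulate button widths in order and
    // keep an operation visible only while it still fits next to the "more" button;
    // everything after the first overflow is collected into the "more" menu instead.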
const moreElement = document.getElementById('more')
const navElement = document.getElementById('nav')
let width = 0
const containerWidth = navElement?.clientWidth ?? 0
const moreWidth = moreElement?.clientWidth ?? 0
if (containerWidth === 0 || moreWidth === 0) return
const updatedEntries: Record<string, boolean> = operations.reduce((pre, cur) => {
pre[cur.id] = false
return pre
}, {} as Record<string, boolean>)
    const childElements = Array.from(navRef.current!.children).slice(0, -1)
    for (let i = 0; i < childElements.length; i++) {
      const child = childElements[i] as HTMLElement
const id = child.dataset.targetid
if (!id) break
const childWidth = child.clientWidth
if (width + gap + childWidth + moreWidth <= containerWidth) {
updatedEntries[id] = true
width += gap + childWidth
}
else {
        if (i === childElements.length - 1 && width + childWidth <= containerWidth)
updatedEntries[id] = true
else
updatedEntries[id] = false
break
}
}
setVisibleOperations(operations.filter(item => updatedEntries[item.id]))
setMoreOperations(operations.filter(item => !updatedEntries[item.id]))
}, [operations, gap])
return (
<>
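      {/* Hidden, zero-height copy of the toolbar, rendered until the visible set is known, used only to measure button widths */}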
      {!visibleOperations.length && <div
id="nav"
ref={navRef}
className="flex h-0 items-center self-stretch overflow-hidden"
style={{ gap }}
>
        {operations.map(operation =>
          <Button
            key={operation.id}
data-targetid={operation.id}
size={'small'}
variant={'secondary'}
className="gap-[1px]">
{cloneElement(operation.icon, { className: 'h-3.5 w-3.5 text-components-button-secondary-text' })}
<span className="system-xs-medium text-components-button-secondary-text">
{operation.title}
</span>
</Button>,
)}
<Button
id="more"
size={'small'}
variant={'secondary'}
className="gap-[1px]"
>
<RiMoreLine className="h-3.5 w-3.5 text-components-button-secondary-text" />
<span className="system-xs-medium text-components-button-secondary-text">
{t('common.operation.more')}
</span>
</Button>
</div>}
<div className="flex items-center self-stretch overflow-hidden" style={{ gap }}>
        {visibleOperations.map(operation =>
<Button
key={operation.id}
data-targetid={operation.id}
size={'small'}
variant={'secondary'}
className="gap-[1px]"
onClick={operation.onClick}>
{cloneElement(operation.icon, { className: 'h-3.5 w-3.5 text-components-button-secondary-text' })}
<span className="system-xs-medium text-components-button-secondary-text">
{operation.title}
</span>
</Button>,
)}
        {visibleOperations.length < operations.length && <PortalToFollowElem
open={showMore}
onOpenChange={setShowMore}
placement='bottom-end'
offset={{
mainAxis: 4,
}}>
<PortalToFollowElemTrigger onClick={handleTriggerMore}>
<Button
size={'small'}
variant={'secondary'}
className='gap-[1px]'
>
<RiMoreLine className='h-3.5 w-3.5 text-components-button-secondary-text' />
<span className='system-xs-medium text-components-button-secondary-text'>{t('common.operation.more')}</span>
</Button>
</PortalToFollowElemTrigger>
<PortalToFollowElemContent className='z-[21]'>
<div className='flex min-w-[264px] flex-col rounded-[12px] border-[0.5px] border-components-panel-border bg-components-panel-bg-blur p-1 shadow-lg backdrop-blur-[5px]'>
{moreOperations.map(item => <div
key={item.id}
className='flex h-8 cursor-pointer items-center gap-x-1 rounded-lg p-1.5 hover:bg-state-base-hover'
onClick={item.onClick}
>
{cloneElement(item.icon, { className: 'h-4 w-4 text-text-tertiary' })}
<span className='system-md-regular text-text-secondary'>{item.title}</span>
</div>)}
</div>
</PortalToFollowElemContent>
</PortalToFollowElem>}
</div>
</>
)
}
export default AppOperations
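A minimal caller of the new AppOperations component, for orientation: the file location, titles, and click handlers below are assumptions for illustration; only the props shape (a flat operations list plus a pixel gap) comes from the component above, which decides on its own which buttons collapse into the "more" menu.

// Hypothetical usage sketch — not part of this change.
import { RiEditLine, RiFileCopy2Line } from '@remixicon/react'
import type { Operation } from './app-operations'
import AppOperations from './app-operations'

const ExampleToolbar = () => {
  const operations: Operation[] = [
    { id: 'edit', title: 'Edit', icon: <RiEditLine />, onClick: () => { /* open edit modal */ } },
    { id: 'duplicate', title: 'Duplicate', icon: <RiFileCopy2Line />, onClick: () => { /* open duplicate modal */ } },
  ]
  return <AppOperations gap={4} operations={operations} />
}

export default ExampleToolbar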

View File

@@ -2,7 +2,6 @@
import type { FC } from 'react'
import React, { useState } from 'react'
import { useTranslation } from 'react-i18next'
import cn from '@/utils/classnames'
import OperationBtn from '@/app/components/app/configuration/base/operation-btn'
import {
PortalToFollowElem,
@@ -28,11 +27,11 @@ type ItemProps = {
const SelectItem: FC<ItemProps> = ({ text, type, value, Icon, onClick }) => {
return (
<div
className='flex h-8 cursor-pointer items-center rounded-lg px-3 hover:bg-gray-50'
className='flex h-8 cursor-pointer items-center rounded-lg px-3 hover:bg-state-base-hover'
onClick={() => onClick(value)}
>
{Icon ? <Icon className='h-4 w-4 text-gray-500' /> : <InputVarTypeIcon type={type!} className='h-4 w-4 text-gray-500' />}
<div className='ml-2 truncate text-xs text-gray-600'>{text}</div>
{Icon ? <Icon className='h-4 w-4 text-text-secondary' /> : <InputVarTypeIcon type={type!} className='h-4 w-4 text-text-secondary' />}
<div className='ml-2 truncate text-xs text-text-primary'>{text}</div>
</div>
)
}
@@ -57,17 +56,17 @@ const SelectVarType: FC<Props> = ({
}}
>
<PortalToFollowElemTrigger onClick={() => setOpen(v => !v)}>
<OperationBtn type='add' className={cn(open && 'bg-gray-200')} />
<OperationBtn type='add' />
</PortalToFollowElemTrigger>
<PortalToFollowElemContent style={{ zIndex: 1000 }}>
<div className='min-w-[192px] rounded-lg border border-gray-200 bg-white shadow-lg'>
<div className='min-w-[192px] rounded-lg border border-components-panel-border bg-components-panel-bg-blur shadow-lg backdrop-blur-sm'>
<div className='p-1'>
<SelectItem type={InputVarType.textInput} value='string' text={t('appDebug.variableConfig.string')} onClick={handleChange}></SelectItem>
<SelectItem type={InputVarType.paragraph} value='paragraph' text={t('appDebug.variableConfig.paragraph')} onClick={handleChange}></SelectItem>
<SelectItem type={InputVarType.select} value='select' text={t('appDebug.variableConfig.select')} onClick={handleChange}></SelectItem>
<SelectItem type={InputVarType.number} value='number' text={t('appDebug.variableConfig.number')} onClick={handleChange}></SelectItem>
</div>
<div className='h-[1px] bg-gray-100'></div>
<div className='h-[1px] border-t border-components-panel-border'></div>
<div className='p-1'>
<SelectItem Icon={ApiConnection} value='api' text={t('appDebug.variableConfig.apiBasedVar')} onClick={handleChange}></SelectItem>
</div>

File diff suppressed because one or more lines are too long


File diff suppressed because one or more lines are too long


View File

@@ -0,0 +1,17 @@
<svg id="Layer_2" data-name="Layer 2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 111 24" width="111" height="24">
<g id="Layer_1-2" data-name="Layer 1">
<rect style="fill: none;" y="0" width="111" height="24"/>
<g id="Arize_-_standard" data-name="Arize - standard">
<g>
<path d="M100.94,14.55h-11.29c0,.13-.01.23.02.31.53,1.55,1.54,2.59,3.19,2.88,1.7.29,3.22,0,4.3-1.52.09-.13.28-.27.43-.28.99-.02,1.99-.01,2.99-.01-.16,2.69-3.89,4.98-7.6,4.7-3.99-.3-6.91-3.79-6.58-7.88.37-4.62,3.67-7.31,8.37-6.85,4.05.4,6.68,4.04,6.19,8.64ZM89.71,11.66h7.96c-.43-1.88-2.04-3.02-4.17-2.97-1.87.04-3.48,1.3-3.78,2.97Z"/>
<path style="fill: #ff008c;" d="M20.81,2.53c.59-.03,1.11.26,1.49.8,3.05,4.38,6.09,8.77,9.13,13.16.63.91.43,1.99-.43,2.59-.84.59-1.97.35-2.62-.57-2.1-3.01-4.19-6.04-6.28-9.05-.04-.06-.08-.11-.12-.17-.83-1.17-1.65-1.18-2.47,0-2.11,3.05-4.23,6.09-6.34,9.14-.35.5-.77.88-1.4.95-.77.08-1.39-.19-1.79-.86-.41-.69-.38-1.38.07-2.03,1.01-1.47,2.04-2.93,3.06-4.4,2.01-2.89,4.03-5.77,6.03-8.67.39-.56.88-.9,1.67-.89Z"/>
<path d="M52.84,20.43h-3.02v-1.16c-.1.02-.16.01-.19.03-2.46,1.73-5.09,1.88-7.68.51-2.61-1.38-3.71-3.77-3.68-6.67.03-3.26,1.82-5.96,4.64-6.86,2.35-.75,4.64-.67,6.69.93.04.03.09.03.2.07v-1.18h3.03v14.31ZM41.36,13.32c.01.17.02.46.05.75.22,2.09,1.76,3.58,3.91,3.76,2.1.18,3.8-.95,4.38-2.96.14-.47.2-.98.22-1.47.13-3.22-2.25-5.27-5.33-4.61-1.92.41-3.19,2.14-3.23,4.53Z"/>
<path d="M80.41,8.9h-7.44v-2.77h11.64c0,.81.02,1.61-.01,2.41,0,.17-.19.35-.32.51-2.28,2.68-4.57,5.36-6.85,8.04-.11.13-.21.26-.38.47h7.53v2.86h-11.74c0-.82-.02-1.62.01-2.42,0-.16.17-.33.28-.47,2.32-2.74,4.64-5.47,6.96-8.21.09-.1.16-.21.32-.42Z"/>
<path d="M59.39,20.54h-3.03V6.22h3.03v1.54c.8-.48,1.55-1.01,2.37-1.41.85-.41,1.79-.41,2.77-.35v2.94c-.16.01-.3.03-.45.03-2.37.03-4.01,1.36-4.51,3.69-.12.57-.16,1.16-.16,1.74-.02,1.85,0,3.71,0,5.56v.57Z"/>
<path d="M67.1,6.09h2.99v14.33h-2.99V6.09Z"/>
<path style="fill: #ff008c;" d="M20.73,19.53c1.25,0,2.24.96,2.25,2.19.01,1.24-1.02,2.29-2.24,2.28-1.23-.01-2.21-1.01-2.22-2.24,0-1.25.96-2.22,2.21-2.22Z"/>
<path d="M70.7,2.11c0,1.19-.92,2.14-2.09,2.15-1.19.01-2.16-.95-2.16-2.14C66.46.95,67.4,0,68.58,0c1.18,0,2.12.93,2.12,2.11Z"/>
</g>
</g>
</g>
</svg>


View File

@@ -0,0 +1,17 @@
<svg id="Layer_2" data-name="Layer 2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 74 16" width="74" height="16">
<g id="Layer_1-2" data-name="Layer 1">
<rect style="fill: none;" y="0" width="74" height="16"/>
<g id="Arize_-_standard" data-name="Arize - standard">
<g>
<path d="M67.29,9.7h-7.52c0,.08,0,.15.01.21.35,1.03,1.03,1.73,2.13,1.92,1.13.19,2.14,0,2.86-1.01.06-.09.19-.18.29-.19.66-.02,1.33,0,1.99,0-.1,1.79-2.59,3.32-5.07,3.14-2.66-.2-4.61-2.53-4.39-5.25.25-3.08,2.44-4.88,5.58-4.56,2.7.27,4.45,2.69,4.12,5.76ZM59.81,7.77h5.3c-.28-1.25-1.36-2.01-2.78-1.98-1.25.03-2.32.87-2.52,1.98Z"/>
<path style="fill: #ff008c;" d="M13.87,1.69c.4-.02.74.17.99.54,2.03,2.92,4.06,5.85,6.08,8.77.42.61.28,1.33-.29,1.73-.56.39-1.31.24-1.74-.38-1.4-2.01-2.79-4.02-4.19-6.04-.03-.04-.05-.08-.08-.11-.55-.78-1.1-.78-1.64,0-1.41,2.03-2.82,4.06-4.23,6.09-.23.34-.52.59-.93.63-.51.06-.92-.13-1.19-.57-.28-.46-.25-.92.05-1.35.68-.98,1.36-1.96,2.04-2.93,1.34-1.93,2.68-3.85,4.02-5.78.26-.37.59-.6,1.11-.59Z"/>
<path d="M35.23,13.62h-2.01v-.77c-.07.01-.1,0-.13.02-1.64,1.16-3.39,1.25-5.12.34-1.74-.92-2.47-2.51-2.45-4.45.02-2.17,1.22-3.97,3.1-4.57,1.57-.5,3.09-.45,4.46.62.02.02.06.02.14.05v-.79h2.02v9.54ZM27.57,8.88c0,.11.01.31.03.5.14,1.39,1.18,2.39,2.61,2.51,1.4.12,2.53-.63,2.92-1.97.09-.32.14-.65.15-.98.09-2.15-1.5-3.51-3.56-3.07-1.28.27-2.13,1.43-2.15,3.02Z"/>
<path d="M53.61,5.93h-4.96v-1.85h7.76c0,.54.01,1.07,0,1.61,0,.12-.12.24-.21.34-1.52,1.79-3.05,3.57-4.57,5.36-.07.09-.14.18-.26.32h5.02v1.91h-7.83c0-.54-.01-1.08,0-1.61,0-.11.11-.22.19-.31,1.55-1.83,3.1-3.65,4.64-5.47.06-.07.11-.14.21-.28Z"/>
<path d="M39.6,13.69h-2.02V4.15h2.02v1.03c.54-.32,1.04-.68,1.58-.94.57-.28,1.19-.27,1.85-.23v1.96c-.1,0-.2.02-.3.02-1.58.02-2.68.9-3.01,2.46-.08.38-.11.77-.11,1.16-.01,1.24,0,2.47,0,3.71v.38Z"/>
<path d="M44.74,4.06h1.99v9.56h-1.99V4.06Z"/>
<path style="fill: #ff008c;" d="M13.82,13.02c.84,0,1.49.64,1.5,1.46,0,.83-.68,1.53-1.5,1.52-.82,0-1.47-.67-1.48-1.5,0-.83.64-1.48,1.47-1.48Z"/>
<path d="M47.13,1.41c0,.8-.61,1.43-1.39,1.43-.8,0-1.44-.63-1.44-1.43,0-.78.63-1.41,1.42-1.41.79,0,1.41.62,1.41,1.41Z"/>
</g>
</g>
</g>
</svg>


View File

@@ -0,0 +1,97 @@
<svg id="Layer_2" data-name="Layer 2" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 111 24" width="111" height="24">
<defs>
<linearGradient id="linear-gradient" x1="9.07" y1="1.47" x2="19.77" y2="20" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#11bab5"/>
<stop offset=".5" stop-color="#00adee"/>
<stop offset="1" stop-color="#0094c5"/>
</linearGradient>
<linearGradient id="linear-gradient-2" x1="19.48" y1="16.3" x2="10.46" y2=".67" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#fcfdff" stop-opacity="0"/>
<stop offset=".12" stop-color="#fcfdff" stop-opacity=".17"/>
<stop offset=".3" stop-color="#fcfdff" stop-opacity=".42"/>
<stop offset=".47" stop-color="#fcfdff" stop-opacity=".63"/>
<stop offset=".64" stop-color="#fcfdff" stop-opacity=".79"/>
<stop offset=".78" stop-color="#fcfdff" stop-opacity=".9"/>
<stop offset=".91" stop-color="#fcfdff" stop-opacity=".97"/>
<stop offset="1" stop-color="#fcfdff"/>
</linearGradient>
<linearGradient id="linear-gradient-3" x1="16.82" y1="16.21" x2="10.32" y2="4.94" xlink:href="#linear-gradient-2"/>
<linearGradient id="linear-gradient-4" x1="15.92" y1="17.03" x2="12.29" y2="10.74" xlink:href="#linear-gradient-2"/>
<linearGradient id="linear-gradient-5" x1="16.08" y1="18.3" x2="13.82" y2="14.38" xlink:href="#linear-gradient-2"/>
<linearGradient id="linear-gradient-6" x1="12.26" y1="17.94" x2="12.26" y2="22.07" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#231f20"/>
<stop offset=".03" stop-color="#231f20" stop-opacity=".9"/>
<stop offset=".22" stop-color="#231f20" stop-opacity=".4"/>
<stop offset=".42" stop-color="#231f20" stop-opacity=".1"/>
<stop offset=".65" stop-color="#231f20" stop-opacity="0"/>
</linearGradient>
<linearGradient id="Fade_to_Black_2" data-name="Fade to Black 2" x1="16.89" y1="17.55" x2="16.89" y2="19.66" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#231f20"/>
<stop offset="1" stop-color="#231f20" stop-opacity="0"/>
</linearGradient>
<linearGradient id="linear-gradient-7" x1="17.91" y1="12.1" x2="17.91" y2="10.38" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#231f20"/>
<stop offset=".03" stop-color="#231f20" stop-opacity=".95"/>
<stop offset=".51" stop-color="#231f20" stop-opacity=".26"/>
<stop offset=".81" stop-color="#231f20" stop-opacity="0"/>
</linearGradient>
<linearGradient id="linear-gradient-8" x1="21.67" y1="13.37" x2="21.67" y2="9.21" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#231f20"/>
<stop offset=".18" stop-color="#231f20" stop-opacity=".48"/>
<stop offset=".38" stop-color="#231f20" stop-opacity=".12"/>
<stop offset=".58" stop-color="#231f20" stop-opacity="0"/>
</linearGradient>
<linearGradient id="Fade_to_Black_2-2" data-name="Fade to Black 2" x1="25.54" y1="16.65" x2="24.1" y2="14.16" xlink:href="#Fade_to_Black_2"/>
<linearGradient id="linear-gradient-9" x1="26.7" y1="15.97" x2="24.55" y2="12.24" xlink:href="#linear-gradient-2"/>
<linearGradient id="linear-gradient-10" x1="21.11" y1="15.34" x2="24.24" y2="13.53" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#231f20"/>
<stop offset=".31" stop-color="#231f20" stop-opacity=".5"/>
<stop offset=".67" stop-color="#231f20" stop-opacity=".13"/>
<stop offset="1" stop-color="#231f20" stop-opacity="0"/>
</linearGradient>
</defs>
<g id="Layer_1-2" data-name="Layer 1">
<rect style="fill: none;" y=".04" width="111" height="24"/>
<g id="Phoenix_horiz_-_gradient" data-name="Phoenix horiz - gradient">
<path style="fill: url(#linear-gradient);" d="M26.87,15c-.06-.28-.2-.53-.42-.76-.08-.09-.18-.17-.28-.25-.05-.04-.1-.07-.15-.1-.07-.04-.14-.08-.21-.12-.28-.14-.51-.29-.72-.44-.08-.06-.17-.13-.24-.19-.19-.16-.31-.3-.4-.45-.03-.05-.06-.1-.08-.15-.01-.02-.03-.04-.05-.05-.03-.02-.08-.04-.12-.03-.07.01-.12.07-.13.13,0,.05,0,.11,0,.17,0,.03-.01.07-.04.08,0,0-.01,0-.02,0-.04,0-.08.03-.11.07-.03.04-.03.08-.02.13,0,.04.02.06.03.09v.02s.02.03.03.04c.02.04.04.08.07.12.05.07.1.14.17.22.02.02.02.06,0,.09-.02.03-.05.04-.08.04-.37-.05-.7-.13-1-.25-.04-.01-.07-.03-.11-.04-.28-.12.22-.61.45-.96,1-1.53,1.2-3.29,1.21-5.07,0-1.72-.18-3.41-.52-5.08h0c-.22-1.4-.41-1.93-.53-2.13-.03-.05-.05-.08-.08-.1-.03-.02-.05-.01-.05-.01-.06,0-.14.06-.23.15-.37.39-.35.86-.25,1.34.47,2.21.82,4.43.66,6.7-.11,1.56-.4,3.07-1.5,4.3-.07.07-.14.14-.21.21-.08.08-.21.24-.3.17-.13-.09-.01-.27.03-.36.64-1.5.82-3.07.73-4.69,0-.04,0-.07,0-.11h0s0-.06,0-.09c-.02-.21-.04-.43-.06-.64-.28-2.56-.61-2.73-.61-2.73h0s-.05-.03-.09-.04c-.17-.04-.27.12-.35.23-.35.52-.19,1.07-.08,1.62.39,1.92.4,3.82-.28,5.69-.06.17-.14.34-.21.5-.08.17-.14.39-.42.3-.26-.09-.19-.29-.16-.47.09-.47.16-.93.2-1.4h0s0-.09.01-.13c0-.08.01-.16.02-.24,0-.07,0-.13.01-.2,0-.03,0-.07,0-.1.05-1.48-.2-2.16-.29-2.36-.01-.02-.02-.05-.03-.07h0s0,0,0,0c-.1-.15-.26-.13-.4,0-.26.25-.4.56-.33.93.19,1.1.08,2.2-.13,3.28-.03.15-.09.41-.4.34-.21-.05-.26-.14-.27-.32,0-.14,0-.28,0-.42,0,0,0,0,0-.01h0s0-.04,0-.06h0s0-.02,0-.03c0-.19-.02-.38-.05-.57,0-.02,0-.04,0-.05-.12-1.07-.27-1.17-.27-1.17-.1-.13-.25-.11-.39.03-.26.25-.39.55-.33.93.04.28.03.19.06.43.02.12.05.35.05.42,0,.06-.02.12-.06.16-.13.14-.34,0-.5-.07-3.12-1.29-5.79-3.1-8.12-5.4-.92-.94-2.48-2.83-2.48-2.83h0c-.06-.07-.13-.11-.24-.06-.2.09-.2.35-.22.57-.04.44.2.76.45,1.06,3.52,4.11,7.82,7.06,13,8.54l.93.25s.02,0,.03,0c0,0,0,0,0,0l.8.21h.02s-.06.02-.09.03c-.88.17-2.63-.15-4.04-.47-.7-.15-1.38-.32-2.05-.53-.03,0-.05-.01-.05-.01h0c-2.6-.83-4.97-2.17-7.05-4.21-.2-.19-.38-.4-.56-.61-.16-.2-.5-.61-.62-.75-.01-.02-.03-.03-.04-.05h0s0,0,0,0c-.04-.04-.08-.06-.14-.05-.14.03-.19.18-.21.31-.11.51.05.93.39,1.31,2.13,2.39,4.8,3.91,7.81,4.91,1.71.57,3.46.91,5.25,1.13-1.07.35-2.16.45-3.27.42-2.14-.05-4.15-.57-6.04-1.49-.7-.36-1.34-.76-1.52-.86-.1-.07-.2-.11-.3-.03-.19.15-.06.4,0,.6.12.38.38.65.73.83,3.08,1.63,6.32,2.28,9.78,1.7.35-.06.68-.12,1.05-.25-.58.5-1.25.83-1.95,1.08-2.17.79-4.32.76-6.46.22-.69-.18-1.49-.48-1.49-.48h0c-.12-.05-.23-.07-.32.06-.16.22-.02.46.12.67.32.5.94.55,1.43.72.17.05-.29.36-.43.47-.71.54-1.4,1.04-2.11,1.56l-.46.34h0c-.08.05-.15.12-.11.23.04.13.18.15.31.18.51.1.94-.06,1.33-.36.86-.64,1.73-1.26,2.56-1.93.32-.25.61-.23,1.04-.12-1.09.82-2.11,1.59-3.13,2.36-1.03.78-2.07,1.56-3.1,2.33l-.68.51s-.02.01-.03.02h-.01s0,0,0,0c-.06.05-.1.1-.09.17.03.15.21.19.36.22.61.11.99-.12,1.41-.44,2.09-1.57,4.19-3.13,6.27-4.71.47-.36.95-.56,1.62-.48-.57.43-1.12.84-1.67,1.25l-1.47,1.09s-.02.02-.03.02h0c-.08.06-.13.13-.1.24.06.19.29.22.49.25.37.05.68-.08.96-.29.17-.12.33-.24.5-.37h0s2.25-1.79,2.25-1.79c0,0,.53-.38,1.15-.73.05-.03.11-.07.17-.1.02-.01.08-.04.17-.08.07-.03.13-.07.2-.1.1-.05.21-.11.33-.18.36-.15,1.4-.64,2.26-1.55.41-.32.98-.73,1.43-.92h0s0,0,0,0c.03-.02.07-.03.1-.04h0s0,0,0,0c.02,0,.04-.02.06-.02l.06-.02c.16-.05.43-.06.46.29,0,.09,0,.18,0,.27,0,.07-.02.15-.03.21-.01.06.01.12.06.15,0,0,.02.01.02.01.06.03.14.02.18-.03.02-.02.03-.04.05-.06.22-.23.44-.39.68-.49.35-.14.7-.14,1.07,0,.18.07.35.16.51.28.07.05.14.11.21.17.04.04.08.08.12.12h0s.03.04.03.04c0,0,.01.01.02.02.04.03.08.04.12.03.05-.01.09-.05.11-.1,0,0,0-.02.01-.03.11-.31.13-.6.07-.89Z"/>
<path style="fill: #fddab4;" d="M20.22,11.62c-.03.16-.05.31-.08.47.03-.16.06-.31.08-.47h0Z"/>
<path style="fill: #fddab4;" d="M22.99,13.35s.02,0,.03.01c-.01,0-.02,0-.03-.01"/>
<polyline style="fill: #fddab4;" points="21.68 12.5 21.68 12.5 21.68 12.5"/>
<polyline style="fill: #fddab4;" points="21.73 12.39 21.73 12.39 21.73 12.39"/>
<polyline style="fill: #fddab4;" points="21.74 12.37 21.73 12.38 21.74 12.37"/>
<polyline style="fill: #fddab4;" points="23.45 12.37 23.45 12.38 23.45 12.37"/>
<path style="fill: #fddab4;" d="M20.72,12.26s-.04.08-.06.12c.02-.04.04-.08.06-.12"/>
<polyline style="fill: #fddab4;" points="18.86 12.17 18.86 12.17 18.86 12.17"/>
<polyline style="fill: #fddab4;" points="18.58 12.04 18.58 12.04 18.58 12.04"/>
<polyline style="fill: #fddab4;" points="18.58 12.04 18.58 12.04 18.58 12.04"/>
<polyline style="fill: #fddab4;" points="18.58 12.04 18.58 12.04 18.58 12.04"/>
<polyline style="fill: #fddab4;" points="18.58 12.04 18.58 12.04 18.58 12.04"/>
<polyline style="fill: #fddab4;" points="18.58 12.03 18.58 12.04 18.58 12.03"/>
<polyline style="fill: #fddab4;" points="18.52 11.84 18.52 11.84 18.52 11.84"/>
<path style="fill: #fddab4;" d="M19.22,11.72s-.01.06-.02.09c0-.03.01-.06.02-.09"/>
<path style="fill: #fddab4;" d="M17.51,11.51s-.04.04-.07.05c.02,0,.05-.02.07-.05"/>
<path style="fill: #e5b3a5;" d="M18.58,12.04s.04.04.07.06h0s-.05-.04-.07-.06M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.57,12.03s0,0,0,0c0,0,0,0,0,0M18.52,11.84s0,0,0,0c0,0,0,0,0,0M18.52,11.84h0s0,0,0,0c0,0,0,0,0,0M17.17,11.51s0,0,0,0c.06.03.13.05.19.05.03,0,.05,0,.07-.01-.02,0-.05.01-.07.01-.06,0-.13-.02-.19-.05M18.52,11.31s0,0,0,0c0,0,0,0,0,0"/>
<path style="fill: #e5b3a5;" d="M23.02,13.37s.01,0,.02,0h0s-.01,0-.02,0M21.78,12.86s0,0,0,0c0,0,0,0,0,0M21.59,12.76s.02.08.06.11c.02.01.03.02.05.02.03,0,.05-.01.08-.03-.03.02-.05.03-.08.03-.02,0-.03,0-.05-.02-.04-.03-.06-.07-.06-.11M21.68,12.5s0,.01,0,.02c0,0,0-.01,0-.02M21.73,12.39s-.03.07-.04.11c.01-.03.03-.07.04-.11M21.73,12.38s0,0,0,.01c0,0,0,0,0-.01M23.45,12.38s0,0,0,0c0,0,0,0,0,0M23.45,12.37s0,0,0,0c0,0,0,0,0,0M21.74,12.37s0,0,0,0c0,0,0,0,0,0M20.1,12.32c0,.1.04.19.19.24.05.02.09.02.12.02.13,0,.19-.09.24-.2-.05.1-.11.2-.24.2-.04,0-.08,0-.12-.02-.15-.05-.19-.14-.19-.24M18.86,12.17h0,0M19.11,12.07c-.05.06-.11.1-.22.1-.01,0-.02,0-.03,0,.01,0,.02,0,.03,0,.1,0,.17-.04.22-.1M21.08,11.32s0,0,0,0c-.05.15-.09.3-.15.44-.06.17-.14.34-.21.5h0c.08-.16.15-.33.21-.5.05-.15.1-.3.15-.45M19.3,11.23c-.02.16-.05.33-.08.49.03-.16.06-.33.08-.49h0M23.28,10.45s0,0,0,0c-.22.73-.57,1.42-1.12,2.04-.07.07-.14.14-.21.21h0c.07-.07.14-.14.21-.21.55-.62.9-1.31,1.12-2.04"/>
<path style="fill: url(#linear-gradient-2); opacity: .4;" d="M6.25,3.13s-.06,0-.09.02c-.14.06-.18.21-.2.37.78,1.02,3.84,4.77,8.67,7.35,0,0,4.72,2.49,9.48,2.76-.37-.05-.7-.13-1-.25-.02,0-.04-.01-.05-.02h0c-1.73-.37-3.2-.84-4.24-1.21.02,0,.04,0,.06,0-.02,0-.04,0-.06,0-.06-.01-.11-.03-.15-.05-.82-.3-1.35-.53-1.47-.59-.06-.03-.12-.06-.17-.08-3.12-1.29-5.79-3.1-8.12-5.4-.92-.94-2.48-2.83-2.48-2.83h0s-.09-.08-.15-.08"/>
<path style="fill: url(#linear-gradient-3); opacity: .4;" d="M6.59,7.12s-.02,0-.03,0c-.14.03-.19.18-.21.31,0,.02,0,.05-.01.07,2.41,3.27,5.65,4.6,5.65,4.6,3.23,1.52,5.88,1.83,7.5,1.83.67,0,1.16-.05,1.44-.09-.13.01-.27.02-.42.02-.95,0-2.3-.26-3.43-.52-.7-.15-1.38-.32-2.05-.53-.03,0-.05-.01-.05-.01h0c-2.6-.83-4.97-2.17-7.05-4.21-.2-.19-.38-.4-.56-.61-.16-.2-.5-.61-.62-.75-.01-.02-.03-.03-.04-.05h0s0,0,0,0c-.03-.03-.06-.05-.1-.05"/>
<path style="fill: url(#linear-gradient-4); opacity: .4;" d="M8.78,12.8s-.08.01-.12.04c-.16.13-.09.34-.02.52,1.21.73,3.98,2.16,7.27,2.16,1.24,0,2.55-.2,3.88-.72-.96.31-1.94.43-2.94.43-.11,0-.22,0-.33,0-2.14-.05-4.15-.57-6.04-1.49-.7-.36-1.34-.76-1.52-.86-.06-.04-.12-.07-.18-.07"/>
<path style="fill: url(#linear-gradient-5); opacity: .4;" d="M20.02,15.9c-.53.4-1.12.68-1.74.91-1.16.42-2.32.61-3.47.61-1,0-1.99-.14-2.99-.39-.69-.18-1.49-.48-1.49-.48h0c-.05-.02-.1-.04-.15-.04-.06,0-.12.03-.17.1-.07.1-.08.21-.06.32.76.35,2.4.98,4.39.98,1.73,0,3.73-.47,5.67-2.01"/>
<path style="fill: url(#linear-gradient-6); opacity: .4;" d="M14.87,18.37c-1.87,0-3.29-.38-3.47-.43.05.02.1.03.15.05.17.05-.29.36-.43.47-.52.4-1.04.78-1.56,1.16h1.59c.5-.37,1-.74,1.49-1.13.18-.14.35-.2.55-.2.15,0,.31.03.49.08-1.09.82-2.11,1.59-3.13,2.36-.6.45-1.19.9-1.79,1.34h1.6c1.44-1.08,2.88-2.15,4.31-3.24.33-.25.67-.42,1.07-.48-.3.02-.59.03-.88.03Z"/>
<path style="fill: url(#Fade_to_Black_2); opacity: .4;" d="M14.54,19.66h1.55l1.14-.91s.53-.38,1.15-.73c.05-.03.11-.07.17-.1.02-.01.08-.04.17-.08.07-.03.13-.07.2-.1.1-.05.22-.11.35-.19-1.1.46-2.23.69-3.28.77.01,0,.03,0,.04,0,.09,0,.18,0,.27.02-.57.43-1.12.84-1.67,1.25l-.09.07Z"/>
<path style="fill: url(#linear-gradient-7); opacity: .4;" d="M18.52,11.84c0-.14,0-.28,0-.42h0s0-.01,0-.01c0-.02,0-.04,0-.06h0s0-.03,0-.03c0-.12-.01-.23-.03-.35-.37-.16-.72-.35-1.04-.59,0,.03,0,.07,0,.1.04.28.03.19.06.43.02.12.05.35.05.42,0,.06-.02.12-.06.16-.04.04-.09.06-.14.06-.06,0-.13-.02-.19-.05.12.06.65.29,1.48.59-.09-.05-.12-.14-.12-.27Z"/>
<path style="fill: url(#linear-gradient-8); opacity: .4;" d="M24.54,9.21c-.34.48-.77.9-1.26,1.24-.22.73-.57,1.42-1.12,2.04-.07.07-.14.14-.21.21-.07.07-.17.19-.25.19-.02,0-.03,0-.05-.02-.13-.09-.01-.27.03-.36.21-.48.37-.98.48-1.47-.35.13-.71.22-1.08.27-.05.15-.09.3-.15.44-.06.17-.14.34-.21.5-.07.14-.12.32-.3.32-.04,0-.08,0-.12-.02-.26-.09-.19-.29-.16-.47.05-.24.09-.49.12-.73-.33-.01-.65-.05-.96-.12-.03.19-.06.39-.1.58-.02.13-.08.35-.31.35-.03,0-.06,0-.09-.01,1.04.37,2.51.84,4.24,1.21h0s-.03-.01-.05-.02c-.28-.12.22-.61.45-.96.64-.98.95-2.06,1.1-3.18Z"/>
<path style="fill: url(#Fade_to_Black_2-2); opacity: .5;" d="M24.28,14.56c-.22,0-.43.02-.65.06-.04,0-.08.01-.12.01-.06,0-.12,0-.17-.03,0,.01.02.02.03.03.09.13.14.27.16.42.02.03.04.06.06.1h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0t0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,.07,0,.1c0,.05,0,.11,0,.17,0,.07-.02.15-.03.21,0,0,0,.02,0,.03,0,.05.02.09.06.12h.02s.05.03.07.03c.04,0,.08-.02.11-.05.02-.02.03-.04.05-.06.22-.23.44-.39.68-.49.17-.07.35-.11.53-.11s.36.04.55.11c.18.07.35.16.51.28.07.05.14.11.21.17.04.04.08.08.12.12h0s.03.04.03.04l.02.02s.06.03.09.03c-.15-.32-.33-.61-.6-.84-.5-.43-1.13-.62-1.77-.62"/>
<path style="fill: url(#linear-gradient-9); opacity: .4;" d="M24.22,12.45h-.03c-.07.01-.12.07-.13.14,0,.05,0,.11,0,.17,0,.02,0,.04-.01.05.27.4.84,1.1,1.72,1.36,0,0,1.36.71,1.01,1.74h0v-.03c.12-.31.14-.6.08-.89-.06-.28-.2-.53-.42-.76-.08-.09-.18-.17-.28-.25-.05-.04-.1-.07-.15-.1-.07-.04-.14-.08-.21-.12-.28-.14-.51-.29-.72-.44-.08-.06-.17-.13-.24-.19-.19-.16-.31-.3-.4-.45-.03-.05-.06-.1-.08-.15-.01-.02-.03-.04-.05-.05-.03-.02-.06-.03-.09-.03"/>
<path style="fill: url(#linear-gradient-10); opacity: .5;" d="M22.5,15.32l.28-.16,1.16-1.55s-.63-.12-.94-.26c0,0-.32,1.45-1.49,2.66l.43-.32c.17-.12.33-.23.56-.37Z"/>
<g>
<path style="fill: #404041;" d="M38,7.1h2.95c2.13,0,3.11,1.1,3.11,3.35v1.12c0,2.25-.98,3.35-3.11,3.35h-1.71v6.14h-1.24V7.1ZM40.95,13.78c1.3,0,1.87-.58,1.87-2.15v-1.26c0-1.55-.58-2.15-1.87-2.15h-1.71v5.56h1.71Z"/>
<path style="fill: #404041;" d="M48.61,7.1h1.24v6.12h3.81v-6.12h1.24v13.95h-1.24v-6.72h-3.81v6.72h-1.24V7.1Z"/>
<path style="fill: #404041;" d="M59.73,17.84v-7.54c0-2.21,1.12-3.41,3.13-3.41s3.13,1.2,3.13,3.41v7.54c0,2.21-1.12,3.41-3.13,3.41s-3.13-1.2-3.13-3.41ZM64.75,17.92v-7.69c0-1.5-.68-2.21-1.89-2.21s-1.89.72-1.89,2.21v7.69c0,1.5.68,2.19,1.89,2.19s1.89-.7,1.89-2.19Z"/>
<path style="fill: #404041;" d="M70.85,7.1h5.58v1.12h-4.35v5h3.57v1.12h-3.57v5.58h4.35v1.14h-5.58V7.1Z"/>
<path style="fill: #404041;" d="M80.9,7.1h1.63l3.63,10.78V7.1h1.16v13.95h-1.32l-3.97-11.9v11.9h-1.14V7.1Z"/>
<path style="fill: #404041;" d="M92.32,7.1h1.24v13.95h-1.24V7.1Z"/>
<path style="fill: #404041;" d="M100.79,13.94l-2.73-6.84h1.32l2.19,5.54,2.21-5.54h1.2l-2.73,6.84,2.85,7.12h-1.32l-2.31-5.86-2.33,5.86h-1.2l2.85-7.12Z"/>
</g>
</g>
</g>
</svg>


View File

@@ -0,0 +1,97 @@
<svg id="Layer_2" data-name="Layer 2" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 74 16" width="74" height="16">
<defs>
<linearGradient id="linear-gradient" x1="6.05" y1=".98" x2="13.18" y2="13.34" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#11bab5"/>
<stop offset=".5" stop-color="#00adee"/>
<stop offset="1" stop-color="#0094c5"/>
</linearGradient>
<linearGradient id="linear-gradient-2" x1="12.99" y1="10.87" x2="6.97" y2=".45" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#fcfdff" stop-opacity="0"/>
<stop offset=".12" stop-color="#fcfdff" stop-opacity=".17"/>
<stop offset=".3" stop-color="#fcfdff" stop-opacity=".42"/>
<stop offset=".47" stop-color="#fcfdff" stop-opacity=".63"/>
<stop offset=".64" stop-color="#fcfdff" stop-opacity=".79"/>
<stop offset=".78" stop-color="#fcfdff" stop-opacity=".9"/>
<stop offset=".91" stop-color="#fcfdff" stop-opacity=".97"/>
<stop offset="1" stop-color="#fcfdff"/>
</linearGradient>
<linearGradient id="linear-gradient-3" x1="11.21" y1="10.8" x2="6.88" y2="3.29" xlink:href="#linear-gradient-2"/>
<linearGradient id="linear-gradient-4" x1="10.62" y1="11.35" x2="8.2" y2="7.16" xlink:href="#linear-gradient-2"/>
<linearGradient id="linear-gradient-5" x1="10.72" y1="12.2" x2="9.21" y2="9.59" xlink:href="#linear-gradient-2"/>
<linearGradient id="linear-gradient-6" x1="8.17" y1="11.96" x2="8.17" y2="14.71" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#231f20"/>
<stop offset=".03" stop-color="#231f20" stop-opacity=".9"/>
<stop offset=".22" stop-color="#231f20" stop-opacity=".4"/>
<stop offset=".42" stop-color="#231f20" stop-opacity=".1"/>
<stop offset=".65" stop-color="#231f20" stop-opacity="0"/>
</linearGradient>
<linearGradient id="Fade_to_Black_2" data-name="Fade to Black 2" x1="11.26" y1="11.7" x2="11.26" y2="13.1" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#231f20"/>
<stop offset="1" stop-color="#231f20" stop-opacity="0"/>
</linearGradient>
<linearGradient id="linear-gradient-7" x1="11.94" y1="8.07" x2="11.94" y2="6.92" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#231f20"/>
<stop offset=".03" stop-color="#231f20" stop-opacity=".95"/>
<stop offset=".51" stop-color="#231f20" stop-opacity=".26"/>
<stop offset=".81" stop-color="#231f20" stop-opacity="0"/>
</linearGradient>
<linearGradient id="linear-gradient-8" x1="14.45" y1="8.92" x2="14.45" y2="6.14" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#231f20"/>
<stop offset=".18" stop-color="#231f20" stop-opacity=".48"/>
<stop offset=".38" stop-color="#231f20" stop-opacity=".12"/>
<stop offset=".58" stop-color="#231f20" stop-opacity="0"/>
</linearGradient>
<linearGradient id="Fade_to_Black_2-2" data-name="Fade to Black 2" x1="17.03" y1="11.1" x2="16.07" y2="9.44" xlink:href="#Fade_to_Black_2"/>
<linearGradient id="linear-gradient-9" x1="17.8" y1="10.64" x2="16.36" y2="8.16" xlink:href="#linear-gradient-2"/>
<linearGradient id="linear-gradient-10" x1="14.07" y1="10.22" x2="16.16" y2="9.02" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#231f20"/>
<stop offset=".31" stop-color="#231f20" stop-opacity=".5"/>
<stop offset=".67" stop-color="#231f20" stop-opacity=".13"/>
<stop offset="1" stop-color="#231f20" stop-opacity="0"/>
</linearGradient>
</defs>
<g id="Layer_1-2" data-name="Layer 1">
<rect style="fill: none;" y=".02" width="74" height="16"/>
<g id="Phoenix_horiz_-_gradient" data-name="Phoenix horiz - gradient">
<path style="fill: url(#linear-gradient);" d="M17.91,10c-.04-.19-.14-.36-.28-.51-.06-.06-.12-.11-.19-.16-.03-.02-.07-.05-.1-.07-.04-.03-.09-.05-.14-.08-.19-.09-.34-.19-.48-.29-.06-.04-.11-.09-.16-.13-.12-.11-.21-.2-.27-.3-.02-.03-.04-.07-.05-.1,0-.01-.02-.03-.03-.04-.02-.02-.05-.02-.08-.02-.04,0-.08.04-.08.09,0,.03,0,.07,0,.11,0,.02,0,.04-.03.05,0,0,0,0-.02,0-.03,0-.06.02-.07.04-.02.02-.02.05-.02.08,0,.03.01.04.02.06h0s0,.03.01.04c.01.03.03.05.05.08.03.05.07.09.11.15.01.02.02.04,0,.06-.01.02-.03.03-.05.03-.25-.03-.46-.09-.67-.16-.02,0-.05-.02-.07-.03-.18-.08.15-.41.3-.64.66-1.02.8-2.19.81-3.38,0-1.15-.12-2.27-.35-3.39h0c-.15-.94-.27-1.29-.35-1.42-.02-.03-.04-.06-.06-.07-.02-.01-.03,0-.03,0-.04,0-.09.04-.15.1-.25.26-.23.57-.17.89.31,1.47.55,2.95.44,4.47-.07,1.04-.27,2.05-1,2.86-.04.05-.09.1-.14.14-.05.05-.14.16-.2.11-.08-.06,0-.18.02-.24.43-1,.55-2.05.48-3.12,0-.02,0-.05,0-.07h0s0-.04,0-.06c-.01-.14-.02-.28-.04-.43-.18-1.71-.4-1.82-.4-1.82h0s-.03-.02-.06-.03c-.12-.02-.18.08-.23.15-.23.35-.13.72-.05,1.08.26,1.28.27,2.55-.18,3.79-.04.11-.09.22-.14.33-.05.12-.09.26-.28.2-.18-.06-.13-.19-.1-.31.06-.31.11-.62.14-.93h0s0-.06,0-.09c0-.05,0-.11.01-.16,0-.05,0-.09,0-.13,0-.02,0-.04,0-.07.04-.99-.13-1.44-.19-1.57,0-.02-.01-.03-.02-.04h0s0,0,0,0c-.07-.1-.17-.09-.27,0-.17.17-.27.37-.22.62.13.74.05,1.46-.08,2.19-.02.1-.06.27-.27.23-.14-.03-.17-.09-.18-.21,0-.09,0-.19,0-.28,0,0,0,0,0,0h0s0-.03,0-.04h0s0-.01,0-.02c0-.13-.02-.25-.03-.38,0-.01,0-.02,0-.04-.08-.71-.18-.78-.18-.78-.07-.09-.17-.07-.26.02-.17.17-.26.37-.22.62.03.19.02.13.04.29.01.08.03.23.03.28,0,.04-.01.08-.04.11-.09.09-.23,0-.34-.05-2.08-.86-3.86-2.07-5.42-3.6-.61-.62-1.65-1.88-1.65-1.88h0s-.09-.07-.16-.04c-.14.06-.13.23-.14.38-.02.29.13.51.3.7,2.35,2.74,5.21,4.71,8.67,5.69l.62.16s.01,0,.02,0c0,0,0,0,0,0l.54.14h.01s-.04.01-.06.02c-.59.12-1.76-.1-2.69-.32-.46-.1-.92-.22-1.36-.36-.02,0-.03,0-.03,0h0c-1.73-.55-3.32-1.44-4.7-2.81-.13-.13-.25-.27-.37-.41-.11-.13-.34-.4-.41-.5,0-.01-.02-.02-.03-.03h0s0,0,0,0c-.02-.02-.05-.04-.09-.03-.1.02-.12.12-.14.21-.07.34.04.62.26.88,1.42,1.59,3.2,2.61,5.21,3.28,1.14.38,2.31.61,3.5.76-.71.23-1.44.3-2.18.28-1.43-.04-2.77-.38-4.03-.99-.46-.24-.9-.5-1.01-.58-.07-.04-.13-.07-.2-.02-.13.1-.04.27,0,.4.08.26.25.43.49.55,2.05,1.08,4.22,1.52,6.52,1.13.23-.04.45-.08.7-.16-.39.33-.83.55-1.3.72-1.44.53-2.88.51-4.31.15-.46-.12-.99-.32-.99-.32h0c-.08-.03-.15-.05-.21.04-.1.15-.01.3.08.45.21.33.63.36.95.48.11.04-.19.24-.29.31-.47.36-.94.69-1.41,1.04l-.31.23h0c-.05.04-.1.08-.08.15.03.09.12.1.2.12.34.07.62-.04.89-.24.57-.43,1.15-.84,1.71-1.28.21-.17.4-.15.7-.08-.73.55-1.4,1.06-2.08,1.57-.69.52-1.38,1.04-2.07,1.56l-.46.34s-.01,0-.02.01h0s0,0,0,0c-.04.03-.07.07-.06.11.02.1.14.13.24.15.41.07.66-.08.94-.29,1.39-1.05,2.79-2.09,4.18-3.14.31-.24.63-.37,1.08-.32-.38.29-.75.56-1.11.83l-.98.73s-.01.01-.02.02h0c-.05.04-.09.09-.07.16.04.13.19.15.33.17.24.04.45-.05.64-.19.11-.08.22-.16.33-.24h0s1.5-1.19,1.5-1.19c0,0,.35-.25.76-.49.04-.02.07-.05.11-.07.02,0,.05-.02.11-.05.04-.02.09-.05.13-.07.06-.03.14-.07.22-.12.24-.1.93-.42,1.51-1.03.27-.21.66-.48.95-.62h0s0,0,0,0c.02-.01.04-.02.07-.03h0s0,0,0,0c.01,0,.03-.01.04-.01h.04c.11-.05.28-.06.31.18,0,.06,0,.12,0,.18,0,.05-.01.1-.02.14,0,.04,0,.08.04.1,0,0,.01,0,.02,0,.04.02.09.01.12-.02.01-.01.02-.03.04-.04.15-.15.3-.26.45-.33.23-.1.47-.1.72,0,.12.05.23.11.34.19.05.03.1.07.14.12.03.03.06.05.08.08h0s.02.02.02.02c0,0,0,0,.01.01.02.02.05.02.08.02.03,0,.06-.03.07-.06,0,0,0-.01,0-.02.07-.21.09-.4.04-.59Z"/>
<path style="fill: #fddab4;" d="M13.48,7.75c-.02.1-.03.21-.05.31.02-.1.04-.21.05-.31h0Z"/>
<path style="fill: #fddab4;" d="M15.33,8.9s.02,0,.02,0c0,0-.02,0-.02,0"/>
<polyline style="fill: #fddab4;" points="14.46 8.33 14.46 8.33 14.46 8.33"/>
<polyline style="fill: #fddab4;" points="14.48 8.26 14.48 8.26 14.48 8.26"/>
<polyline style="fill: #fddab4;" points="14.49 8.25 14.49 8.25 14.49 8.25"/>
<polyline style="fill: #fddab4;" points="15.64 8.25 15.63 8.25 15.64 8.25"/>
<path style="fill: #fddab4;" d="M13.81,8.17s-.02.06-.04.08c.01-.03.02-.06.04-.08"/>
<polyline style="fill: #fddab4;" points="12.57 8.11 12.57 8.11 12.57 8.11"/>
<polyline style="fill: #fddab4;" points="12.39 8.03 12.39 8.03 12.39 8.03"/>
<polyline style="fill: #fddab4;" points="12.39 8.03 12.39 8.03 12.39 8.03"/>
<polyline style="fill: #fddab4;" points="12.39 8.03 12.39 8.03 12.39 8.03"/>
<polyline style="fill: #fddab4;" points="12.38 8.02 12.38 8.03 12.38 8.02"/>
<polyline style="fill: #fddab4;" points="12.38 8.02 12.38 8.02 12.38 8.02"/>
<polyline style="fill: #fddab4;" points="12.35 7.89 12.35 7.89 12.35 7.89"/>
<path style="fill: #fddab4;" d="M12.81,7.81s0,.04-.01.06c0-.02,0-.04.01-.06"/>
<path style="fill: #fddab4;" d="M11.67,7.67s-.03.02-.05.03c.02,0,.03-.02.05-.03"/>
<path style="fill: #e5b3a5;" d="M12.39,8.03s.03.03.05.04h0s-.03-.03-.05-.04M12.39,8.03s0,0,0,0c0,0,0,0,0,0M12.39,8.03s0,0,0,0c0,0,0,0,0,0M12.38,8.03s0,0,0,0c0,0,0,0,0,0M12.38,8.02s0,0,0,0c0,0,0,0,0,0M12.38,8.02s0,0,0,0c0,0,0,0,0,0M12.35,7.89s0,0,0,0c0,0,0,0,0,0M12.35,7.89h0s0,0,0,0c0,0,0,0,0,0M11.45,7.68s0,0,0,0c.04.02.09.03.13.03.02,0,.03,0,.05,0-.02,0-.03,0-.05,0-.04,0-.09-.02-.13-.03M12.34,7.54s0,0,0,0c0,0,0,0,0,0"/>
<path style="fill: #e5b3a5;" d="M15.35,8.91s0,0,.01,0h0s0,0-.01,0M14.52,8.58s0,0,0,0c0,0,0,0,0,0M14.39,8.51s.01.06.04.08c.01,0,.02.01.03.01.02,0,.04,0,.05-.02-.02.01-.04.02-.05.02-.01,0-.02,0-.03-.01-.03-.02-.04-.05-.04-.08M14.46,8.33s0,0,0,.01c0,0,0,0,0-.01M14.48,8.26s-.02.05-.03.07c0-.02.02-.05.03-.07M14.49,8.25s0,0,0,0c0,0,0,0,0,0M15.63,8.25s0,0,0,0c0,0,0,0,0,0M15.64,8.24s0,0,0,0c0,0,0,0,0,0M14.49,8.24s0,0,0,0c0,0,0,0,0,0M13.4,8.21c0,.07.03.13.13.16.03.01.06.01.08.01.09,0,.13-.06.16-.13-.03.07-.07.13-.16.13-.02,0-.05,0-.08-.01-.1-.03-.13-.09-.13-.16M12.57,8.11h0,0M12.74,8.04s-.08.07-.14.07c0,0-.01,0-.02,0,0,0,.01,0,.02,0,.07,0,.11-.03.14-.07M14.05,7.54s0,0,0,0c-.03.1-.06.2-.1.3-.04.11-.09.22-.14.33h0c.05-.11.1-.22.14-.33.04-.1.07-.2.1-.3M12.87,7.49c-.02.11-.03.22-.05.33.02-.11.04-.22.06-.33h0M15.52,6.97s0,0,0,0c-.15.49-.38.95-.75,1.36-.04.05-.09.1-.14.14h0s.09-.09.14-.14c.37-.41.6-.87.75-1.36"/>
<path style="fill: url(#linear-gradient-2); opacity: .4;" d="M4.17,2.09s-.04,0-.06.01c-.1.04-.12.14-.13.25.52.68,2.56,3.18,5.78,4.9,0,0,3.15,1.66,6.32,1.84-.25-.03-.46-.09-.67-.16-.01,0-.02,0-.04-.01h0c-1.15-.25-2.13-.56-2.83-.81.01,0,.03,0,.04,0-.01,0-.03,0-.04,0-.04,0-.07-.02-.1-.04-.55-.2-.9-.35-.98-.39-.04-.02-.08-.04-.12-.05-2.08-.86-3.86-2.07-5.42-3.6-.61-.62-1.65-1.88-1.65-1.88h0s-.06-.05-.1-.05"/>
<path style="fill: url(#linear-gradient-3); opacity: .4;" d="M4.39,4.75s-.01,0-.02,0c-.1.02-.12.12-.14.21,0,.02,0,.03,0,.05,1.61,2.18,3.77,3.07,3.77,3.07,2.15,1.01,3.92,1.22,5,1.22.45,0,.77-.04.96-.06-.09,0-.18.01-.28.01-.63,0-1.53-.17-2.29-.35-.46-.1-.92-.22-1.36-.36-.02,0-.03,0-.03,0h0c-1.73-.55-3.32-1.44-4.7-2.81-.13-.13-.25-.27-.37-.41-.11-.13-.34-.4-.41-.5,0-.01-.02-.02-.03-.03h0s0,0,0,0c-.02-.02-.04-.03-.07-.03"/>
<path style="fill: url(#linear-gradient-4); opacity: .4" d="M5.86,8.53s-.05,0-.08.03c-.11.08-.06.22-.02.35.81.49,2.66,1.44,4.84,1.44.83,0,1.7-.14,2.59-.48-.64.21-1.3.28-1.96.28-.07,0-.15,0-.22,0-1.43-.04-2.77-.38-4.03-.99-.46-.24-.9-.5-1.01-.58-.04-.03-.08-.05-.12-.05"/>
<path style="fill: url(#linear-gradient-5); opacity: .4;" d="M13.34,10.6c-.35.27-.75.46-1.16.61-.77.28-1.55.41-2.32.41-.67,0-1.33-.09-1.99-.26-.46-.12-.99-.32-.99-.32h0s-.07-.03-.1-.03c-.04,0-.08.02-.11.06-.05.07-.05.14-.04.21.51.24,1.6.66,2.93.66,1.15,0,2.49-.32,3.78-1.34"/>
<path style="fill: url(#linear-gradient-6); opacity: .4;" d="M9.91,12.25c-1.25,0-2.19-.25-2.31-.29.04.01.07.02.1.03.11.04-.19.24-.29.31-.35.27-.69.52-1.04.77h1.06c.33-.25.67-.5.99-.75.12-.1.24-.13.37-.13.1,0,.2.02.33.05-.73.55-1.4,1.06-2.08,1.57-.4.3-.79.6-1.19.9h1.07c.96-.72,1.92-1.44,2.87-2.16.22-.17.44-.28.71-.32-.2.01-.4.02-.58.02Z"/>
<path style="fill: url(#Fade_to_Black_2); opacity: .4;" d="M9.69,13.1h1.03l.76-.6s.35-.25.76-.49c.04-.02.07-.05.11-.07.02,0,.05-.02.11-.05.04-.02.09-.05.13-.07.07-.03.15-.08.23-.12-.73.31-1.49.46-2.18.52,0,0,.02,0,.03,0,.06,0,.12,0,.18.01-.38.29-.75.56-1.11.83l-.06.05Z"/>
<path style="fill: url(#linear-gradient-7); opacity: .4;" d="M12.35,7.89c0-.09,0-.19,0-.28h0s0,0,0,0c0-.01,0-.03,0-.04h0s0-.02,0-.02c0-.08,0-.16-.02-.23-.25-.1-.48-.24-.69-.39,0,.02,0,.04,0,.07.03.19.02.13.04.29.01.08.03.23.03.28,0,.04-.01.08-.04.11-.03.03-.06.04-.09.04-.04,0-.09-.02-.13-.03.08.04.43.19.98.39-.06-.04-.08-.09-.08-.18Z"/>
<path style="fill: url(#linear-gradient-8); opacity: .4;" d="M16.36,6.14c-.23.32-.51.6-.84.83-.15.49-.38.95-.75,1.36-.04.05-.09.1-.14.14-.05.04-.11.12-.17.12-.01,0-.02,0-.03-.01-.08-.06,0-.18.02-.24.14-.32.24-.65.32-.98-.23.09-.47.15-.72.18-.03.1-.06.2-.1.3-.04.11-.09.22-.14.33-.05.1-.08.21-.2.21-.02,0-.05,0-.08-.01-.18-.06-.13-.19-.1-.31.03-.16.06-.32.08-.49-.22,0-.43-.04-.64-.08-.02.13-.04.26-.07.39-.02.09-.05.24-.21.24-.02,0-.04,0-.06,0,.69.25,1.68.56,2.83.81h0s-.02,0-.03-.01c-.18-.08.15-.41.3-.64.43-.66.64-1.37.73-2.12Z"/>
<path style="fill: url(#Fade_to_Black_2-2); opacity: .5;" d="M16.19,9.7c-.14,0-.29.01-.43.04-.03,0-.05,0-.08,0-.04,0-.08,0-.11-.02,0,0,.01.01.02.02.06.09.1.18.11.28.02.02.03.04.04.07h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0t0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,.05,0,.07c0,.04,0,.07,0,.11,0,.05-.01.1-.02.14,0,0,0,.01,0,.02,0,.03.02.06.04.08h.02s.03.02.05.02c.03,0,.06-.01.08-.03.01-.01.02-.03.04-.04.15-.15.3-.26.45-.33.12-.05.23-.07.35-.07s.24.02.36.07c.12.05.23.11.34.19.05.03.1.07.14.12.03.03.06.05.08.08h0s.02.02.02.02h.01s.04.03.06.03c-.1-.21-.22-.41-.4-.56-.33-.29-.75-.41-1.18-.41"/>
<path style="fill: url(#linear-gradient-9); opacity: .4;" d="M16.15,8.3h-.02s-.08.05-.08.09c0,.03,0,.07,0,.11,0,.01,0,.02,0,.03.18.26.56.73,1.15.91,0,0,.91.47.68,1.16h0v-.02c.08-.21.09-.4.05-.59-.04-.19-.14-.36-.28-.51-.06-.06-.12-.11-.19-.16-.03-.02-.07-.05-.1-.07-.04-.03-.09-.05-.14-.08-.19-.09-.34-.19-.48-.29-.06-.04-.11-.09-.16-.13-.12-.11-.21-.2-.27-.3-.02-.03-.04-.07-.05-.1,0-.01-.02-.03-.03-.04-.02-.01-.04-.02-.06-.02"/>
<path style="fill: url(#linear-gradient-10); opacity: .5;" d="M15,10.21l.18-.11.77-1.03s-.42-.08-.63-.17c0,0-.21.96-.99,1.77l.29-.21c.11-.08.22-.15.38-.25Z"/>
<g>
<path style="fill: #404041;" d="M25.33,4.73h1.97c1.42,0,2.07.73,2.07,2.23v.74c0,1.5-.65,2.23-2.07,2.23h-1.14v4.09h-.82V4.73ZM27.3,9.19c.86,0,1.25-.39,1.25-1.44v-.84c0-1.04-.39-1.44-1.25-1.44h-1.14v3.71h1.14Z"/>
<path style="fill: #404041;" d="M32.4,4.73h.82v4.08h2.54v-4.08h.82v9.3h-.82v-4.48h-2.54v4.48h-.82V4.73Z"/>
<path style="fill: #404041;" d="M39.82,11.9v-5.02c0-1.48.74-2.27,2.09-2.27s2.09.8,2.09,2.27v5.02c0,1.48-.74,2.27-2.09,2.27s-2.09-.8-2.09-2.27ZM43.17,11.95v-5.13c0-1-.45-1.48-1.26-1.48s-1.26.48-1.26,1.48v5.13c0,1,.45,1.46,1.26,1.46s1.26-.47,1.26-1.46Z"/>
<path style="fill: #404041;" d="M47.23,4.73h3.72v.74h-2.9v3.34h2.38v.74h-2.38v3.72h2.9v.76h-3.72V4.73Z"/>
<path style="fill: #404041;" d="M53.93,4.73h1.09l2.42,7.19v-7.19h.77v9.3h-.88l-2.64-7.93v7.93h-.76V4.73Z"/>
<path style="fill: #404041;" d="M61.55,4.73h.82v9.3h-.82V4.73Z"/>
<path style="fill: #404041;" d="M67.19,9.29l-1.82-4.56h.88l1.46,3.69,1.48-3.69h.8l-1.82,4.56,1.9,4.74h-.88l-1.54-3.91-1.55,3.91h-.8l1.9-4.74Z"/>
</g>
</g>
</g>
</svg>


View File

@@ -0,0 +1,118 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"xmlns": "http://www.w3.org/2000/svg",
"xmlns:xlink": "http://www.w3.org/1999/xlink",
"fill": "none",
"version": "1.1",
"width": "65",
"height": "16",
"viewBox": "0 0 65 16"
},
"children": [
{
"type": "element",
"name": "defs",
"children": [
{
"type": "element",
"name": "clipPath",
"attributes": {
"id": "master_svg0_42_34281"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"x": "0",
"y": "0",
"width": "19",
"height": "16",
"rx": "0"
}
}
]
}
]
},
{
"type": "element",
"name": "g",
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"clip-path": "url(#master_svg0_42_34281)"
},
"children": [
{
"type": "element",
"name": "g",
"children": [
{
"type": "element",
"name": "g",
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M4.06862,14.6667C3.79213,14.6667,3.45463,14.5688,3.05614,14.373C2.97908,14.3351,2.92692,14.3105,2.89968,14.2992C2.33193,14.0628,1.82911,13.7294,1.39123,13.2989C0.463742,12.3871,0,11.2874,0,10C0,8.71258,0.463742,7.61293,1.39123,6.70107C2.16172,5.94358,3.06404,5.50073,4.09819,5.37252C4.23172,3.98276,4.81755,2.77756,5.85569,1.75693C7.04708,0.585642,8.4857,0,10.1716,0C11.5256,0,12.743,0.396982,13.8239,1.19095C14.8847,1.97019,15.61,2.97855,16,4.21604L14.7045,4.61063C14.4016,3.64918,13.8374,2.86532,13.0121,2.25905C12.1719,1.64191,11.2251,1.33333,10.1716,1.33333C8.8602,1.33333,7.74124,1.7888,6.81467,2.69974C5.88811,3.61067,5.42483,4.71076,5.42483,6L5.42483,6.66667L4.74673,6.66667C3.81172,6.66667,3.01288,6.99242,2.35021,7.64393C1.68754,8.2954,1.35621,9.08076,1.35621,10C1.35621,10.9192,1.68754,11.7046,2.35021,12.3561C2.66354,12.6641,3.02298,12.9026,3.42852,13.0714C3.48193,13.0937,3.55988,13.13,3.66237,13.1803C3.87004,13.2823,4.00545,13.3333,4.06862,13.3333L4.06862,14.6667Z",
"fill-rule": "evenodd",
"fill": "#FF6A00",
"fill-opacity": "1"
}
}
]
},
{
"type": "element",
"name": "g",
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M13.458613505859375,7.779393492279053C12.975613505859375,7.717463492279053,12.484813505859375,7.686503492279053,11.993983505859376,7.686503492279053C11.152583505859376,7.686503492279053,10.303403505859375,7.779393492279053,9.493183505859374,7.941943492279053C8.682953505859375,8.104503492279052,7.903893505859375,8.359943492279053,7.155983505859375,8.654083492279053C6.657383505859375,8.870823492279053,6.158783505859375,9.128843492279053,5.660181505859375,9.428153492279053C5.332974751859375,9.621673492279053,5.239486705859375,10.070633492279054,5.434253505859375,10.395743492279053L7.413073505859375,13.298533492279052C7.639003505859375,13.623603492279052,8.090863505859375,13.716463492279052,8.418073505859375,13.523003492279052C8.547913505859375,13.435263492279052,8.763453505859374,13.326893492279053,9.064693505859374,13.197863492279053C9.516553505859374,13.004333492279052,9.976203505859374,12.872733492279053,10.459223505859375,12.779863492279052C10.942243505859375,12.679263492279052,11.433053505859375,12.617333492279052,11.955023505859375,12.617333492279052L13.380683505859375,7.810353492279052L13.458613505859375,7.779393492279053ZM15.273813505859374,8.135463492279053L15.016753505859375,5.333333492279053L13.458613505859375,7.787133492279053C13.817013505859375,7.818093492279052,14.144213505859375,7.880023492279053,14.494743505859375,7.949683492279053C14.494743505859375,7.944523492279053,14.754433505859375,8.006453492279054,15.273813505859374,8.135463492279053ZM12.064083505859376,12.648273492279053L11.378523505859375,14.970463492279054L12.515943505859376,16.00003349227905L14.074083505859376,15.643933492279054L14.525943505859376,13.027603492279052C14.198743505859374,12.934663492279054,13.879283505859375,12.834063492279054,13.552083505859375,12.772133492279053C13.069083505859375,12.717933492279052,12.578283505859375,12.648273492279053,12.064083505859376,12.648273492279053ZM18.327743505859374,9.428153492279053C17.829143505859374,9.128843492279053,17.330543505859374,8.870823492279053,16.831943505859375,8.654083492279053C16.348943505859374,8.460573492279053,15.826943505859376,8.267053492279054,15.305013505859375,8.135463492279053L15.305013505859375,8.267053492279054L14.463613505859374,13.043063492279053C14.596083505859376,13.105003492279053,14.759683505859375,13.135933492279053,14.884283505859376,13.205603492279053C15.185523505859376,13.334623492279052,15.401043505859375,13.443003492279052,15.530943505859375,13.530733492279053C15.858143505859376,13.724263492279054,16.341143505859375,13.623603492279052,16.535943505859375,13.306263492279053L18.514743505859375,10.403483492279053C18.779643505859376,10.039673492279054,18.686143505859377,9.621673492279053,18.327743505859374,9.428153492279053Z",
"fill": "#FF6A00",
"fill-opacity": "1"
}
}
]
}
]
}
]
},
{
"type": "element",
"name": "g",
"children": [
{
"type": "element",
"name": "g",
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M25.044,2.668L34.676,2.668L34.676,4.04L25.044,4.04L25.044,2.668ZM29.958,7.82Q29.258,9.066,28.355,10.41Q27.451999999999998,11.754,26.92,12.3L32.506,11.782Q31.442,10.158,30.84,9.346L32.058,8.562000000000001Q32.786,9.5,33.843,11.012Q34.9,12.524,35.516,13.546L34.214,14.526Q33.891999999999996,13.966,33.346000000000004,13.098Q32.016,13.182,29.734,13.378Q27.451999999999998,13.574,25.87,13.742L25.31,13.812L24.834,13.882L24.414,12.468Q24.708,12.37,24.862000000000002,12.265Q25.016,12.16,25.121,12.069Q25.226,11.978,25.268,11.936Q25.912,11.32,26.724,10.165Q27.536,9.01,28.208,7.82L23.854,7.82L23.854,6.434L35.866,6.434L35.866,7.82L29.958,7.82ZM42.656,7.414L42.656,8.576L41.354,8.576L41.354,1.814L42.656,1.87L42.656,7.036Q43.314,5.846,43.888000000000005,4.369Q44.462,2.892,44.714,1.6600000000000001L46.086,1.981999Q45.96,2.612,45.722,3.41L49.6,3.41L49.6,4.74L45.274,4.74Q44.616,6.56,43.706,8.128L42.656,7.414ZM38.596000000000004,2.346L39.884,2.402L39.884,8.212L38.596000000000004,8.212L38.596000000000004,2.346ZM46.184,4.964Q46.688,5.356,47.5,6.175Q48.312,6.994,48.788,7.582L47.751999999999995,8.59Q47.346000000000004,8.072,46.576,7.274Q45.806,6.476,45.204,5.902L46.184,4.964ZM48.41,9.01L48.41,12.706L49.894,12.706L49.894,13.966L37.391999999999996,13.966L37.391999999999996,12.706L38.848,12.706L38.848,9.01L48.41,9.01ZM41.676,10.256L40.164,10.256L40.164,12.706L41.676,12.706L41.676,10.256ZM42.908,12.706L44.364000000000004,12.706L44.364000000000004,10.256L42.908,10.256L42.908,12.706ZM45.582,12.706L47.108000000000004,12.706L47.108000000000004,10.256L45.582,10.256L45.582,12.706ZM54.906,7.456L55.116,8.394L54.178,8.814L54.178,12.818Q54.178,13.434,54.031,13.735Q53.884,14.036,53.534,14.162Q53.184,14.288,52.456,14.358L51.867999999999995,14.414L51.476,13.084L52.162,13.028Q52.512,13,52.652,12.958Q52.792,12.916,52.841,12.797Q52.89,12.678,52.89,12.384L52.89,9.36Q51.980000000000004,9.724,51.322,9.948L51.013999999999996,8.576Q51.798,8.324,52.89,7.876L52.89,5.524L51.42,5.524L51.42,4.166L52.89,4.166L52.89,1.7579989999999999L54.178,1.814L54.178,4.166L55.214,4.166L55.214,5.524L54.178,5.524L54.178,7.316L54.808,7.022L54.906,7.456ZM56.894,4.5440000000000005L56.894,6.098L55.564,6.098L55.564,3.256L58.686,3.256Q58.42,2.346,58.266,1.9260000000000002L59.624,1.7579989999999999Q59.848,2.276,60.142,3.256L63.25,3.256L63.25,6.098L61.962,6.098L61.962,4.5440000000000005L56.894,4.5440000000000005ZM59.008,6.322Q58.392,6.938,57.685,7.512Q56.978,8.086,55.956,8.841999999999999L55.242,7.764Q56.824,6.728,58.126,5.37L59.008,6.322ZM60.422,5.37Q61.024,5.776,62.095,6.581Q63.166,7.386,63.656,7.806L62.942,8.982Q62.368,8.45,61.332,7.652Q60.296,6.854,59.666,6.434L60.422,5.37ZM62.592,10.256L60.044,10.256L60.044,12.566L63.572,12.566L63.572,13.826L55.144,13.826L55.144,12.566L58.63,12.566L58.63,10.256L56.054,10.256L56.054,8.982L62.592,8.982L62.592,10.256Z",
"fill": "#FF6A00",
"fill-opacity": "1"
}
}
]
}
]
}
]
}
]
},
"name": "AliyunIcon"
}

View File

@@ -0,0 +1,16 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './AliyunIcon.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'
const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
  props,
  ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)
Icon.displayName = 'AliyunIcon'
export default Icon
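
For reference, a short sketch of how such a generated component is typically consumed. This is a hypothetical consumer, not part of this commit; the relative import path depends on where the generated file sits in the icons tree.

// Hypothetical usage — illustrative only. The generated component accepts every
// IconBase prop except `data`, which is baked in from the generated JSON.
import * as React from 'react'
import AliyunIcon from './AliyunIcon'

const ProviderBadge = () => (
  <span>
    <AliyunIcon />
    Aliyun
  </span>
)

export default ProviderBadge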

View File

@@ -0,0 +1,71 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"xmlns": "http://www.w3.org/2000/svg",
"xmlns:xlink": "http://www.w3.org/1999/xlink",
"fill": "none",
"version": "1.1",
"width": "96",
"height": "24",
"viewBox": "0 0 96 24"
},
"children": [
{
"type": "element",
"name": "g",
"children": [
{
"type": "element",
"name": "g",
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M6.10294,22C5.68819,22,5.18195,21.8532,4.58421,21.5595C4.46861,21.5027,4.39038,21.4658,4.34951,21.4488C3.49789,21.0943,2.74367,20.5941,2.08684,19.9484C0.695613,18.5806,0,16.9311,0,15C0,13.0689,0.695612,11.4194,2.08684,10.0516C3.24259,8.91537,4.59607,8.2511,6.14728,8.05878C6.34758,5.97414,7.22633,4.16634,8.78354,2.63539C10.5706,0.878463,12.7286,0,15.2573,0C17.2884,0,19.1146,0.595472,20.7358,1.78642C22.327,2.95528,23.4151,4.46783,24,6.32406L22.0568,6.91594C21.6024,5.47377,20.7561,4.29798,19.5181,3.38858C18.2579,2.46286,16.8377,2,15.2573,2C13.2903,2,11.6119,2.6832,10.222,4.04961C8.83217,5.41601,8.13725,7.06614,8.13725,9L8.13725,10L7.12009,10C5.71758,10,4.51932,10.4886,3.52532,11.4659C2.53132,12.4431,2.03431,13.6211,2.03431,15C2.03431,16.3789,2.53132,17.5569,3.52532,18.5341C3.99531,18.9962,4.53447,19.3538,5.14278,19.6071C5.2229,19.6405,5.33983,19.695,5.49356,19.7705C5.80505,19.9235,6.00818,20,6.10294,20L6.10294,22Z",
"fill-rule": "evenodd",
"fill": "#FF6A00",
"fill-opacity": "1"
}
}
]
},
{
"type": "element",
"name": "g",
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M20.18796103515625,11.66909C19.46346103515625,11.5762,18.72726103515625,11.52975,17.991011035156248,11.52975C16.728921035156247,11.52975,15.45515103515625,11.66909,14.23981103515625,11.91292C13.02447103515625,12.156749999999999,11.85588103515625,12.539909999999999,10.73402103515625,12.98113C9.98612103515625,13.306239999999999,9.23822103515625,13.69327,8.49031803515625,14.14223C7.99950790415625,14.43251,7.85927603515625,15.10595,8.15142503515625,15.59361L11.11966103515625,19.9478C11.45855103515625,20.4354,12.13634103515625,20.5747,12.627151035156249,20.2845C12.821921035156251,20.152900000000002,13.14523103515625,19.990299999999998,13.59708103515625,19.796799999999998C14.27487103515625,19.506500000000003,14.964341035156249,19.3091,15.68887103515625,19.169800000000002C16.413401035156248,19.018900000000002,17.14962103515625,18.926000000000002,17.93258103515625,18.926000000000002L20.071061035156248,11.715530000000001L20.18796103515625,11.66909ZM22.91076103515625,12.20319L22.525161035156252,8L20.18796103515625,11.6807C20.72556103515625,11.72714,21.21636103515625,11.82003,21.74216103515625,11.92453C21.74216103515625,11.91679,22.13166103515625,12.00968,22.91076103515625,12.20319ZM18.09616103515625,18.9724L17.06782103515625,22.4557L18.773961035156248,24L21.11116103515625,23.465899999999998L21.788961035156248,19.5414C21.298161035156248,19.402,20.81896103515625,19.2511,20.32816103515625,19.1582C19.60366103515625,19.076900000000002,18.86746103515625,18.9724,18.09616103515625,18.9724ZM27.49166103515625,14.14223C26.74376103515625,13.69327,25.99586103515625,13.306239999999999,25.24796103515625,12.98113C24.52346103515625,12.69086,23.74046103515625,12.40058,22.95756103515625,12.20319L22.95756103515625,12.40058L21.69546103515625,19.5646C21.89416103515625,19.6575,22.139561035156248,19.7039,22.32646103515625,19.8084C22.77836103515625,20.0019,23.101661035156248,20.1645,23.29646103515625,20.2961C23.78726103515625,20.586399999999998,24.51176103515625,20.4354,24.80396103515625,19.959400000000002L27.77216103515625,15.605229999999999C28.16946103515625,15.05951,28.02926103515625,14.43251,27.49166103515625,14.14223Z",
"fill": "#FF6A00",
"fill-opacity": "1"
}
}
]
},
{
"type": "element",
"name": "g",
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M35.785,3.8624638671875L50.233000000000004,3.8624638671875L50.233000000000004,5.9204638671875L35.785,5.9204638671875L35.785,3.8624638671875ZM43.156,11.5904638671875Q42.106,13.4594638671875,40.7515,15.4754638671875Q39.397,17.4914638671875,38.599000000000004,18.3104638671875L46.978,17.5334638671875Q45.382,15.0974638671875,44.479,13.8794638671875L46.306,12.7034638671875Q47.397999999999996,14.1104638671875,48.9835,16.3784638671875Q50.569,18.6464638671875,51.492999999999995,20.1794638671875L49.54,21.6494638671875Q49.057,20.8094638671875,48.238,19.5074638671875Q46.243,19.6334638671875,42.82,19.9274638671875Q39.397,20.2214638671875,37.024,20.4734638671875L36.184,20.5784638671875L35.47,20.6834638671875L34.84,18.5624638671875Q35.281,18.4154638671875,35.512,18.2579638671875Q35.743,18.1004638671875,35.9005,17.963963867187502Q36.058,17.8274638671875,36.121,17.7644638671875Q37.087,16.840463867187502,38.305,15.1079638671875Q39.522999999999996,13.3754638671875,40.531,11.5904638671875L34,11.5904638671875L34,9.5114638671875L52.018,9.5114638671875L52.018,11.5904638671875L43.156,11.5904638671875ZM62.203,10.9814638671875L62.203,12.7244638671875L60.25,12.7244638671875L60.25,2.5814638671875L62.203,2.6654638671875L62.203,10.4144638671875Q63.19,8.6294638671875,64.051,6.4139638671875Q64.912,4.1984638671875,65.28999999999999,2.3504638671875L67.348,2.8334628671875Q67.15899999999999,3.7784638671875,66.80199999999999,4.9754638671875L72.619,4.9754638671875L72.619,6.9704638671875L66.13,6.9704638671875Q65.143,9.7004638671875,63.778,12.0524638671875L62.203,10.9814638671875ZM56.113,3.3794638671875L58.045,3.4634638671875L58.045,12.1784638671875L56.113,12.1784638671875L56.113,3.3794638671875ZM67.495,7.3064638671875Q68.251,7.8944638671875,69.469,9.1229638671875Q70.687,10.3514638671875,71.40100000000001,11.2334638671875L69.84700000000001,12.7454638671875Q69.238,11.9684638671875,68.083,10.7714638671875Q66.928,9.5744638671875,66.025,8.7134638671875L67.495,7.3064638671875ZM70.834,13.3754638671875L70.834,18.9194638671875L73.06,18.9194638671875L73.06,20.8094638671875L54.307,20.8094638671875L54.307,18.9194638671875L56.491,18.9194638671875L56.491,13.3754638671875L70.834,13.3754638671875ZM60.733000000000004,15.2444638671875L58.465,15.2444638671875L58.465,18.9194638671875L60.733000000000004,18.9194638671875L60.733000000000004,15.2444638671875ZM62.581,18.9194638671875L64.765,18.9194638671875L64.765,15.2444638671875L62.581,15.2444638671875L62.581,18.9194638671875ZM66.592,18.9194638671875L68.881,18.9194638671875L68.881,15.2444638671875L66.592,15.2444638671875L66.592,18.9194638671875ZM80.578,11.0444638671875L80.893,12.4514638671875L79.48599999999999,13.0814638671875L79.48599999999999,19.0874638671875Q79.48599999999999,20.0114638671875,79.2655,20.4629638671875Q79.045,20.9144638671875,78.52000000000001,21.1034638671875Q77.995,21.2924638671875,76.90299999999999,21.3974638671875L76.021,21.4814638671875L75.43299999999999,19.4864638671875L76.462,19.4024638671875Q76.987,19.3604638671875,77.197,19.2974638671875Q77.407,19.2344638671875,77.4805,19.0559638671875Q77.554,18.8774638671875,77.554,18.4364638671875L77.554,13.9004638671875Q76.189,14.4464638671875,75.202,14.7824638671875L74.74000000000001,12.7244638671875Q75.916,12.3464638671875,77.554,11.6744638671875L77.554,8.1464638671875L75.34899999999999,8.1464638671875L75.34899999999999,6.1094638671875L77.554,6.1094638671875L77.554,2.4974628671875L79.48599999999999,2.5814638671875L79.48599999999999,6.1094638671875L81.03999999999999,6.1094638671875L81.03999999999999,8.1464638671875L79.4859999999999
9,8.1464638671875L79.48599999999999,10.8344638671875L80.431,10.3934638671875L80.578,11.0444638671875ZM83.56,6.6764638671875L83.56,9.0074638671875L81.565,9.0074638671875L81.565,4.7444638671875L86.24799999999999,4.7444638671875Q85.84899999999999,3.3794638671875,85.618,2.7494638671875L87.655,2.4974628671875Q87.991,3.2744638671875,88.432,4.7444638671875L93.094,4.7444638671875L93.094,9.0074638671875L91.162,9.0074638671875L91.162,6.6764638671875L83.56,6.6764638671875ZM86.731,9.3434638671875Q85.807,10.2674638671875,84.7465,11.1284638671875Q83.686,11.9894638671875,82.15299999999999,13.1234638671875L81.082,11.5064638671875Q83.455,9.9524638671875,85.408,7.9154638671875L86.731,9.3434638671875ZM88.852,7.9154638671875Q89.755,8.5244638671875,91.3615,9.731963867187499Q92.968,10.9394638671875,93.703,11.5694638671875L92.632,13.3334638671875Q91.771,12.5354638671875,90.217,11.3384638671875Q88.663,10.1414638671875,87.718,9.5114638671875L88.852,7.9154638671875ZM92.107,15.2444638671875L88.285,15.2444638671875L88.285,18.7094638671875L93.577,18.7094638671875L93.577,20.5994638671875L80.935,20.5994638671875L80.935,18.7094638671875L86.164,18.7094638671875L86.164,15.2444638671875L82.3,15.2444638671875L82.3,13.3334638671875L92.107,13.3334638671875L92.107,15.2444638671875Z",
"fill": "#FF6A00",
"fill-opacity": "1"
}
}
]
}
]
}
]
},
"name": "AliyunBigIcon"
}

View File

@@ -0,0 +1,16 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './AliyunIconBig.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'
const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
  props,
  ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)
Icon.displayName = 'AliyunIconBig'
export default Icon

View File

@@ -0,0 +1,122 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"id": "Layer_2",
"data-name": "Layer 2",
"xmlns": "http://www.w3.org/2000/svg",
"viewBox": "0 0 74 16",
"width": "74",
"height": "16"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"id": "Layer_1-2",
"data-name": "Layer 1"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"style": "fill: none;",
"y": "0",
"width": "74",
"height": "16"
},
"children": []
},
{
"type": "element",
"name": "g",
"attributes": {
"id": "Arize_-_standard",
"data-name": "Arize - standard"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M67.29,9.7h-7.52c0,.08,0,.15.01.21.35,1.03,1.03,1.73,2.13,1.92,1.13.19,2.14,0,2.86-1.01.06-.09.19-.18.29-.19.66-.02,1.33,0,1.99,0-.1,1.79-2.59,3.32-5.07,3.14-2.66-.2-4.61-2.53-4.39-5.25.25-3.08,2.44-4.88,5.58-4.56,2.7.27,4.45,2.69,4.12,5.76ZM59.81,7.77h5.3c-.28-1.25-1.36-2.01-2.78-1.98-1.25.03-2.32.87-2.52,1.98Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #ff008c;",
"d": "M13.87,1.69c.4-.02.74.17.99.54,2.03,2.92,4.06,5.85,6.08,8.77.42.61.28,1.33-.29,1.73-.56.39-1.31.24-1.74-.38-1.4-2.01-2.79-4.02-4.19-6.04-.03-.04-.05-.08-.08-.11-.55-.78-1.1-.78-1.64,0-1.41,2.03-2.82,4.06-4.23,6.09-.23.34-.52.59-.93.63-.51.06-.92-.13-1.19-.57-.28-.46-.25-.92.05-1.35.68-.98,1.36-1.96,2.04-2.93,1.34-1.93,2.68-3.85,4.02-5.78.26-.37.59-.6,1.11-.59Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M35.23,13.62h-2.01v-.77c-.07.01-.1,0-.13.02-1.64,1.16-3.39,1.25-5.12.34-1.74-.92-2.47-2.51-2.45-4.45.02-2.17,1.22-3.97,3.1-4.57,1.57-.5,3.09-.45,4.46.62.02.02.06.02.14.05v-.79h2.02v9.54ZM27.57,8.88c0,.11.01.31.03.5.14,1.39,1.18,2.39,2.61,2.51,1.4.12,2.53-.63,2.92-1.97.09-.32.14-.65.15-.98.09-2.15-1.5-3.51-3.56-3.07-1.28.27-2.13,1.43-2.15,3.02Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M53.61,5.93h-4.96v-1.85h7.76c0,.54.01,1.07,0,1.61,0,.12-.12.24-.21.34-1.52,1.79-3.05,3.57-4.57,5.36-.07.09-.14.18-.26.32h5.02v1.91h-7.83c0-.54-.01-1.08,0-1.61,0-.11.11-.22.19-.31,1.55-1.83,3.1-3.65,4.64-5.47.06-.07.11-.14.21-.28Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M39.6,13.69h-2.02V4.15h2.02v1.03c.54-.32,1.04-.68,1.58-.94.57-.28,1.19-.27,1.85-.23v1.96c-.1,0-.2.02-.3.02-1.58.02-2.68.9-3.01,2.46-.08.38-.11.77-.11,1.16-.01,1.24,0,2.47,0,3.71v.38Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M44.74,4.06h1.99v9.56h-1.99V4.06Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #ff008c;",
"d": "M13.82,13.02c.84,0,1.49.64,1.5,1.46,0,.83-.68,1.53-1.5,1.52-.82,0-1.47-.67-1.48-1.5,0-.83.64-1.48,1.47-1.48Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M47.13,1.41c0,.8-.61,1.43-1.39,1.43-.8,0-1.44-.63-1.44-1.43,0-.78.63-1.41,1.42-1.41.79,0,1.41.62,1.41,1.41Z"
},
"children": []
}
]
}
]
}
]
}
]
},
"name": "ArizeIcon"
}

View File

@@ -0,0 +1,20 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './ArizeIcon.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'
const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'ArizeIcon'
export default Icon

View File

@@ -0,0 +1,122 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"id": "Layer_2",
"data-name": "Layer 2",
"xmlns": "http://www.w3.org/2000/svg",
"viewBox": "0 0 111 24",
"width": "111",
"height": "24"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"id": "Layer_1-2",
"data-name": "Layer 1"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"style": "fill: none;",
"y": "0",
"width": "111",
"height": "24"
},
"children": []
},
{
"type": "element",
"name": "g",
"attributes": {
"id": "Arize_-_standard",
"data-name": "Arize - standard"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M100.94,14.55h-11.29c0,.13-.01.23.02.31.53,1.55,1.54,2.59,3.19,2.88,1.7.29,3.22,0,4.3-1.52.09-.13.28-.27.43-.28.99-.02,1.99-.01,2.99-.01-.16,2.69-3.89,4.98-7.6,4.7-3.99-.3-6.91-3.79-6.58-7.88.37-4.62,3.67-7.31,8.37-6.85,4.05.4,6.68,4.04,6.19,8.64ZM89.71,11.66h7.96c-.43-1.88-2.04-3.02-4.17-2.97-1.87.04-3.48,1.3-3.78,2.97Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #ff008c;",
"d": "M20.81,2.53c.59-.03,1.11.26,1.49.8,3.05,4.38,6.09,8.77,9.13,13.16.63.91.43,1.99-.43,2.59-.84.59-1.97.35-2.62-.57-2.1-3.01-4.19-6.04-6.28-9.05-.04-.06-.08-.11-.12-.17-.83-1.17-1.65-1.18-2.47,0-2.11,3.05-4.23,6.09-6.34,9.14-.35.5-.77.88-1.4.95-.77.08-1.39-.19-1.79-.86-.41-.69-.38-1.38.07-2.03,1.01-1.47,2.04-2.93,3.06-4.4,2.01-2.89,4.03-5.77,6.03-8.67.39-.56.88-.9,1.67-.89Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M52.84,20.43h-3.02v-1.16c-.1.02-.16.01-.19.03-2.46,1.73-5.09,1.88-7.68.51-2.61-1.38-3.71-3.77-3.68-6.67.03-3.26,1.82-5.96,4.64-6.86,2.35-.75,4.64-.67,6.69.93.04.03.09.03.2.07v-1.18h3.03v14.31ZM41.36,13.32c.01.17.02.46.05.75.22,2.09,1.76,3.58,3.91,3.76,2.1.18,3.8-.95,4.38-2.96.14-.47.2-.98.22-1.47.13-3.22-2.25-5.27-5.33-4.61-1.92.41-3.19,2.14-3.23,4.53Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M80.41,8.9h-7.44v-2.77h11.64c0,.81.02,1.61-.01,2.41,0,.17-.19.35-.32.51-2.28,2.68-4.57,5.36-6.85,8.04-.11.13-.21.26-.38.47h7.53v2.86h-11.74c0-.82-.02-1.62.01-2.42,0-.16.17-.33.28-.47,2.32-2.74,4.64-5.47,6.96-8.21.09-.1.16-.21.32-.42Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M59.39,20.54h-3.03V6.22h3.03v1.54c.8-.48,1.55-1.01,2.37-1.41.85-.41,1.79-.41,2.77-.35v2.94c-.16.01-.3.03-.45.03-2.37.03-4.01,1.36-4.51,3.69-.12.57-.16,1.16-.16,1.74-.02,1.85,0,3.71,0,5.56v.57Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M67.1,6.09h2.99v14.33h-2.99V6.09Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #ff008c;",
"d": "M20.73,19.53c1.25,0,2.24.96,2.25,2.19.01,1.24-1.02,2.29-2.24,2.28-1.23-.01-2.21-1.01-2.22-2.24,0-1.25.96-2.22,2.21-2.22Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M70.7,2.11c0,1.19-.92,2.14-2.09,2.15-1.19.01-2.16-.95-2.16-2.14C66.46.95,67.4,0,68.58,0c1.18,0,2.12.93,2.12,2.11Z"
},
"children": []
}
]
}
]
}
]
}
]
},
"name": "ArizeIconBig"
}

View File

@@ -0,0 +1,20 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './ArizeIconBig.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'
const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'ArizeIconBig'
export default Icon

View File

@@ -0,0 +1,853 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"id": "Layer_2",
"data-name": "Layer 2",
"xmlns": "http://www.w3.org/2000/svg",
"xmlns:xlink": "http://www.w3.org/1999/xlink",
"viewBox": "0 0 74 16",
"width": "74",
"height": "16"
},
"children": [
{
"type": "element",
"name": "defs",
"attributes": {},
"children": [
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient",
"x1": "6.05",
"y1": ".98",
"x2": "13.18",
"y2": "13.34",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#11bab5"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".5",
"stop-color": "#00adee"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#0094c5"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-2",
"x1": "12.99",
"y1": "10.87",
"x2": "6.97",
"y2": ".45",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#fcfdff",
"stop-opacity": "0"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".12",
"stop-color": "#fcfdff",
"stop-opacity": ".17"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".3",
"stop-color": "#fcfdff",
"stop-opacity": ".42"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".47",
"stop-color": "#fcfdff",
"stop-opacity": ".63"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".64",
"stop-color": "#fcfdff",
"stop-opacity": ".79"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".78",
"stop-color": "#fcfdff",
"stop-opacity": ".9"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".91",
"stop-color": "#fcfdff",
"stop-opacity": ".97"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#fcfdff"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-3",
"x1": "11.21",
"y1": "10.8",
"x2": "6.88",
"y2": "3.29",
"xlink:href": "#linear-gradient-2"
},
"children": []
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-4",
"x1": "10.62",
"y1": "11.35",
"x2": "8.2",
"y2": "7.16",
"xlink:href": "#linear-gradient-2"
},
"children": []
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-5",
"x1": "10.72",
"y1": "12.2",
"x2": "9.21",
"y2": "9.59",
"xlink:href": "#linear-gradient-2"
},
"children": []
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-6",
"x1": "8.17",
"y1": "11.96",
"x2": "8.17",
"y2": "14.71",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#231f20"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".03",
"stop-color": "#231f20",
"stop-opacity": ".9"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".22",
"stop-color": "#231f20",
"stop-opacity": ".4"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".42",
"stop-color": "#231f20",
"stop-opacity": ".1"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".65",
"stop-color": "#231f20",
"stop-opacity": "0"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "Fade_to_Black_2",
"data-name": "Fade to Black 2",
"x1": "11.26",
"y1": "11.7",
"x2": "11.26",
"y2": "13.1",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#231f20"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#231f20",
"stop-opacity": "0"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-7",
"x1": "11.94",
"y1": "8.07",
"x2": "11.94",
"y2": "6.92",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#231f20"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".03",
"stop-color": "#231f20",
"stop-opacity": ".95"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".51",
"stop-color": "#231f20",
"stop-opacity": ".26"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".81",
"stop-color": "#231f20",
"stop-opacity": "0"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-8",
"x1": "14.45",
"y1": "8.92",
"x2": "14.45",
"y2": "6.14",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#231f20"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".18",
"stop-color": "#231f20",
"stop-opacity": ".48"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".38",
"stop-color": "#231f20",
"stop-opacity": ".12"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".58",
"stop-color": "#231f20",
"stop-opacity": "0"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "Fade_to_Black_2-2",
"data-name": "Fade to Black 2",
"x1": "17.03",
"y1": "11.1",
"x2": "16.07",
"y2": "9.44",
"xlink:href": "#Fade_to_Black_2"
},
"children": []
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-9",
"x1": "17.8",
"y1": "10.64",
"x2": "16.36",
"y2": "8.16",
"xlink:href": "#linear-gradient-2"
},
"children": []
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-10",
"x1": "14.07",
"y1": "10.22",
"x2": "16.16",
"y2": "9.02",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#231f20"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".31",
"stop-color": "#231f20",
"stop-opacity": ".5"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".67",
"stop-color": "#231f20",
"stop-opacity": ".13"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#231f20",
"stop-opacity": "0"
},
"children": []
}
]
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"id": "Layer_1-2",
"data-name": "Layer 1"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"style": "fill: none;",
"y": ".02",
"width": "74",
"height": "16"
},
"children": []
},
{
"type": "element",
"name": "g",
"attributes": {
"id": "Phoenix_horiz_-_gradient",
"data-name": "Phoenix horiz - gradient"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient);",
"d": "M17.91,10c-.04-.19-.14-.36-.28-.51-.06-.06-.12-.11-.19-.16-.03-.02-.07-.05-.1-.07-.04-.03-.09-.05-.14-.08-.19-.09-.34-.19-.48-.29-.06-.04-.11-.09-.16-.13-.12-.11-.21-.2-.27-.3-.02-.03-.04-.07-.05-.1,0-.01-.02-.03-.03-.04-.02-.02-.05-.02-.08-.02-.04,0-.08.04-.08.09,0,.03,0,.07,0,.11,0,.02,0,.04-.03.05,0,0,0,0-.02,0-.03,0-.06.02-.07.04-.02.02-.02.05-.02.08,0,.03.01.04.02.06h0s0,.03.01.04c.01.03.03.05.05.08.03.05.07.09.11.15.01.02.02.04,0,.06-.01.02-.03.03-.05.03-.25-.03-.46-.09-.67-.16-.02,0-.05-.02-.07-.03-.18-.08.15-.41.3-.64.66-1.02.8-2.19.81-3.38,0-1.15-.12-2.27-.35-3.39h0c-.15-.94-.27-1.29-.35-1.42-.02-.03-.04-.06-.06-.07-.02-.01-.03,0-.03,0-.04,0-.09.04-.15.1-.25.26-.23.57-.17.89.31,1.47.55,2.95.44,4.47-.07,1.04-.27,2.05-1,2.86-.04.05-.09.1-.14.14-.05.05-.14.16-.2.11-.08-.06,0-.18.02-.24.43-1,.55-2.05.48-3.12,0-.02,0-.05,0-.07h0s0-.04,0-.06c-.01-.14-.02-.28-.04-.43-.18-1.71-.4-1.82-.4-1.82h0s-.03-.02-.06-.03c-.12-.02-.18.08-.23.15-.23.35-.13.72-.05,1.08.26,1.28.27,2.55-.18,3.79-.04.11-.09.22-.14.33-.05.12-.09.26-.28.2-.18-.06-.13-.19-.1-.31.06-.31.11-.62.14-.93h0s0-.06,0-.09c0-.05,0-.11.01-.16,0-.05,0-.09,0-.13,0-.02,0-.04,0-.07.04-.99-.13-1.44-.19-1.57,0-.02-.01-.03-.02-.04h0s0,0,0,0c-.07-.1-.17-.09-.27,0-.17.17-.27.37-.22.62.13.74.05,1.46-.08,2.19-.02.1-.06.27-.27.23-.14-.03-.17-.09-.18-.21,0-.09,0-.19,0-.28,0,0,0,0,0,0h0s0-.03,0-.04h0s0-.01,0-.02c0-.13-.02-.25-.03-.38,0-.01,0-.02,0-.04-.08-.71-.18-.78-.18-.78-.07-.09-.17-.07-.26.02-.17.17-.26.37-.22.62.03.19.02.13.04.29.01.08.03.23.03.28,0,.04-.01.08-.04.11-.09.09-.23,0-.34-.05-2.08-.86-3.86-2.07-5.42-3.6-.61-.62-1.65-1.88-1.65-1.88h0s-.09-.07-.16-.04c-.14.06-.13.23-.14.38-.02.29.13.51.3.7,2.35,2.74,5.21,4.71,8.67,5.69l.62.16s.01,0,.02,0c0,0,0,0,0,0l.54.14h.01s-.04.01-.06.02c-.59.12-1.76-.1-2.69-.32-.46-.1-.92-.22-1.36-.36-.02,0-.03,0-.03,0h0c-1.73-.55-3.32-1.44-4.7-2.81-.13-.13-.25-.27-.37-.41-.11-.13-.34-.4-.41-.5,0-.01-.02-.02-.03-.03h0s0,0,0,0c-.02-.02-.05-.04-.09-.03-.1.02-.12.12-.14.21-.07.34.04.62.26.88,1.42,1.59,3.2,2.61,5.21,3.28,1.14.38,2.31.61,3.5.76-.71.23-1.44.3-2.18.28-1.43-.04-2.77-.38-4.03-.99-.46-.24-.9-.5-1.01-.58-.07-.04-.13-.07-.2-.02-.13.1-.04.27,0,.4.08.26.25.43.49.55,2.05,1.08,4.22,1.52,6.52,1.13.23-.04.45-.08.7-.16-.39.33-.83.55-1.3.72-1.44.53-2.88.51-4.31.15-.46-.12-.99-.32-.99-.32h0c-.08-.03-.15-.05-.21.04-.1.15-.01.3.08.45.21.33.63.36.95.48.11.04-.19.24-.29.31-.47.36-.94.69-1.41,1.04l-.31.23h0c-.05.04-.1.08-.08.15.03.09.12.1.2.12.34.07.62-.04.89-.24.57-.43,1.15-.84,1.71-1.28.21-.17.4-.15.7-.08-.73.55-1.4,1.06-2.08,1.57-.69.52-1.38,1.04-2.07,1.56l-.46.34s-.01,0-.02.01h0s0,0,0,0c-.04.03-.07.07-.06.11.02.1.14.13.24.15.41.07.66-.08.94-.29,1.39-1.05,2.79-2.09,4.18-3.14.31-.24.63-.37,1.08-.32-.38.29-.75.56-1.11.83l-.98.73s-.01.01-.02.02h0c-.05.04-.09.09-.07.16.04.13.19.15.33.17.24.04.45-.05.64-.19.11-.08.22-.16.33-.24h0s1.5-1.19,1.5-1.19c0,0,.35-.25.76-.49.04-.02.07-.05.11-.07.02,0,.05-.02.11-.05.04-.02.09-.05.13-.07.06-.03.14-.07.22-.12.24-.1.93-.42,1.51-1.03.27-.21.66-.48.95-.62h0s0,0,0,0c.02-.01.04-.02.07-.03h0s0,0,0,0c.01,0,.03-.01.04-.01h.04c.11-.05.28-.06.31.18,0,.06,0,.12,0,.18,0,.05-.01.1-.02.14,0,.04,0,.08.04.1,0,0,.01,0,.02,0,.04.02.09.01.12-.02.01-.01.02-.03.04-.04.15-.15.3-.26.45-.33.23-.1.47-.1.72,0,.12.05.23.11.34.19.05.03.1.07.14.12.03.03.06.05.08.08h0s.02.02.02.02c0,0,0,0,.01.01.02.02.05.02.08.02.03,0,.06-.03.07-.06,0,0,0-.01,0-.02.07-.21.09-.4.04-.59Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #fddab4;",
"d": "M13.48,7.75c-.02.1-.03.21-.05.31.02-.1.04-.21.05-.31h0Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #fddab4;",
"d": "M15.33,8.9s.02,0,.02,0c0,0-.02,0-.02,0"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "14.46 8.33 14.46 8.33 14.46 8.33"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "14.48 8.26 14.48 8.26 14.48 8.26"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "14.49 8.25 14.49 8.25 14.49 8.25"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "15.64 8.25 15.63 8.25 15.64 8.25"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #fddab4;",
"d": "M13.81,8.17s-.02.06-.04.08c.01-.03.02-.06.04-.08"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "12.57 8.11 12.57 8.11 12.57 8.11"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "12.39 8.03 12.39 8.03 12.39 8.03"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "12.39 8.03 12.39 8.03 12.39 8.03"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "12.39 8.03 12.39 8.03 12.39 8.03"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "12.38 8.02 12.38 8.03 12.38 8.02"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "12.38 8.02 12.38 8.02 12.38 8.02"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "12.35 7.89 12.35 7.89 12.35 7.89"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #fddab4;",
"d": "M12.81,7.81s0,.04-.01.06c0-.02,0-.04.01-.06"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #fddab4;",
"d": "M11.67,7.67s-.03.02-.05.03c.02,0,.03-.02.05-.03"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #e5b3a5;",
"d": "M12.39,8.03s.03.03.05.04h0s-.03-.03-.05-.04M12.39,8.03s0,0,0,0c0,0,0,0,0,0M12.39,8.03s0,0,0,0c0,0,0,0,0,0M12.38,8.03s0,0,0,0c0,0,0,0,0,0M12.38,8.02s0,0,0,0c0,0,0,0,0,0M12.38,8.02s0,0,0,0c0,0,0,0,0,0M12.35,7.89s0,0,0,0c0,0,0,0,0,0M12.35,7.89h0s0,0,0,0c0,0,0,0,0,0M11.45,7.68s0,0,0,0c.04.02.09.03.13.03.02,0,.03,0,.05,0-.02,0-.03,0-.05,0-.04,0-.09-.02-.13-.03M12.34,7.54s0,0,0,0c0,0,0,0,0,0"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #e5b3a5;",
"d": "M15.35,8.91s0,0,.01,0h0s0,0-.01,0M14.52,8.58s0,0,0,0c0,0,0,0,0,0M14.39,8.51s.01.06.04.08c.01,0,.02.01.03.01.02,0,.04,0,.05-.02-.02.01-.04.02-.05.02-.01,0-.02,0-.03-.01-.03-.02-.04-.05-.04-.08M14.46,8.33s0,0,0,.01c0,0,0,0,0-.01M14.48,8.26s-.02.05-.03.07c0-.02.02-.05.03-.07M14.49,8.25s0,0,0,0c0,0,0,0,0,0M15.63,8.25s0,0,0,0c0,0,0,0,0,0M15.64,8.24s0,0,0,0c0,0,0,0,0,0M14.49,8.24s0,0,0,0c0,0,0,0,0,0M13.4,8.21c0,.07.03.13.13.16.03.01.06.01.08.01.09,0,.13-.06.16-.13-.03.07-.07.13-.16.13-.02,0-.05,0-.08-.01-.1-.03-.13-.09-.13-.16M12.57,8.11h0,0M12.74,8.04s-.08.07-.14.07c0,0-.01,0-.02,0,0,0,.01,0,.02,0,.07,0,.11-.03.14-.07M14.05,7.54s0,0,0,0c-.03.1-.06.2-.1.3-.04.11-.09.22-.14.33h0c.05-.11.1-.22.14-.33.04-.1.07-.2.1-.3M12.87,7.49c-.02.11-.03.22-.05.33.02-.11.04-.22.06-.33h0M15.52,6.97s0,0,0,0c-.15.49-.38.95-.75,1.36-.04.05-.09.1-.14.14h0s.09-.09.14-.14c.37-.41.6-.87.75-1.36"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-2); opacity: .4;",
"d": "M4.17,2.09s-.04,0-.06.01c-.1.04-.12.14-.13.25.52.68,2.56,3.18,5.78,4.9,0,0,3.15,1.66,6.32,1.84-.25-.03-.46-.09-.67-.16-.01,0-.02,0-.04-.01h0c-1.15-.25-2.13-.56-2.83-.81.01,0,.03,0,.04,0-.01,0-.03,0-.04,0-.04,0-.07-.02-.1-.04-.55-.2-.9-.35-.98-.39-.04-.02-.08-.04-.12-.05-2.08-.86-3.86-2.07-5.42-3.6-.61-.62-1.65-1.88-1.65-1.88h0s-.06-.05-.1-.05"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-3); opacity: .4;",
"d": "M4.39,4.75s-.01,0-.02,0c-.1.02-.12.12-.14.21,0,.02,0,.03,0,.05,1.61,2.18,3.77,3.07,3.77,3.07,2.15,1.01,3.92,1.22,5,1.22.45,0,.77-.04.96-.06-.09,0-.18.01-.28.01-.63,0-1.53-.17-2.29-.35-.46-.1-.92-.22-1.36-.36-.02,0-.03,0-.03,0h0c-1.73-.55-3.32-1.44-4.7-2.81-.13-.13-.25-.27-.37-.41-.11-.13-.34-.4-.41-.5,0-.01-.02-.02-.03-.03h0s0,0,0,0c-.02-.02-.04-.03-.07-.03"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-4); opacity: .4",
"d": "M5.86,8.53s-.05,0-.08.03c-.11.08-.06.22-.02.35.81.49,2.66,1.44,4.84,1.44.83,0,1.7-.14,2.59-.48-.64.21-1.3.28-1.96.28-.07,0-.15,0-.22,0-1.43-.04-2.77-.38-4.03-.99-.46-.24-.9-.5-1.01-.58-.04-.03-.08-.05-.12-.05"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-5); opacity: .4;",
"d": "M13.34,10.6c-.35.27-.75.46-1.16.61-.77.28-1.55.41-2.32.41-.67,0-1.33-.09-1.99-.26-.46-.12-.99-.32-.99-.32h0s-.07-.03-.1-.03c-.04,0-.08.02-.11.06-.05.07-.05.14-.04.21.51.24,1.6.66,2.93.66,1.15,0,2.49-.32,3.78-1.34"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-6); opacity: .4;",
"d": "M9.91,12.25c-1.25,0-2.19-.25-2.31-.29.04.01.07.02.1.03.11.04-.19.24-.29.31-.35.27-.69.52-1.04.77h1.06c.33-.25.67-.5.99-.75.12-.1.24-.13.37-.13.1,0,.2.02.33.05-.73.55-1.4,1.06-2.08,1.57-.4.3-.79.6-1.19.9h1.07c.96-.72,1.92-1.44,2.87-2.16.22-.17.44-.28.71-.32-.2.01-.4.02-.58.02Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#Fade_to_Black_2); opacity: .4;",
"d": "M9.69,13.1h1.03l.76-.6s.35-.25.76-.49c.04-.02.07-.05.11-.07.02,0,.05-.02.11-.05.04-.02.09-.05.13-.07.07-.03.15-.08.23-.12-.73.31-1.49.46-2.18.52,0,0,.02,0,.03,0,.06,0,.12,0,.18.01-.38.29-.75.56-1.11.83l-.06.05Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-7); opacity: .4;",
"d": "M12.35,7.89c0-.09,0-.19,0-.28h0s0,0,0,0c0-.01,0-.03,0-.04h0s0-.02,0-.02c0-.08,0-.16-.02-.23-.25-.1-.48-.24-.69-.39,0,.02,0,.04,0,.07.03.19.02.13.04.29.01.08.03.23.03.28,0,.04-.01.08-.04.11-.03.03-.06.04-.09.04-.04,0-.09-.02-.13-.03.08.04.43.19.98.39-.06-.04-.08-.09-.08-.18Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-8); opacity: .4;",
"d": "M16.36,6.14c-.23.32-.51.6-.84.83-.15.49-.38.95-.75,1.36-.04.05-.09.1-.14.14-.05.04-.11.12-.17.12-.01,0-.02,0-.03-.01-.08-.06,0-.18.02-.24.14-.32.24-.65.32-.98-.23.09-.47.15-.72.18-.03.1-.06.2-.1.3-.04.11-.09.22-.14.33-.05.1-.08.21-.2.21-.02,0-.05,0-.08-.01-.18-.06-.13-.19-.1-.31.03-.16.06-.32.08-.49-.22,0-.43-.04-.64-.08-.02.13-.04.26-.07.39-.02.09-.05.24-.21.24-.02,0-.04,0-.06,0,.69.25,1.68.56,2.83.81h0s-.02,0-.03-.01c-.18-.08.15-.41.3-.64.43-.66.64-1.37.73-2.12Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#Fade_to_Black_2-2); opacity: .5;",
"d": "M16.19,9.7c-.14,0-.29.01-.43.04-.03,0-.05,0-.08,0-.04,0-.08,0-.11-.02,0,0,.01.01.02.02.06.09.1.18.11.28.02.02.03.04.04.07h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0t0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,.05,0,.07c0,.04,0,.07,0,.11,0,.05-.01.1-.02.14,0,0,0,.01,0,.02,0,.03.02.06.04.08h.02s.03.02.05.02c.03,0,.06-.01.08-.03.01-.01.02-.03.04-.04.15-.15.3-.26.45-.33.12-.05.23-.07.35-.07s.24.02.36.07c.12.05.23.11.34.19.05.03.1.07.14.12.03.03.06.05.08.08h0s.02.02.02.02h.01s.04.03.06.03c-.1-.21-.22-.41-.4-.56-.33-.29-.75-.41-1.18-.41"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-9); opacity: .4;",
"d": "M16.15,8.3h-.02s-.08.05-.08.09c0,.03,0,.07,0,.11,0,.01,0,.02,0,.03.18.26.56.73,1.15.91,0,0,.91.47.68,1.16h0v-.02c.08-.21.09-.4.05-.59-.04-.19-.14-.36-.28-.51-.06-.06-.12-.11-.19-.16-.03-.02-.07-.05-.1-.07-.04-.03-.09-.05-.14-.08-.19-.09-.34-.19-.48-.29-.06-.04-.11-.09-.16-.13-.12-.11-.21-.2-.27-.3-.02-.03-.04-.07-.05-.1,0-.01-.02-.03-.03-.04-.02-.01-.04-.02-.06-.02"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-10); opacity: .5;",
"d": "M15,10.21l.18-.11.77-1.03s-.42-.08-.63-.17c0,0-.21.96-.99,1.77l.29-.21c.11-.08.22-.15.38-.25Z"
},
"children": []
},
{
"type": "element",
"name": "g",
"attributes": {},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M25.33,4.73h1.97c1.42,0,2.07.73,2.07,2.23v.74c0,1.5-.65,2.23-2.07,2.23h-1.14v4.09h-.82V4.73ZM27.3,9.19c.86,0,1.25-.39,1.25-1.44v-.84c0-1.04-.39-1.44-1.25-1.44h-1.14v3.71h1.14Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M32.4,4.73h.82v4.08h2.54v-4.08h.82v9.3h-.82v-4.48h-2.54v4.48h-.82V4.73Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M39.82,11.9v-5.02c0-1.48.74-2.27,2.09-2.27s2.09.8,2.09,2.27v5.02c0,1.48-.74,2.27-2.09,2.27s-2.09-.8-2.09-2.27ZM43.17,11.95v-5.13c0-1-.45-1.48-1.26-1.48s-1.26.48-1.26,1.48v5.13c0,1,.45,1.46,1.26,1.46s1.26-.47,1.26-1.46Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M47.23,4.73h3.72v.74h-2.9v3.34h2.38v.74h-2.38v3.72h2.9v.76h-3.72V4.73Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M53.93,4.73h1.09l2.42,7.19v-7.19h.77v9.3h-.88l-2.64-7.93v7.93h-.76V4.73Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M61.55,4.73h.82v9.3h-.82V4.73Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M67.19,9.29l-1.82-4.56h.88l1.46,3.69,1.48-3.69h.8l-1.82,4.56,1.9,4.74h-.88l-1.54-3.91-1.55,3.91h-.8l1.9-4.74Z"
},
"children": []
}
]
}
]
}
]
}
]
},
"name": "PhoenixIcon"
}

View File

@@ -0,0 +1,20 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './PhoenixIcon.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'
const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'PhoenixIcon'
export default Icon

View File

@@ -0,0 +1,853 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"id": "Layer_2",
"data-name": "Layer 2",
"xmlns": "http://www.w3.org/2000/svg",
"xmlns:xlink": "http://www.w3.org/1999/xlink",
"viewBox": "0 0 111 24",
"width": "111",
"height": "24"
},
"children": [
{
"type": "element",
"name": "defs",
"attributes": {},
"children": [
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient",
"x1": "9.07",
"y1": "1.47",
"x2": "19.77",
"y2": "20",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#11bab5"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".5",
"stop-color": "#00adee"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#0094c5"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-2",
"x1": "19.48",
"y1": "16.3",
"x2": "10.46",
"y2": ".67",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#fcfdff",
"stop-opacity": "0"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".12",
"stop-color": "#fcfdff",
"stop-opacity": ".17"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".3",
"stop-color": "#fcfdff",
"stop-opacity": ".42"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".47",
"stop-color": "#fcfdff",
"stop-opacity": ".63"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".64",
"stop-color": "#fcfdff",
"stop-opacity": ".79"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".78",
"stop-color": "#fcfdff",
"stop-opacity": ".9"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".91",
"stop-color": "#fcfdff",
"stop-opacity": ".97"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#fcfdff"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-3",
"x1": "16.82",
"y1": "16.21",
"x2": "10.32",
"y2": "4.94",
"xlink:href": "#linear-gradient-2"
},
"children": []
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-4",
"x1": "15.92",
"y1": "17.03",
"x2": "12.29",
"y2": "10.74",
"xlink:href": "#linear-gradient-2"
},
"children": []
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-5",
"x1": "16.08",
"y1": "18.3",
"x2": "13.82",
"y2": "14.38",
"xlink:href": "#linear-gradient-2"
},
"children": []
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-6",
"x1": "12.26",
"y1": "17.94",
"x2": "12.26",
"y2": "22.07",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#231f20"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".03",
"stop-color": "#231f20",
"stop-opacity": ".9"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".22",
"stop-color": "#231f20",
"stop-opacity": ".4"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".42",
"stop-color": "#231f20",
"stop-opacity": ".1"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".65",
"stop-color": "#231f20",
"stop-opacity": "0"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "Fade_to_Black_2",
"data-name": "Fade to Black 2",
"x1": "16.89",
"y1": "17.55",
"x2": "16.89",
"y2": "19.66",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#231f20"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#231f20",
"stop-opacity": "0"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-7",
"x1": "17.91",
"y1": "12.1",
"x2": "17.91",
"y2": "10.38",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#231f20"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".03",
"stop-color": "#231f20",
"stop-opacity": ".95"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".51",
"stop-color": "#231f20",
"stop-opacity": ".26"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".81",
"stop-color": "#231f20",
"stop-opacity": "0"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-8",
"x1": "21.67",
"y1": "13.37",
"x2": "21.67",
"y2": "9.21",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#231f20"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".18",
"stop-color": "#231f20",
"stop-opacity": ".48"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".38",
"stop-color": "#231f20",
"stop-opacity": ".12"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".58",
"stop-color": "#231f20",
"stop-opacity": "0"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "Fade_to_Black_2-2",
"data-name": "Fade to Black 2",
"x1": "25.54",
"y1": "16.65",
"x2": "24.1",
"y2": "14.16",
"xlink:href": "#Fade_to_Black_2"
},
"children": []
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-9",
"x1": "26.7",
"y1": "15.97",
"x2": "24.55",
"y2": "12.24",
"xlink:href": "#linear-gradient-2"
},
"children": []
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "linear-gradient-10",
"x1": "21.11",
"y1": "15.34",
"x2": "24.24",
"y2": "13.53",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0",
"stop-color": "#231f20"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".31",
"stop-color": "#231f20",
"stop-opacity": ".5"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": ".67",
"stop-color": "#231f20",
"stop-opacity": ".13"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#231f20",
"stop-opacity": "0"
},
"children": []
}
]
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"id": "Layer_1-2",
"data-name": "Layer 1"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"style": "fill: none;",
"y": ".04",
"width": "111",
"height": "24"
},
"children": []
},
{
"type": "element",
"name": "g",
"attributes": {
"id": "Phoenix_horiz_-_gradient",
"data-name": "Phoenix horiz - gradient"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient);",
"d": "M26.87,15c-.06-.28-.2-.53-.42-.76-.08-.09-.18-.17-.28-.25-.05-.04-.1-.07-.15-.1-.07-.04-.14-.08-.21-.12-.28-.14-.51-.29-.72-.44-.08-.06-.17-.13-.24-.19-.19-.16-.31-.3-.4-.45-.03-.05-.06-.1-.08-.15-.01-.02-.03-.04-.05-.05-.03-.02-.08-.04-.12-.03-.07.01-.12.07-.13.13,0,.05,0,.11,0,.17,0,.03-.01.07-.04.08,0,0-.01,0-.02,0-.04,0-.08.03-.11.07-.03.04-.03.08-.02.13,0,.04.02.06.03.09v.02s.02.03.03.04c.02.04.04.08.07.12.05.07.1.14.17.22.02.02.02.06,0,.09-.02.03-.05.04-.08.04-.37-.05-.7-.13-1-.25-.04-.01-.07-.03-.11-.04-.28-.12.22-.61.45-.96,1-1.53,1.2-3.29,1.21-5.07,0-1.72-.18-3.41-.52-5.08h0c-.22-1.4-.41-1.93-.53-2.13-.03-.05-.05-.08-.08-.1-.03-.02-.05-.01-.05-.01-.06,0-.14.06-.23.15-.37.39-.35.86-.25,1.34.47,2.21.82,4.43.66,6.7-.11,1.56-.4,3.07-1.5,4.3-.07.07-.14.14-.21.21-.08.08-.21.24-.3.17-.13-.09-.01-.27.03-.36.64-1.5.82-3.07.73-4.69,0-.04,0-.07,0-.11h0s0-.06,0-.09c-.02-.21-.04-.43-.06-.64-.28-2.56-.61-2.73-.61-2.73h0s-.05-.03-.09-.04c-.17-.04-.27.12-.35.23-.35.52-.19,1.07-.08,1.62.39,1.92.4,3.82-.28,5.69-.06.17-.14.34-.21.5-.08.17-.14.39-.42.3-.26-.09-.19-.29-.16-.47.09-.47.16-.93.2-1.4h0s0-.09.01-.13c0-.08.01-.16.02-.24,0-.07,0-.13.01-.2,0-.03,0-.07,0-.1.05-1.48-.2-2.16-.29-2.36-.01-.02-.02-.05-.03-.07h0s0,0,0,0c-.1-.15-.26-.13-.4,0-.26.25-.4.56-.33.93.19,1.1.08,2.2-.13,3.28-.03.15-.09.41-.4.34-.21-.05-.26-.14-.27-.32,0-.14,0-.28,0-.42,0,0,0,0,0-.01h0s0-.04,0-.06h0s0-.02,0-.03c0-.19-.02-.38-.05-.57,0-.02,0-.04,0-.05-.12-1.07-.27-1.17-.27-1.17-.1-.13-.25-.11-.39.03-.26.25-.39.55-.33.93.04.28.03.19.06.43.02.12.05.35.05.42,0,.06-.02.12-.06.16-.13.14-.34,0-.5-.07-3.12-1.29-5.79-3.1-8.12-5.4-.92-.94-2.48-2.83-2.48-2.83h0c-.06-.07-.13-.11-.24-.06-.2.09-.2.35-.22.57-.04.44.2.76.45,1.06,3.52,4.11,7.82,7.06,13,8.54l.93.25s.02,0,.03,0c0,0,0,0,0,0l.8.21h.02s-.06.02-.09.03c-.88.17-2.63-.15-4.04-.47-.7-.15-1.38-.32-2.05-.53-.03,0-.05-.01-.05-.01h0c-2.6-.83-4.97-2.17-7.05-4.21-.2-.19-.38-.4-.56-.61-.16-.2-.5-.61-.62-.75-.01-.02-.03-.03-.04-.05h0s0,0,0,0c-.04-.04-.08-.06-.14-.05-.14.03-.19.18-.21.31-.11.51.05.93.39,1.31,2.13,2.39,4.8,3.91,7.81,4.91,1.71.57,3.46.91,5.25,1.13-1.07.35-2.16.45-3.27.42-2.14-.05-4.15-.57-6.04-1.49-.7-.36-1.34-.76-1.52-.86-.1-.07-.2-.11-.3-.03-.19.15-.06.4,0,.6.12.38.38.65.73.83,3.08,1.63,6.32,2.28,9.78,1.7.35-.06.68-.12,1.05-.25-.58.5-1.25.83-1.95,1.08-2.17.79-4.32.76-6.46.22-.69-.18-1.49-.48-1.49-.48h0c-.12-.05-.23-.07-.32.06-.16.22-.02.46.12.67.32.5.94.55,1.43.72.17.05-.29.36-.43.47-.71.54-1.4,1.04-2.11,1.56l-.46.34h0c-.08.05-.15.12-.11.23.04.13.18.15.31.18.51.1.94-.06,1.33-.36.86-.64,1.73-1.26,2.56-1.93.32-.25.61-.23,1.04-.12-1.09.82-2.11,1.59-3.13,2.36-1.03.78-2.07,1.56-3.1,2.33l-.68.51s-.02.01-.03.02h-.01s0,0,0,0c-.06.05-.1.1-.09.17.03.15.21.19.36.22.61.11.99-.12,1.41-.44,2.09-1.57,4.19-3.13,6.27-4.71.47-.36.95-.56,1.62-.48-.57.43-1.12.84-1.67,1.25l-1.47,1.09s-.02.02-.03.02h0c-.08.06-.13.13-.1.24.06.19.29.22.49.25.37.05.68-.08.96-.29.17-.12.33-.24.5-.37h0s2.25-1.79,2.25-1.79c0,0,.53-.38,1.15-.73.05-.03.11-.07.17-.1.02-.01.08-.04.17-.08.07-.03.13-.07.2-.1.1-.05.21-.11.33-.18.36-.15,1.4-.64,2.26-1.55.41-.32.98-.73,1.43-.92h0s0,0,0,0c.03-.02.07-.03.1-.04h0s0,0,0,0c.02,0,.04-.02.06-.02l.06-.02c.16-.05.43-.06.46.29,0,.09,0,.18,0,.27,0,.07-.02.15-.03.21-.01.06.01.12.06.15,0,0,.02.01.02.01.06.03.14.02.18-.03.02-.02.03-.04.05-.06.22-.23.44-.39.68-.49.35-.14.7-.14,1.07,0,.18.07.35.16.51.28.07.05.14.11.21.17.04.04.08.08.12.12h0s.03.04.03.04c0,0,.01.01.02.02.04.03.08.04.12.03.05-.01.09-.05.11-.1,0,0,0-.02.01-.03.11-.31.13-.6.07-.89Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #fddab4;",
"d": "M20.22,11.62c-.03.16-.05.31-.08.47.03-.16.06-.31.08-.47h0Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #fddab4;",
"d": "M22.99,13.35s.02,0,.03.01c-.01,0-.02,0-.03-.01"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "21.68 12.5 21.68 12.5 21.68 12.5"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "21.73 12.39 21.73 12.39 21.73 12.39"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "21.74 12.37 21.73 12.38 21.74 12.37"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "23.45 12.37 23.45 12.38 23.45 12.37"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #fddab4;",
"d": "M20.72,12.26s-.04.08-.06.12c.02-.04.04-.08.06-.12"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "18.86 12.17 18.86 12.17 18.86 12.17"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "18.58 12.04 18.58 12.04 18.58 12.04"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "18.58 12.04 18.58 12.04 18.58 12.04"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "18.58 12.04 18.58 12.04 18.58 12.04"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "18.58 12.04 18.58 12.04 18.58 12.04"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "18.58 12.03 18.58 12.04 18.58 12.03"
},
"children": []
},
{
"type": "element",
"name": "polyline",
"attributes": {
"style": "fill: #fddab4;",
"points": "18.52 11.84 18.52 11.84 18.52 11.84"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #fddab4;",
"d": "M19.22,11.72s-.01.06-.02.09c0-.03.01-.06.02-.09"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #fddab4;",
"d": "M17.51,11.51s-.04.04-.07.05c.02,0,.05-.02.07-.05"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #e5b3a5;",
"d": "M18.58,12.04s.04.04.07.06h0s-.05-.04-.07-.06M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.57,12.03s0,0,0,0c0,0,0,0,0,0M18.52,11.84s0,0,0,0c0,0,0,0,0,0M18.52,11.84h0s0,0,0,0c0,0,0,0,0,0M17.17,11.51s0,0,0,0c.06.03.13.05.19.05.03,0,.05,0,.07-.01-.02,0-.05.01-.07.01-.06,0-.13-.02-.19-.05M18.52,11.31s0,0,0,0c0,0,0,0,0,0"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #e5b3a5;",
"d": "M23.02,13.37s.01,0,.02,0h0s-.01,0-.02,0M21.78,12.86s0,0,0,0c0,0,0,0,0,0M21.59,12.76s.02.08.06.11c.02.01.03.02.05.02.03,0,.05-.01.08-.03-.03.02-.05.03-.08.03-.02,0-.03,0-.05-.02-.04-.03-.06-.07-.06-.11M21.68,12.5s0,.01,0,.02c0,0,0-.01,0-.02M21.73,12.39s-.03.07-.04.11c.01-.03.03-.07.04-.11M21.73,12.38s0,0,0,.01c0,0,0,0,0-.01M23.45,12.38s0,0,0,0c0,0,0,0,0,0M23.45,12.37s0,0,0,0c0,0,0,0,0,0M21.74,12.37s0,0,0,0c0,0,0,0,0,0M20.1,12.32c0,.1.04.19.19.24.05.02.09.02.12.02.13,0,.19-.09.24-.2-.05.1-.11.2-.24.2-.04,0-.08,0-.12-.02-.15-.05-.19-.14-.19-.24M18.86,12.17h0,0M19.11,12.07c-.05.06-.11.1-.22.1-.01,0-.02,0-.03,0,.01,0,.02,0,.03,0,.1,0,.17-.04.22-.1M21.08,11.32s0,0,0,0c-.05.15-.09.3-.15.44-.06.17-.14.34-.21.5h0c.08-.16.15-.33.21-.5.05-.15.1-.3.15-.45M19.3,11.23c-.02.16-.05.33-.08.49.03-.16.06-.33.08-.49h0M23.28,10.45s0,0,0,0c-.22.73-.57,1.42-1.12,2.04-.07.07-.14.14-.21.21h0c.07-.07.14-.14.21-.21.55-.62.9-1.31,1.12-2.04"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-2); opacity: .4;",
"d": "M6.25,3.13s-.06,0-.09.02c-.14.06-.18.21-.2.37.78,1.02,3.84,4.77,8.67,7.35,0,0,4.72,2.49,9.48,2.76-.37-.05-.7-.13-1-.25-.02,0-.04-.01-.05-.02h0c-1.73-.37-3.2-.84-4.24-1.21.02,0,.04,0,.06,0-.02,0-.04,0-.06,0-.06-.01-.11-.03-.15-.05-.82-.3-1.35-.53-1.47-.59-.06-.03-.12-.06-.17-.08-3.12-1.29-5.79-3.1-8.12-5.4-.92-.94-2.48-2.83-2.48-2.83h0s-.09-.08-.15-.08"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-3); opacity: .4;",
"d": "M6.59,7.12s-.02,0-.03,0c-.14.03-.19.18-.21.31,0,.02,0,.05-.01.07,2.41,3.27,5.65,4.6,5.65,4.6,3.23,1.52,5.88,1.83,7.5,1.83.67,0,1.16-.05,1.44-.09-.13.01-.27.02-.42.02-.95,0-2.3-.26-3.43-.52-.7-.15-1.38-.32-2.05-.53-.03,0-.05-.01-.05-.01h0c-2.6-.83-4.97-2.17-7.05-4.21-.2-.19-.38-.4-.56-.61-.16-.2-.5-.61-.62-.75-.01-.02-.03-.03-.04-.05h0s0,0,0,0c-.03-.03-.06-.05-.1-.05"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-4); opacity: .4;",
"d": "M8.78,12.8s-.08.01-.12.04c-.16.13-.09.34-.02.52,1.21.73,3.98,2.16,7.27,2.16,1.24,0,2.55-.2,3.88-.72-.96.31-1.94.43-2.94.43-.11,0-.22,0-.33,0-2.14-.05-4.15-.57-6.04-1.49-.7-.36-1.34-.76-1.52-.86-.06-.04-.12-.07-.18-.07"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-5); opacity: .4;",
"d": "M20.02,15.9c-.53.4-1.12.68-1.74.91-1.16.42-2.32.61-3.47.61-1,0-1.99-.14-2.99-.39-.69-.18-1.49-.48-1.49-.48h0c-.05-.02-.1-.04-.15-.04-.06,0-.12.03-.17.1-.07.1-.08.21-.06.32.76.35,2.4.98,4.39.98,1.73,0,3.73-.47,5.67-2.01"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-6); opacity: .4;",
"d": "M14.87,18.37c-1.87,0-3.29-.38-3.47-.43.05.02.1.03.15.05.17.05-.29.36-.43.47-.52.4-1.04.78-1.56,1.16h1.59c.5-.37,1-.74,1.49-1.13.18-.14.35-.2.55-.2.15,0,.31.03.49.08-1.09.82-2.11,1.59-3.13,2.36-.6.45-1.19.9-1.79,1.34h1.6c1.44-1.08,2.88-2.15,4.31-3.24.33-.25.67-.42,1.07-.48-.3.02-.59.03-.88.03Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#Fade_to_Black_2); opacity: .4;",
"d": "M14.54,19.66h1.55l1.14-.91s.53-.38,1.15-.73c.05-.03.11-.07.17-.1.02-.01.08-.04.17-.08.07-.03.13-.07.2-.1.1-.05.22-.11.35-.19-1.1.46-2.23.69-3.28.77.01,0,.03,0,.04,0,.09,0,.18,0,.27.02-.57.43-1.12.84-1.67,1.25l-.09.07Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-7); opacity: .4;",
"d": "M18.52,11.84c0-.14,0-.28,0-.42h0s0-.01,0-.01c0-.02,0-.04,0-.06h0s0-.03,0-.03c0-.12-.01-.23-.03-.35-.37-.16-.72-.35-1.04-.59,0,.03,0,.07,0,.1.04.28.03.19.06.43.02.12.05.35.05.42,0,.06-.02.12-.06.16-.04.04-.09.06-.14.06-.06,0-.13-.02-.19-.05.12.06.65.29,1.48.59-.09-.05-.12-.14-.12-.27Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-8); opacity: .4;",
"d": "M24.54,9.21c-.34.48-.77.9-1.26,1.24-.22.73-.57,1.42-1.12,2.04-.07.07-.14.14-.21.21-.07.07-.17.19-.25.19-.02,0-.03,0-.05-.02-.13-.09-.01-.27.03-.36.21-.48.37-.98.48-1.47-.35.13-.71.22-1.08.27-.05.15-.09.3-.15.44-.06.17-.14.34-.21.5-.07.14-.12.32-.3.32-.04,0-.08,0-.12-.02-.26-.09-.19-.29-.16-.47.05-.24.09-.49.12-.73-.33-.01-.65-.05-.96-.12-.03.19-.06.39-.1.58-.02.13-.08.35-.31.35-.03,0-.06,0-.09-.01,1.04.37,2.51.84,4.24,1.21h0s-.03-.01-.05-.02c-.28-.12.22-.61.45-.96.64-.98.95-2.06,1.1-3.18Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#Fade_to_Black_2-2); opacity: .5;",
"d": "M24.28,14.56c-.22,0-.43.02-.65.06-.04,0-.08.01-.12.01-.06,0-.12,0-.17-.03,0,.01.02.02.03.03.09.13.14.27.16.42.02.03.04.06.06.1h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0t0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,.07,0,.1c0,.05,0,.11,0,.17,0,.07-.02.15-.03.21,0,0,0,.02,0,.03,0,.05.02.09.06.12h.02s.05.03.07.03c.04,0,.08-.02.11-.05.02-.02.03-.04.05-.06.22-.23.44-.39.68-.49.17-.07.35-.11.53-.11s.36.04.55.11c.18.07.35.16.51.28.07.05.14.11.21.17.04.04.08.08.12.12h0s.03.04.03.04l.02.02s.06.03.09.03c-.15-.32-.33-.61-.6-.84-.5-.43-1.13-.62-1.77-.62"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-9); opacity: .4;",
"d": "M24.22,12.45h-.03c-.07.01-.12.07-.13.14,0,.05,0,.11,0,.17,0,.02,0,.04-.01.05.27.4.84,1.1,1.72,1.36,0,0,1.36.71,1.01,1.74h0v-.03c.12-.31.14-.6.08-.89-.06-.28-.2-.53-.42-.76-.08-.09-.18-.17-.28-.25-.05-.04-.1-.07-.15-.1-.07-.04-.14-.08-.21-.12-.28-.14-.51-.29-.72-.44-.08-.06-.17-.13-.24-.19-.19-.16-.31-.3-.4-.45-.03-.05-.06-.1-.08-.15-.01-.02-.03-.04-.05-.05-.03-.02-.06-.03-.09-.03"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: url(#linear-gradient-10); opacity: .5;",
"d": "M22.5,15.32l.28-.16,1.16-1.55s-.63-.12-.94-.26c0,0-.32,1.45-1.49,2.66l.43-.32c.17-.12.33-.23.56-.37Z"
},
"children": []
},
{
"type": "element",
"name": "g",
"attributes": {},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M38,7.1h2.95c2.13,0,3.11,1.1,3.11,3.35v1.12c0,2.25-.98,3.35-3.11,3.35h-1.71v6.14h-1.24V7.1ZM40.95,13.78c1.3,0,1.87-.58,1.87-2.15v-1.26c0-1.55-.58-2.15-1.87-2.15h-1.71v5.56h1.71Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M48.61,7.1h1.24v6.12h3.81v-6.12h1.24v13.95h-1.24v-6.72h-3.81v6.72h-1.24V7.1Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M59.73,17.84v-7.54c0-2.21,1.12-3.41,3.13-3.41s3.13,1.2,3.13,3.41v7.54c0,2.21-1.12,3.41-3.13,3.41s-3.13-1.2-3.13-3.41ZM64.75,17.92v-7.69c0-1.5-.68-2.21-1.89-2.21s-1.89.72-1.89,2.21v7.69c0,1.5.68,2.19,1.89,2.19s1.89-.7,1.89-2.19Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M70.85,7.1h5.58v1.12h-4.35v5h3.57v1.12h-3.57v5.58h4.35v1.14h-5.58V7.1Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M80.9,7.1h1.63l3.63,10.78V7.1h1.16v13.95h-1.32l-3.97-11.9v11.9h-1.14V7.1Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M92.32,7.1h1.24v13.95h-1.24V7.1Z"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"style": "fill: #404041;",
"d": "M100.79,13.94l-2.73-6.84h1.32l2.19,5.54,2.21-5.54h1.2l-2.73,6.84,2.85,7.12h-1.32l-2.31-5.86-2.33,5.86h-1.2l2.85-7.12Z"
},
"children": []
}
]
}
]
}
]
}
]
},
"name": "PhoenixIconBig"
}

View File

@@ -0,0 +1,20 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './PhoenixIconBig.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'PhoenixIconBig'
export default Icon

View File

@@ -1,9 +1,15 @@
export { default as ArizeIconBig } from './ArizeIconBig'
export { default as ArizeIcon } from './ArizeIcon'
export { default as LangfuseIconBig } from './LangfuseIconBig'
export { default as LangfuseIcon } from './LangfuseIcon'
export { default as LangsmithIconBig } from './LangsmithIconBig'
export { default as LangsmithIcon } from './LangsmithIcon'
export { default as OpikIconBig } from './OpikIconBig'
export { default as OpikIcon } from './OpikIcon'
export { default as PhoenixIconBig } from './PhoenixIconBig'
export { default as PhoenixIcon } from './PhoenixIcon'
export { default as TracingIcon } from './TracingIcon'
export { default as WeaveIconBig } from './WeaveIconBig'
export { default as WeaveIcon } from './WeaveIcon'
export { default as AliyunIconBig } from './AliyunIconBig'
export { default as AliyunIcon } from './AliyunIcon'

View File

@@ -28,6 +28,7 @@ enum SUPPORTED_TYPES {
DATETIME = 'datetime',
CHECKBOX = 'checkbox',
SELECT = 'select',
HIDDEN = 'hidden',
}
const MarkdownForm = ({ node }: any) => {
const { onSend } = useChatContext()
@@ -37,8 +38,12 @@ const MarkdownForm = ({ node }: any) => {
useEffect(() => {
const initialValues: { [key: string]: any } = {}
node.children.forEach((child: any) => {
if ([SUPPORTED_TAGS.INPUT, SUPPORTED_TAGS.TEXTAREA].includes(child.tagName))
initialValues[child.properties.name] = child.properties.value
if ([SUPPORTED_TAGS.INPUT, SUPPORTED_TAGS.TEXTAREA].includes(child.tagName)) {
initialValues[child.properties.name]
= (child.tagName === SUPPORTED_TAGS.INPUT && child.properties.type === SUPPORTED_TYPES.HIDDEN)
? (child.properties.value || '')
: child.properties.value
}
})
setFormValues(initialValues)
}, [node.children])
@@ -180,6 +185,17 @@ const MarkdownForm = ({ node }: any) => {
)
}
if (child.properties.type === SUPPORTED_TYPES.HIDDEN) {
return (
<input
key={index}
type="hidden"
name={child.properties.name}
value={formValues[child.properties.name] || child.properties.value || ''}
/>
)
}
return (
<Input
key={index}

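A minimal sketch of the kind of markup the new HIDDEN type is meant to handle; the form fields and values below are illustrative, not taken from this diff:
// Illustrative only: a form fragment as it might appear in model output rendered by
// MarkdownForm. The hidden input is initialised from its `value` attribute (or '')
// and submitted with the form, but never displayed to the user.
const exampleHiddenFieldForm = `
<form>
  <input type="hidden" name="conversation_id" value="abc-123">
  <input type="text" name="feedback">
  <button>Submit</button>
</form>
`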
View File

@@ -1,3 +1,7 @@
import { ALLOW_UNSAFE_DATA_SCHEME } from '@/config'
export const isValidUrl = (url: string): boolean => {
return ['http:', 'https:', '//', 'mailto:'].some(prefix => url.startsWith(prefix))
const validPrefixes = ['http:', 'https:', '//', 'mailto:']
if (ALLOW_UNSAFE_DATA_SCHEME) validPrefixes.push('data:')
return validPrefixes.some(prefix => url.startsWith(prefix))
}
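Assuming ALLOW_UNSAFE_DATA_SCHEME is a boolean flag exported from '@/config' (as the import suggests), a rough behavioural sketch of the change, not part of the diff itself:
// isValidUrl('https://example.com')         -> true, regardless of the flag
// isValidUrl('data:image/png;base64,AAAA')  -> true only when ALLOW_UNSAFE_DATA_SCHEME is enabled
// isValidUrl('javascript:alert(1)')         -> false in both cases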

View File

@@ -4,6 +4,7 @@
* Includes preprocessing for LaTeX and custom "think" tags.
*/
import { flow } from 'lodash-es'
import { ALLOW_UNSAFE_DATA_SCHEME } from '@/config'
export const preprocessLaTeX = (content: string) => {
if (typeof content !== 'string')
@@ -86,5 +87,8 @@ export const customUrlTransform = (uri: string): string | undefined => {
if (PERMITTED_SCHEME_REGEX.test(scheme))
return uri
if (ALLOW_UNSAFE_DATA_SCHEME && scheme === 'data:')
return uri
return undefined
}

View File

@@ -0,0 +1,83 @@
import { type ReadonlyURLSearchParams, usePathname, useRouter, useSearchParams } from 'next/navigation'
import { useCallback, useMemo } from 'react'
export type DocumentListQuery = {
page: number
limit: number
keyword: string
}
const DEFAULT_QUERY: DocumentListQuery = {
page: 1,
limit: 10,
keyword: '',
}
// Parse the query parameters from the URL search string.
function parseParams(params: ReadonlyURLSearchParams): DocumentListQuery {
const page = Number.parseInt(params.get('page') || '1', 10)
const limit = Number.parseInt(params.get('limit') || '10', 10)
const keyword = params.get('keyword') || ''
return {
page: page > 0 ? page : 1,
limit: (limit > 0 && limit <= 100) ? limit : 10,
keyword: keyword ? decodeURIComponent(keyword) : '',
}
}
// Update the URL search string with the given query parameters.
function updateSearchParams(query: DocumentListQuery, searchParams: URLSearchParams) {
const { page, limit, keyword } = query || {}
const hasNonDefaultParams = (page && page > 1) || (limit && limit !== 10) || (keyword && keyword.trim())
if (hasNonDefaultParams) {
searchParams.set('page', (page || 1).toString())
searchParams.set('limit', (limit || 10).toString())
}
else {
searchParams.delete('page')
searchParams.delete('limit')
}
if (keyword && keyword.trim())
searchParams.set('keyword', encodeURIComponent(keyword))
else
searchParams.delete('keyword')
}
function useDocumentListQueryState() {
const searchParams = useSearchParams()
const query = useMemo(() => parseParams(searchParams), [searchParams])
const router = useRouter()
const pathname = usePathname()
// Helper function to update specific query parameters
const updateQuery = useCallback((updates: Partial<DocumentListQuery>) => {
const newQuery = { ...query, ...updates }
const params = new URLSearchParams()
updateSearchParams(newQuery, params)
const search = params.toString()
const queryString = search ? `?${search}` : ''
router.push(`${pathname}${queryString}`, { scroll: false })
}, [query, router, pathname])
// Helper function to reset query to defaults
const resetQuery = useCallback(() => {
const params = new URLSearchParams()
updateSearchParams(DEFAULT_QUERY, params)
const search = params.toString()
const queryString = search ? `?${search}` : ''
router.push(`${pathname}${queryString}`, { scroll: false })
}, [router, pathname])
return useMemo(() => ({
query,
updateQuery,
resetQuery,
}), [query, updateQuery, resetQuery])
}
export default useDocumentListQueryState
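A minimal consumer sketch for the hook above; the component and element names are illustrative and not part of this change:
// Illustrative usage only: a client component that keeps its pagination and
// search state in the URL via useDocumentListQueryState.
const DocumentListToolbar = () => {
  const { query, updateQuery, resetQuery } = useDocumentListQueryState()
  return (
    <div>
      <input
        value={query.keyword}
        onChange={e => updateQuery({ keyword: e.target.value, page: 1 })}
      />
      <button onClick={() => updateQuery({ page: query.page + 1 })}>Next page</button>
      <button onClick={resetQuery}>Reset</button>
    </div>
  )
}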

View File

@@ -26,6 +26,7 @@ import cn from '@/utils/classnames'
import { useDocumentList, useInvalidDocumentDetail, useInvalidDocumentList } from '@/service/knowledge/use-document'
import { useInvalid } from '@/service/use-base'
import { useChildSegmentListKey, useSegmentListKey } from '@/service/knowledge/use-segment'
import useDocumentListQueryState from './hooks/use-document-list-query-state'
import useEditDocumentMetadata from '../metadata/hooks/use-edit-dataset-metadata'
import DatasetMetadataDrawer from '../metadata/metadata-dataset/dataset-metadata-drawer'
import StatusWithAction from '../common/document-status-with-action/status-with-action'
@@ -82,7 +83,6 @@ type IDocumentsProps = {
}
export const fetcher = (url: string) => get(url, {}, {})
const DEFAULT_LIMIT = 10
const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
const { t } = useTranslation()
@@ -91,8 +91,12 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
const isFreePlan = plan.type === 'sandbox'
const [inputValue, setInputValue] = useState<string>('') // the input value
const [searchValue, setSearchValue] = useState<string>('')
const [currPage, setCurrPage] = React.useState<number>(0)
const [limit, setLimit] = useState<number>(DEFAULT_LIMIT)
// Use the new hook for URL state management
const { query, updateQuery } = useDocumentListQueryState()
const [currPage, setCurrPage] = React.useState<number>(query.page - 1) // Convert to 0-based index
const [limit, setLimit] = useState<number>(query.limit)
const router = useRouter()
const dataset = useDatasetDetailContextWithSelector(s => s.dataset)
const [notionPageSelectorModalVisible, setNotionPageSelectorModalVisible] = useState(false)
@@ -103,6 +107,45 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
const embeddingAvailable = !!dataset?.embedding_available
const debouncedSearchValue = useDebounce(searchValue, { wait: 500 })
// Initialize search value from URL on mount
useEffect(() => {
if (query.keyword) {
setInputValue(query.keyword)
setSearchValue(query.keyword)
}
}, []) // Only run on mount
// Sync local state with URL query changes
useEffect(() => {
setCurrPage(query.page - 1)
setLimit(query.limit)
if (query.keyword !== searchValue) {
setInputValue(query.keyword)
setSearchValue(query.keyword)
}
}, [query])
// Update URL when pagination changes
const handlePageChange = (newPage: number) => {
setCurrPage(newPage)
updateQuery({ page: newPage + 1 }) // Convert to 1-based index
}
// Update URL when limit changes
const handleLimitChange = (newLimit: number) => {
setLimit(newLimit)
setCurrPage(0) // Reset to first page when limit changes
updateQuery({ limit: newLimit, page: 1 })
}
// Update URL when search changes
useEffect(() => {
if (debouncedSearchValue !== query.keyword) {
setCurrPage(0) // Reset to first page when search changes
updateQuery({ keyword: debouncedSearchValue, page: 1 })
}
}, [debouncedSearchValue, query.keyword, updateQuery])
const { data: documentsRes, isFetching: isListLoading } = useDocumentList({
datasetId,
query: {
@@ -333,9 +376,9 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
pagination={{
total,
limit,
onLimitChange: setLimit,
onLimitChange: handleLimitChange,
current: currPage,
onChange: setCurrPage,
onChange: handlePageChange,
}}
onManageMetadata={showEditMetadataModal}
/>

View File

@@ -1,10 +1,20 @@
'use client'
import type { FC } from 'react'
import React, { useCallback, useEffect, useMemo, useState } from 'react'
import { useBoolean } from 'ahooks'
import { useBoolean, useDebounceFn } from 'ahooks'
import { ArrowDownIcon } from '@heroicons/react/24/outline'
import { pick, uniq } from 'lodash-es'
import { RiEditLine } from '@remixicon/react'
import {
RiArchive2Line,
RiDeleteBinLine,
RiEditLine,
RiEqualizer2Line,
RiLoopLeftLine,
RiMoreFill,
RiPauseCircleLine,
RiPlayCircleLine,
} from '@remixicon/react'
import { useContext } from 'use-context-selector'
import { useRouter } from 'next/navigation'
import { useTranslation } from 'react-i18next'
import dayjs from 'dayjs'
@@ -16,11 +26,12 @@ import RenameModal from './rename-modal'
import BatchAction from './detail/completed/common/batch-action'
import cn from '@/utils/classnames'
import Tooltip from '@/app/components/base/tooltip'
import Toast from '@/app/components/base/toast'
import Toast, { ToastContext } from '@/app/components/base/toast'
import { asyncRunSafe } from '@/utils'
import { formatNumber } from '@/utils/format'
import NotionIcon from '@/app/components/base/notion-icon'
import ProgressBar from '@/app/components/base/progress-bar'
import type { DocumentDisplayStatus } from '@/models/datasets'
import { ChunkingMode, DataSourceType, DocumentActionType, type SimpleDocumentDetail } from '@/models/datasets'
import type { CommonResponse } from '@/models/common'
import useTimestamp from '@/hooks/use-timestamp'
@@ -28,12 +39,369 @@ import { useDatasetDetailContextWithSelector as useDatasetDetailContext } from '
import type { Props as PaginationProps } from '@/app/components/base/pagination'
import Pagination from '@/app/components/base/pagination'
import Checkbox from '@/app/components/base/checkbox'
import { useDocumentArchive, useDocumentDelete, useDocumentDisable, useDocumentEnable } from '@/service/knowledge/use-document'
import { useDocumentArchive, useDocumentDelete, useDocumentDisable, useDocumentEnable, useDocumentPause, useDocumentResume, useDocumentUnArchive, useSyncDocument, useSyncWebsite } from '@/service/knowledge/use-document'
import { extensionToFileType } from '@/app/components/datasets/hit-testing/utils/extension-to-file-type'
import useBatchEditDocumentMetadata from '../metadata/hooks/use-batch-edit-document-metadata'
import EditMetadataBatchModal from '@/app/components/datasets/metadata/edit-metadata-batch/modal'
import StatusItem from './status-item'
import Operations from './operations'
// import StatusItem from './status-item'
// import Operations from './operations'
import { noop } from 'lodash-es'
import type { ColorMap, IndicatorProps } from '../../header/indicator'
import Indicator from '../../header/indicator'
import Switch from '../../base/switch'
import Divider from '../../base/divider'
import CustomPopover from '../../base/popover'
import Confirm from '../../base/confirm'
export const useIndexStatus = () => {
const { t } = useTranslation()
return {
queuing: { color: 'orange', text: t('datasetDocuments.list.status.queuing') }, // waiting
indexing: { color: 'blue', text: t('datasetDocuments.list.status.indexing') }, // indexing, splitting, parsing, cleaning
paused: { color: 'orange', text: t('datasetDocuments.list.status.paused') }, // paused
error: { color: 'red', text: t('datasetDocuments.list.status.error') }, // error
available: { color: 'green', text: t('datasetDocuments.list.status.available') }, // completed, archived = false, enabled = true
enabled: { color: 'green', text: t('datasetDocuments.list.status.enabled') }, // completed, archived = false, enabled = true
disabled: { color: 'gray', text: t('datasetDocuments.list.status.disabled') }, // completed, archived = false, enabled = false
archived: { color: 'gray', text: t('datasetDocuments.list.status.archived') }, // completed, archived = true
}
}
const STATUS_TEXT_COLOR_MAP: ColorMap = {
green: 'text-util-colors-green-green-600',
orange: 'text-util-colors-warning-warning-600',
red: 'text-util-colors-red-red-600',
blue: 'text-util-colors-blue-light-blue-light-600',
yellow: 'text-util-colors-warning-warning-600',
gray: 'text-text-tertiary',
}
// status item for list
export const StatusItem: FC<{
status: DocumentDisplayStatus
reverse?: boolean
scene?: 'list' | 'detail'
textCls?: string
errorMessage?: string
detail?: {
enabled: boolean
archived: boolean
id: string
}
datasetId?: string
onUpdate?: (operationName?: string) => void
}> = ({ status, reverse = false, scene = 'list', textCls = '', errorMessage, datasetId = '', detail, onUpdate }) => {
const DOC_INDEX_STATUS_MAP = useIndexStatus()
const localStatus = status.toLowerCase() as keyof typeof DOC_INDEX_STATUS_MAP
const { enabled = false, archived = false, id = '' } = detail || {}
const { notify } = useContext(ToastContext)
const { t } = useTranslation()
const { mutateAsync: enableDocument } = useDocumentEnable()
const { mutateAsync: disableDocument } = useDocumentDisable()
const { mutateAsync: deleteDocument } = useDocumentDelete()
const onOperate = async (operationName: OperationName) => {
let opApi = deleteDocument
switch (operationName) {
case 'enable':
opApi = enableDocument
break
case 'disable':
opApi = disableDocument
break
}
const [e] = await asyncRunSafe<CommonResponse>(opApi({ datasetId, documentId: id }) as Promise<CommonResponse>)
if (!e) {
notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
onUpdate?.()
// onUpdate?.(operationName)
}
else { notify({ type: 'error', message: t('common.actionMsg.modifiedUnsuccessfully') }) }
}
const { run: handleSwitch } = useDebounceFn((operationName: OperationName) => {
if (operationName === 'enable' && enabled)
return
if (operationName === 'disable' && !enabled)
return
onOperate(operationName)
}, { wait: 500 })
const embedding = useMemo(() => {
return ['queuing', 'indexing', 'paused'].includes(localStatus)
}, [localStatus])
return <div className={
cn('flex items-center',
reverse ? 'flex-row-reverse' : '',
scene === 'detail' ? s.statusItemDetail : '')
}>
<Indicator color={DOC_INDEX_STATUS_MAP[localStatus]?.color as IndicatorProps['color']} className={reverse ? 'ml-2' : 'mr-2'} />
<span className={cn(`${STATUS_TEXT_COLOR_MAP[DOC_INDEX_STATUS_MAP[localStatus].color as keyof typeof STATUS_TEXT_COLOR_MAP]} text-sm`, textCls)}>
{DOC_INDEX_STATUS_MAP[localStatus]?.text}
</span>
{
errorMessage && (
<Tooltip
popupContent={
<div className='max-w-[260px] break-all'>{errorMessage}</div>
}
triggerClassName='ml-1 w-4 h-4'
/>
)
}
{
scene === 'detail' && (
<div className='ml-1.5 flex items-center justify-between'>
<Tooltip
popupContent={t('datasetDocuments.list.action.enableWarning')}
popupClassName='text-text-secondary system-xs-medium'
needsDelay
disabled={!archived}
>
<Switch
defaultValue={archived ? false : enabled}
onChange={v => !archived && handleSwitch(v ? 'enable' : 'disable')}
disabled={embedding || archived}
size='md'
/>
</Tooltip>
</div>
)
}
</div>
}
type OperationName = 'delete' | 'archive' | 'enable' | 'disable' | 'sync' | 'un_archive' | 'pause' | 'resume'
// operation action for list and detail
export const OperationAction: FC<{
embeddingAvailable: boolean
detail: {
name: string
enabled: boolean
archived: boolean
id: string
data_source_type: string
doc_form: string
display_status?: string
}
datasetId: string
onUpdate: (operationName?: string) => void
scene?: 'list' | 'detail'
className?: string
}> = ({ embeddingAvailable, datasetId, detail, onUpdate, scene = 'list', className = '' }) => {
const { id, enabled = false, archived = false, data_source_type, display_status } = detail || {}
const [showModal, setShowModal] = useState(false)
const [deleting, setDeleting] = useState(false)
const { notify } = useContext(ToastContext)
const { t } = useTranslation()
const router = useRouter()
const { mutateAsync: archiveDocument } = useDocumentArchive()
const { mutateAsync: unArchiveDocument } = useDocumentUnArchive()
const { mutateAsync: enableDocument } = useDocumentEnable()
const { mutateAsync: disableDocument } = useDocumentDisable()
const { mutateAsync: deleteDocument } = useDocumentDelete()
const { mutateAsync: syncDocument } = useSyncDocument()
const { mutateAsync: syncWebsite } = useSyncWebsite()
const { mutateAsync: pauseDocument } = useDocumentPause()
const { mutateAsync: resumeDocument } = useDocumentResume()
const isListScene = scene === 'list'
const onOperate = async (operationName: OperationName) => {
let opApi
switch (operationName) {
case 'archive':
opApi = archiveDocument
break
case 'un_archive':
opApi = unArchiveDocument
break
case 'enable':
opApi = enableDocument
break
case 'disable':
opApi = disableDocument
break
case 'sync':
if (data_source_type === 'notion_import')
opApi = syncDocument
else
opApi = syncWebsite
break
case 'pause':
opApi = pauseDocument
break
case 'resume':
opApi = resumeDocument
break
default:
opApi = deleteDocument
setDeleting(true)
break
}
const [e] = await asyncRunSafe<CommonResponse>(opApi({ datasetId, documentId: id }) as Promise<CommonResponse>)
if (!e) {
notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
onUpdate(operationName)
}
else { notify({ type: 'error', message: t('common.actionMsg.modifiedUnsuccessfully') }) }
if (operationName === 'delete')
setDeleting(false)
}
const { run: handleSwitch } = useDebounceFn((operationName: OperationName) => {
if (operationName === 'enable' && enabled)
return
if (operationName === 'disable' && !enabled)
return
onOperate(operationName)
}, { wait: 500 })
const [currDocument, setCurrDocument] = useState<{
id: string
name: string
} | null>(null)
const [isShowRenameModal, {
setTrue: setShowRenameModalTrue,
setFalse: setShowRenameModalFalse,
}] = useBoolean(false)
const handleShowRenameModal = useCallback((doc: {
id: string
name: string
}) => {
setCurrDocument(doc)
setShowRenameModalTrue()
}, [setShowRenameModalTrue])
const handleRenamed = useCallback(() => {
onUpdate()
}, [onUpdate])
return <div className='flex items-center' onClick={e => e.stopPropagation()}>
{isListScene && !embeddingAvailable && (
<Switch defaultValue={false} onChange={noop} disabled={true} size='md' />
)}
{isListScene && embeddingAvailable && (
<>
{archived
? <Tooltip
popupContent={t('datasetDocuments.list.action.enableWarning')}
popupClassName='!font-semibold'
needsDelay
>
<div>
<Switch defaultValue={false} onChange={noop} disabled={true} size='md' />
</div>
</Tooltip>
: <Switch defaultValue={enabled} onChange={v => handleSwitch(v ? 'enable' : 'disable')} size='md' />
}
<Divider className='!ml-4 !mr-2 !h-3' type='vertical' />
</>
)}
{embeddingAvailable && (
<>
<Tooltip
popupContent={t('datasetDocuments.list.action.settings')}
popupClassName='text-text-secondary system-xs-medium'
>
<button
className={cn('mr-2 cursor-pointer rounded-lg',
!isListScene
? 'border-[0.5px] border-components-button-secondary-border bg-components-button-secondary-bg p-2 shadow-xs shadow-shadow-shadow-3 backdrop-blur-[5px] hover:border-components-button-secondary-border-hover hover:bg-components-button-secondary-bg-hover'
: 'p-0.5 hover:bg-state-base-hover')}
onClick={() => router.push(`/datasets/${datasetId}/documents/${detail.id}/settings`)}>
<RiEqualizer2Line className='h-4 w-4 text-components-button-secondary-text' />
</button>
</Tooltip>
<CustomPopover
htmlContent={
<div className='w-full py-1'>
{!archived && (
<>
<div className={s.actionItem} onClick={() => {
handleShowRenameModal({
id: detail.id,
name: detail.name,
})
}}>
<RiEditLine className='h-4 w-4 text-text-tertiary' />
<span className={s.actionName}>{t('datasetDocuments.list.table.rename')}</span>
</div>
{['notion_import', DataSourceType.WEB].includes(data_source_type) && (
<div className={s.actionItem} onClick={() => onOperate('sync')}>
<RiLoopLeftLine className='h-4 w-4 text-text-tertiary' />
<span className={s.actionName}>{t('datasetDocuments.list.action.sync')}</span>
</div>
)}
<Divider className='my-1' />
</>
)}
{!archived && display_status?.toLowerCase() === 'indexing' && (
<div className={s.actionItem} onClick={() => onOperate('pause')}>
<RiPauseCircleLine className='h-4 w-4 text-text-tertiary' />
<span className={s.actionName}>{t('datasetDocuments.list.action.pause')}</span>
</div>
)}
{!archived && display_status?.toLowerCase() === 'paused' && (
<div className={s.actionItem} onClick={() => onOperate('resume')}>
<RiPlayCircleLine className='h-4 w-4 text-text-tertiary' />
<span className={s.actionName}>{t('datasetDocuments.list.action.resume')}</span>
</div>
)}
{!archived && <div className={s.actionItem} onClick={() => onOperate('archive')}>
<RiArchive2Line className='h-4 w-4 text-text-tertiary' />
<span className={s.actionName}>{t('datasetDocuments.list.action.archive')}</span>
</div>}
{archived && (
<div className={s.actionItem} onClick={() => onOperate('un_archive')}>
<RiArchive2Line className='h-4 w-4 text-text-tertiary' />
<span className={s.actionName}>{t('datasetDocuments.list.action.unarchive')}</span>
</div>
)}
<div className={cn(s.actionItem, s.deleteActionItem, 'group')} onClick={() => setShowModal(true)}>
<RiDeleteBinLine className={'h-4 w-4 text-text-tertiary group-hover:text-text-destructive'} />
<span className={cn(s.actionName, 'group-hover:text-text-destructive')}>{t('datasetDocuments.list.action.delete')}</span>
</div>
</div>
}
trigger='click'
position='br'
btnElement={
<div className={cn(s.commonIcon)}>
<RiMoreFill className='h-4 w-4 text-components-button-secondary-text' />
</div>
}
btnClassName={open => cn(isListScene ? s.actionIconWrapperList : s.actionIconWrapperDetail, open ? '!hover:bg-state-base-hover !shadow-none' : '!bg-transparent')}
popupClassName='!w-full'
className={`!z-20 flex h-fit !w-[200px] justify-end ${className}`}
/>
</>
)}
{showModal
&& <Confirm
isShow={showModal}
isLoading={deleting}
isDisabled={deleting}
title={t('datasetDocuments.list.delete.title')}
content={t('datasetDocuments.list.delete.content')}
confirmText={t('common.operation.sure')}
onConfirm={() => onOperate('delete')}
onCancel={() => setShowModal(false)}
/>
}
{isShowRenameModal && currDocument && (
<RenameModal
datasetId={datasetId}
documentId={currDocument.id}
name={currDocument.name}
onClose={setShowRenameModalFalse}
onSaved={handleRenamed}
/>
)}
</div>
}
export const renderTdValue = (value: string | number | null, isEmptyStyle = false) => {
return (
@@ -282,10 +650,10 @@ const DocumentList: FC<IDocumentListProps> = ({
}
</td>
<td>
<Operations
<OperationAction
embeddingAvailable={embeddingAvailable}
datasetId={datasetId}
detail={pick(doc, ['name', 'enabled', 'archived', 'id', 'data_source_type', 'doc_form'])}
detail={pick(doc, ['name', 'enabled', 'archived', 'id', 'data_source_type', 'doc_form', 'display_status'])}
onUpdate={onUpdate}
/>
</td>

View File

@@ -36,7 +36,7 @@ import { useDocLink } from '@/context/i18n'
export default function AppSelector() {
const itemClassName = `
flex items-center w-full h-9 pl-3 pr-2 text-text-secondary system-md-regular
flex items-center w-full h-8 pl-3 pr-2 text-text-secondary system-md-regular
rounded-lg hover:bg-state-base-hover cursor-pointer gap-1
`
const router = useRouter()
@@ -87,24 +87,24 @@ export default function AppSelector() {
backdrop-blur-sm focus:outline-none
"
>
<MenuItem disabled>
<div className='flex flex-nowrap items-center py-[13px] pl-3 pr-2'>
<div className='grow'>
<div className='system-md-medium break-all text-text-primary'>
{userProfile.name}
{isEducationAccount && (
<PremiumBadge size='s' color='blue' className='ml-1 !px-2'>
<RiGraduationCapFill className='mr-1 h-3 w-3' />
<span className='system-2xs-medium'>EDU</span>
</PremiumBadge>
)}
</div>
<div className='system-xs-regular break-all text-text-tertiary'>{userProfile.email}</div>
</div>
<Avatar avatar={userProfile.avatar_url} name={userProfile.name} size={36} className='mr-3' />
</div>
</MenuItem>
<div className="px-1 py-1">
<MenuItem disabled>
<div className='flex flex-nowrap items-center py-2 pl-3 pr-2'>
<div className='grow'>
<div className='system-md-medium break-all text-text-primary'>
{userProfile.name}
{isEducationAccount && (
<PremiumBadge size='s' color='blue' className='ml-1 !px-2'>
<RiGraduationCapFill className='mr-1 h-3 w-3' />
<span className='system-2xs-medium'>EDU</span>
</PremiumBadge>
)}
</div>
<div className='system-xs-regular break-all text-text-tertiary'>{userProfile.email}</div>
</div>
<Avatar avatar={userProfile.avatar_url} name={userProfile.name} size={36} />
</div>
</MenuItem>
<MenuItem>
<Link
className={cn(itemClassName, 'group',

View File

@@ -31,8 +31,8 @@ const PluginsNav = ({
)}>
<div
className={classNames(
'relative flex flex-row h-8 p-1.5 gap-0.5 border border-transparent items-center justify-center rounded-xl system-sm-medium',
activated && 'border-components-main-nav-nav-button-border bg-components-main-nav-nav-button-bg-active shadow-md text-components-main-nav-nav-button-text',
'system-sm-medium relative flex h-8 flex-row items-center justify-center gap-0.5 rounded-xl border border-transparent p-1.5',
activated && 'border-components-main-nav-nav-button-border bg-components-main-nav-nav-button-bg-active text-components-main-nav-nav-button-text shadow-md',
!activated && 'text-text-tertiary hover:bg-state-base-hover hover:text-text-secondary',
(isInstallingWithError || isFailed) && !activated && 'border-components-panel-border-subtle',
)}

View File

@@ -22,7 +22,7 @@ const ToolsNav = ({
return (
<Link href="/tools" className={classNames(
'group text-sm font-medium',
activated && 'font-semibold bg-components-main-nav-nav-button-bg-active hover:bg-components-main-nav-nav-button-bg-active-hover shadow-md',
activated && 'hover:bg-components-main-nav-nav-button-bg-active-hover bg-components-main-nav-nav-button-bg-active font-semibold shadow-md',
activated ? 'text-components-main-nav-nav-button-text-active' : 'text-components-main-nav-nav-button-text hover:bg-components-main-nav-nav-button-bg-hover',
className,
)}>

Some files were not shown because too many files have changed in this diff.