mirror of https://github.com/langgenius/dify.git
Merge branch 'main' into feat/plugin-auto-upgrade-fe
commit a0af33e945
@@ -47,15 +47,17 @@ jobs:
- name: Run Unit tests
run: |
uv run --project api bash dev/pytest/pytest_unit_tests.sh

- name: Coverage Summary
run: |
set -x
# Extract coverage percentage and create a summary
TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])')

# Create a detailed coverage summary
echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY
echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
uv run --project api coverage report >> $GITHUB_STEP_SUMMARY
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY

- name: Run dify config tests
run: uv run --project api dev/pytest/pytest_config_tests.py

@@ -214,3 +214,4 @@ mise.toml
# AI Assistant
.roo/
api/.env.backup

@@ -1,8 +1,11 @@
import logging
from pathlib import Path
from typing import Any

from pydantic.fields import FieldInfo
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict, TomlConfigSettingsSource

from libs.file_utils import search_file_upwards

from .deploy import DeploymentConfig
from .enterprise import EnterpriseFeatureConfig

@@ -99,4 +102,12 @@ class DifyConfig(
RemoteSettingsSourceFactory(settings_cls),
dotenv_settings,
file_secret_settings,
TomlConfigSettingsSource(
settings_cls=settings_cls,
toml_file=search_file_upwards(
base_dir_path=Path(__file__).parent,
target_file_name="pyproject.toml",
max_search_parent_depth=2,
),
),
)

@@ -1,17 +1,13 @@
from pydantic import Field
from pydantic_settings import BaseSettings

from configs.packaging.pyproject import PyProjectConfig, PyProjectTomlConfig


class PackagingInfo(BaseSettings):
class PackagingInfo(PyProjectTomlConfig):
"""
Packaging build information
"""

CURRENT_VERSION: str = Field(
description="Dify version",
default="1.5.0",
)

COMMIT_SHA: str = Field(
description="SHA-1 checksum of the git commit used to build the app",
default="",

@@ -0,0 +1,17 @@
from pydantic import BaseModel, Field
from pydantic_settings import BaseSettings


class PyProjectConfig(BaseModel):
version: str = Field(description="Dify version", default="")


class PyProjectTomlConfig(BaseSettings):
"""
configs in api/pyproject.toml
"""

project: PyProjectConfig = Field(
description="configs in the project section of pyproject.toml",
default=PyProjectConfig(),
)

@@ -90,23 +90,11 @@ class ChatMessageTextApi(Resource):

message_id = args.get("message_id", None)
text = args.get("text", None)
if (
app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}
and app_model.workflow
and app_model.workflow.features_dict
):
text_to_speech = app_model.workflow.features_dict.get("text_to_speech")
if text_to_speech is None:
raise ValueError("TTS is not enabled")
voice = args.get("voice") or text_to_speech.get("voice")
else:
try:
if app_model.app_model_config is None:
raise ValueError("AppModelConfig not found")
voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
except Exception:
voice = None
response = AudioService.transcript_tts(app_model=app_model, text=text, message_id=message_id, voice=voice)
voice = args.get("voice", None)

response = AudioService.transcript_tts(
app_model=app_model, text=text, voice=voice, message_id=message_id, is_draft=True
)
return response
except services.errors.app_model_config.AppModelConfigBrokenError:
logging.exception("App model config broken.")

@@ -41,7 +41,7 @@ class OAuthDataSource(Resource):
if not internal_secret:
return ({"error": "Internal secret is not set"},)
oauth_provider.save_internal_access_token(internal_secret)
return {"data": ""}
return {"data": "internal"}
else:
auth_url = oauth_provider.get_authorization_url()
return {"data": auth_url}, 200

@@ -18,7 +18,6 @@ from controllers.console.app.error import (
from controllers.console.explore.wraps import InstalledAppResource
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from models.model import AppMode
from services.audio_service import AudioService
from services.errors.audio import (
AudioTooLargeServiceError,

@@ -79,19 +78,9 @@ class ChatTextApi(InstalledAppResource):

message_id = args.get("message_id", None)
text = args.get("text", None)
if (
app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}
and app_model.workflow
and app_model.workflow.features_dict
):
text_to_speech = app_model.workflow.features_dict.get("text_to_speech")
voice = args.get("voice") or text_to_speech.get("voice")
else:
try:
voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
except Exception:
voice = None
response = AudioService.transcript_tts(app_model=app_model, message_id=message_id, voice=voice, text=text)
voice = args.get("voice", None)

response = AudioService.transcript_tts(app_model=app_model, text=text, voice=voice, message_id=message_id)
return response
except services.errors.app_model_config.AppModelConfigBrokenError:
logging.exception("App model config broken.")

@@ -18,7 +18,7 @@ class VersionApi(Resource):
check_update_url = dify_config.CHECK_UPDATE_URL

result = {
"version": dify_config.CURRENT_VERSION,
"version": dify_config.project.version,
"release_date": "",
"release_notes": "",
"can_auto_update": False,

@@ -20,7 +20,7 @@ from controllers.service_api.app.error import (
from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from models.model import App, AppMode, EndUser
from models.model import App, EndUser
from services.audio_service import AudioService
from services.errors.audio import (
AudioTooLargeServiceError,

@@ -78,20 +78,9 @@ class TextApi(Resource):

message_id = args.get("message_id", None)
text = args.get("text", None)
if (
app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}
and app_model.workflow
and app_model.workflow.features_dict
):
text_to_speech = app_model.workflow.features_dict.get("text_to_speech", {})
voice = args.get("voice") or text_to_speech.get("voice")
else:
try:
voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
except Exception:
voice = None
voice = args.get("voice", None)
response = AudioService.transcript_tts(
app_model=app_model, message_id=message_id, end_user=end_user.external_user_id, voice=voice, text=text
app_model=app_model, text=text, voice=voice, end_user=end_user.external_user_id, message_id=message_id
)

return response

@@ -3,7 +3,7 @@ import json
from flask import request
from flask_restful import marshal, reqparse
from sqlalchemy import desc, select
from werkzeug.exceptions import NotFound
from werkzeug.exceptions import Forbidden, NotFound

import services
from controllers.common.errors import FilenameNotExistsError

@@ -18,6 +18,7 @@ from controllers.service_api.app.error import (
from controllers.service_api.dataset.error import (
ArchivedDocumentImmutableError,
DocumentIndexingError,
InvalidMetadataError,
)
from controllers.service_api.wraps import (
DatasetApiResource,

@@ -210,6 +211,9 @@ class DocumentAddByFileApi(DatasetApiResource):
if not dataset:
raise ValueError("Dataset does not exist.")

if dataset.provider == "external":
raise ValueError("External datasets are not supported.")

indexing_technique = args.get("indexing_technique") or dataset.indexing_technique
if not indexing_technique:
raise ValueError("indexing_technique is required.")

@@ -300,6 +304,9 @@ class DocumentUpdateByFileApi(DatasetApiResource):
if not dataset:
raise ValueError("Dataset does not exist.")

if dataset.provider == "external":
raise ValueError("External datasets are not supported.")

# indexing_technique is already set in dataset since this is an update
args["indexing_technique"] = dataset.indexing_technique

@@ -466,6 +473,101 @@ class DocumentIndexingStatusApi(DatasetApiResource):
return data


class DocumentDetailApi(DatasetApiResource):
METADATA_CHOICES = {"all", "only", "without"}

def get(self, tenant_id, dataset_id, document_id):
dataset_id = str(dataset_id)
document_id = str(document_id)

dataset = self.get_dataset(dataset_id, tenant_id)

document = DocumentService.get_document(dataset.id, document_id)

if not document:
raise NotFound("Document not found.")

if document.tenant_id != str(tenant_id):
raise Forbidden("No permission.")

metadata = request.args.get("metadata", "all")
if metadata not in self.METADATA_CHOICES:
raise InvalidMetadataError(f"Invalid metadata value: {metadata}")

if metadata == "only":
response = {"id": document.id, "doc_type": document.doc_type, "doc_metadata": document.doc_metadata_details}
elif metadata == "without":
dataset_process_rules = DatasetService.get_process_rules(dataset_id)
document_process_rules = document.dataset_process_rule.to_dict()
data_source_info = document.data_source_detail_dict
response = {
"id": document.id,
"position": document.position,
"data_source_type": document.data_source_type,
"data_source_info": data_source_info,
"dataset_process_rule_id": document.dataset_process_rule_id,
"dataset_process_rule": dataset_process_rules,
"document_process_rule": document_process_rules,
"name": document.name,
"created_from": document.created_from,
"created_by": document.created_by,
"created_at": document.created_at.timestamp(),
"tokens": document.tokens,
"indexing_status": document.indexing_status,
"completed_at": int(document.completed_at.timestamp()) if document.completed_at else None,
"updated_at": int(document.updated_at.timestamp()) if document.updated_at else None,
"indexing_latency": document.indexing_latency,
"error": document.error,
"enabled": document.enabled,
"disabled_at": int(document.disabled_at.timestamp()) if document.disabled_at else None,
"disabled_by": document.disabled_by,
"archived": document.archived,
"segment_count": document.segment_count,
"average_segment_length": document.average_segment_length,
"hit_count": document.hit_count,
"display_status": document.display_status,
"doc_form": document.doc_form,
"doc_language": document.doc_language,
}
else:
dataset_process_rules = DatasetService.get_process_rules(dataset_id)
document_process_rules = document.dataset_process_rule.to_dict()
data_source_info = document.data_source_detail_dict
response = {
"id": document.id,
"position": document.position,
"data_source_type": document.data_source_type,
"data_source_info": data_source_info,
"dataset_process_rule_id": document.dataset_process_rule_id,
"dataset_process_rule": dataset_process_rules,
"document_process_rule": document_process_rules,
"name": document.name,
"created_from": document.created_from,
"created_by": document.created_by,
"created_at": document.created_at.timestamp(),
"tokens": document.tokens,
"indexing_status": document.indexing_status,
"completed_at": int(document.completed_at.timestamp()) if document.completed_at else None,
"updated_at": int(document.updated_at.timestamp()) if document.updated_at else None,
"indexing_latency": document.indexing_latency,
"error": document.error,
"enabled": document.enabled,
"disabled_at": int(document.disabled_at.timestamp()) if document.disabled_at else None,
"disabled_by": document.disabled_by,
"archived": document.archived,
"doc_type": document.doc_type,
"doc_metadata": document.doc_metadata_details,
"segment_count": document.segment_count,
"average_segment_length": document.average_segment_length,
"hit_count": document.hit_count,
"display_status": document.display_status,
"doc_form": document.doc_form,
"doc_language": document.doc_language,
}

return response


api.add_resource(
DocumentAddByTextApi,
"/datasets/<uuid:dataset_id>/document/create_by_text",

@@ -489,3 +591,4 @@ api.add_resource(
api.add_resource(DocumentDeleteApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
api.add_resource(DocumentListApi, "/datasets/<uuid:dataset_id>/documents")
api.add_resource(DocumentIndexingStatusApi, "/datasets/<uuid:dataset_id>/documents/<string:batch>/indexing-status")
api.add_resource(DocumentDetailApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")

@@ -9,7 +9,7 @@ class IndexApi(Resource):
return {
"welcome": "Dify OpenAPI",
"api_version": "v1",
"server_version": dify_config.CURRENT_VERSION,
"server_version": dify_config.project.version,
}

@@ -11,13 +11,13 @@ from flask_restful import Resource
from pydantic import BaseModel
from sqlalchemy import select, update
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, Unauthorized
from werkzeug.exceptions import Forbidden, NotFound, Unauthorized

from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.login import _get_user
from models.account import Account, Tenant, TenantAccountJoin, TenantStatus
from models.dataset import RateLimitLog
from models.dataset import Dataset, RateLimitLog
from models.model import ApiToken, App, EndUser
from services.feature_service import FeatureService

@@ -317,3 +317,11 @@ def create_or_update_end_user_for_user_id(app_model: App, user_id: Optional[str]

class DatasetApiResource(Resource):
method_decorators = [validate_dataset_token]

def get_dataset(self, dataset_id: str, tenant_id: str) -> Dataset:
dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id, Dataset.tenant_id == tenant_id).first()

if not dataset:
raise NotFound("Dataset not found.")

return dataset

@@ -19,7 +19,7 @@ from controllers.web.error import (
from controllers.web.wraps import WebApiResource
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from models.model import App, AppMode
from models.model import App
from services.audio_service import AudioService
from services.errors.audio import (
AudioTooLargeServiceError,

@@ -77,21 +77,9 @@ class TextApi(WebApiResource):

message_id = args.get("message_id", None)
text = args.get("text", None)
if (
app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}
and app_model.workflow
and app_model.workflow.features_dict
):
text_to_speech = app_model.workflow.features_dict.get("text_to_speech", {})
voice = args.get("voice") or text_to_speech.get("voice")
else:
try:
voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
except Exception:
voice = None

voice = args.get("voice", None)
response = AudioService.transcript_tts(
app_model=app_model, message_id=message_id, end_user=end_user.external_user_id, voice=voice, text=text
app_model=app_model, text=text, voice=voice, end_user=end_user.external_user_id, message_id=message_id
)

return response

@@ -27,6 +27,9 @@ from core.ops.ops_trace_manager import TraceQueueManager
from core.prompt.utils.get_thread_messages_length import get_thread_messages_length
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
from core.workflow.repositories.draft_variable_repository import (
DraftVariableSaverFactory,
)
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader

@@ -36,7 +39,10 @@ from libs.flask_utils import preserve_flask_contexts
from models import Account, App, Conversation, EndUser, Message, Workflow, WorkflowNodeExecutionTriggeredFrom
from models.enums import WorkflowRunTriggeredFrom
from services.conversation_service import ConversationService
from services.workflow_draft_variable_service import DraftVarLoader, WorkflowDraftVariableService
from services.workflow_draft_variable_service import (
DraftVarLoader,
WorkflowDraftVariableService,
)

logger = logging.getLogger(__name__)

@@ -450,6 +456,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
workflow_execution_repository=workflow_execution_repository,
workflow_node_execution_repository=workflow_node_execution_repository,
stream=stream,
draft_var_saver_factory=self._get_draft_var_saver_factory(invoke_from),
)

return AdvancedChatAppGenerateResponseConverter.convert(response=response, invoke_from=invoke_from)

@@ -521,6 +528,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
user: Union[Account, EndUser],
workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository,
draft_var_saver_factory: DraftVariableSaverFactory,
stream: bool = False,
) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]:
"""

@@ -547,6 +555,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
workflow_execution_repository=workflow_execution_repository,
workflow_node_execution_repository=workflow_node_execution_repository,
stream=stream,
draft_var_saver_factory=draft_var_saver_factory,
)

try:

@@ -64,6 +64,7 @@ from core.workflow.entities.workflow_execution import WorkflowExecutionStatus, W
from core.workflow.enums import SystemVariableKey
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.nodes import NodeType
from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager

@@ -94,6 +95,7 @@ class AdvancedChatAppGenerateTaskPipeline:
dialogue_count: int,
workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository,
draft_var_saver_factory: DraftVariableSaverFactory,
) -> None:
self._base_task_pipeline = BasedGenerateTaskPipeline(
application_generate_entity=application_generate_entity,

@@ -153,6 +155,7 @@ class AdvancedChatAppGenerateTaskPipeline:
self._conversation_name_generate_thread: Thread | None = None
self._recorded_files: list[Mapping[str, Any]] = []
self._workflow_run_id: str = ""
self._draft_var_saver_factory = draft_var_saver_factory

def process(self) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]:
"""

@@ -371,6 +374,7 @@ class AdvancedChatAppGenerateTaskPipeline:
workflow_node_execution=workflow_node_execution,
)
session.commit()
self._save_output_for_event(event, workflow_node_execution.id)

if node_finish_resp:
yield node_finish_resp

@@ -390,6 +394,8 @@ class AdvancedChatAppGenerateTaskPipeline:
task_id=self._application_generate_entity.task_id,
workflow_node_execution=workflow_node_execution,
)
if isinstance(event, QueueNodeExceptionEvent):
self._save_output_for_event(event, workflow_node_execution.id)

if node_finish_resp:
yield node_finish_resp

@@ -759,3 +765,15 @@ class AdvancedChatAppGenerateTaskPipeline:
if not message:
raise ValueError(f"Message not found: {self._message_id}")
return message

def _save_output_for_event(self, event: QueueNodeSucceededEvent | QueueNodeExceptionEvent, node_execution_id: str):
with Session(db.engine) as session, session.begin():
saver = self._draft_var_saver_factory(
session=session,
app_id=self._application_generate_entity.app_config.app_id,
node_id=event.node_id,
node_type=event.node_type,
node_execution_id=node_execution_id,
enclosing_node_id=event.in_loop_id or event.in_iteration_id,
)
saver.save(event.process_data, event.outputs)

@@ -1,10 +1,20 @@
import json
from collections.abc import Generator, Mapping, Sequence
from typing import TYPE_CHECKING, Any, Optional, Union
from typing import TYPE_CHECKING, Any, Optional, Union, final

from sqlalchemy.orm import Session

from core.app.app_config.entities import VariableEntityType
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file import File, FileUploadConfig
from core.workflow.nodes.enums import NodeType
from core.workflow.repositories.draft_variable_repository import (
DraftVariableSaver,
DraftVariableSaverFactory,
NoopDraftVariableSaver,
)
from factories import file_factory
from services.workflow_draft_variable_service import DraftVariableSaver as DraftVariableSaverImpl

if TYPE_CHECKING:
from core.app.app_config.entities import VariableEntity

@@ -159,3 +169,38 @@ class BaseAppGenerator:
yield f"event: {message}\n\n"

return gen()

@final
@staticmethod
def _get_draft_var_saver_factory(invoke_from: InvokeFrom) -> DraftVariableSaverFactory:
if invoke_from == InvokeFrom.DEBUGGER:

def draft_var_saver_factory(
session: Session,
app_id: str,
node_id: str,
node_type: NodeType,
node_execution_id: str,
enclosing_node_id: str | None = None,
) -> DraftVariableSaver:
return DraftVariableSaverImpl(
session=session,
app_id=app_id,
node_id=node_id,
node_type=node_type,
node_execution_id=node_execution_id,
enclosing_node_id=enclosing_node_id,
)
else:

def draft_var_saver_factory(
session: Session,
app_id: str,
node_id: str,
node_type: NodeType,
node_execution_id: str,
enclosing_node_id: str | None = None,
) -> DraftVariableSaver:
return NoopDraftVariableSaver()

return draft_var_saver_factory

@@ -44,6 +44,7 @@ from core.app.entities.task_entities import (
)
from core.file import FILE_MODEL_IDENTITY, File
from core.tools.tool_manager import ToolManager
from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.workflow.entities.workflow_execution import WorkflowExecution
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution, WorkflowNodeExecutionStatus
from core.workflow.nodes import NodeType

@@ -506,7 +507,8 @@ class WorkflowResponseConverter:
# Convert to tuple to match Sequence type
return tuple(flattened_files)

def _fetch_files_from_variable_value(self, value: Union[dict, list]) -> Sequence[Mapping[str, Any]]:
@classmethod
def _fetch_files_from_variable_value(cls, value: Union[dict, list, Segment]) -> Sequence[Mapping[str, Any]]:
"""
Fetch files from variable value
:param value: variable value

@@ -515,20 +517,30 @@ class WorkflowResponseConverter:
if not value:
return []

files = []
if isinstance(value, list):
files: list[Mapping[str, Any]] = []
if isinstance(value, FileSegment):
files.append(value.value.to_dict())
elif isinstance(value, ArrayFileSegment):
files.extend([i.to_dict() for i in value.value])
elif isinstance(value, File):
files.append(value.to_dict())
elif isinstance(value, list):
for item in value:
file = self._get_file_var_from_value(item)
file = cls._get_file_var_from_value(item)
if file:
files.append(file)
elif isinstance(value, dict):
file = self._get_file_var_from_value(value)
elif isinstance(
value,
dict,
):
file = cls._get_file_var_from_value(value)
if file:
files.append(file)

return files

def _get_file_var_from_value(self, value: Union[dict, list]) -> Mapping[str, Any] | None:
@classmethod
def _get_file_var_from_value(cls, value: Union[dict, list]) -> Mapping[str, Any] | None:
"""
Get file var from value
:param value: variable value

@@ -25,6 +25,7 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.ops.ops_trace_manager import TraceQueueManager
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader

@@ -219,6 +220,9 @@ class WorkflowAppGenerator(BaseAppGenerator):
# new thread with request context and contextvars
context = contextvars.copy_context()

# release database connection, because the following new thread operations may take a long time
db.session.close()

worker_thread = threading.Thread(
target=self._generate_worker,
kwargs={

@@ -233,6 +237,10 @@ class WorkflowAppGenerator(BaseAppGenerator):

worker_thread.start()

draft_var_saver_factory = self._get_draft_var_saver_factory(
invoke_from,
)

# return response or stream generator
response = self._handle_response(
application_generate_entity=application_generate_entity,

@@ -241,6 +249,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
user=user,
workflow_execution_repository=workflow_execution_repository,
workflow_node_execution_repository=workflow_node_execution_repository,
draft_var_saver_factory=draft_var_saver_factory,
stream=streaming,
)

@@ -471,6 +480,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
user: Union[Account, EndUser],
workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository,
draft_var_saver_factory: DraftVariableSaverFactory,
stream: bool = False,
) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
"""

@@ -491,6 +501,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
user=user,
workflow_execution_repository=workflow_execution_repository,
workflow_node_execution_repository=workflow_node_execution_repository,
draft_var_saver_factory=draft_var_saver_factory,
stream=stream,
)

@@ -56,6 +56,7 @@ from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.ops.ops_trace_manager import TraceQueueManager
from core.workflow.entities.workflow_execution import WorkflowExecution, WorkflowExecutionStatus, WorkflowType
from core.workflow.enums import SystemVariableKey
from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager

@@ -87,6 +88,7 @@ class WorkflowAppGenerateTaskPipeline:
stream: bool,
workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository,
draft_var_saver_factory: DraftVariableSaverFactory,
) -> None:
self._base_task_pipeline = BasedGenerateTaskPipeline(
application_generate_entity=application_generate_entity,

@@ -131,6 +133,8 @@ class WorkflowAppGenerateTaskPipeline:
self._application_generate_entity = application_generate_entity
self._workflow_features_dict = workflow.features_dict
self._workflow_run_id = ""
self._invoke_from = queue_manager._invoke_from
self._draft_var_saver_factory = draft_var_saver_factory

def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
"""

@@ -322,6 +326,8 @@ class WorkflowAppGenerateTaskPipeline:
workflow_node_execution=workflow_node_execution,
)

self._save_output_for_event(event, workflow_node_execution.id)

if node_success_response:
yield node_success_response
elif isinstance(

@@ -339,6 +345,8 @@ class WorkflowAppGenerateTaskPipeline:
task_id=self._application_generate_entity.task_id,
workflow_node_execution=workflow_node_execution,
)
if isinstance(event, QueueNodeExceptionEvent):
self._save_output_for_event(event, workflow_node_execution.id)

if node_failed_response:
yield node_failed_response

@@ -593,3 +601,15 @@ class WorkflowAppGenerateTaskPipeline:
)

return response

def _save_output_for_event(self, event: QueueNodeSucceededEvent | QueueNodeExceptionEvent, node_execution_id: str):
with Session(db.engine) as session, session.begin():
saver = self._draft_var_saver_factory(
session=session,
app_id=self._application_generate_entity.app_config.app_id,
node_id=event.node_id,
node_type=event.node_type,
node_execution_id=node_execution_id,
enclosing_node_id=event.in_loop_id or event.in_iteration_id,
)
saver.save(event.process_data, event.outputs)

@@ -1,8 +1,6 @@
from collections.abc import Mapping
from typing import Any, Optional, cast

from sqlalchemy.orm import Session

from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
from core.app.apps.base_app_runner import AppRunner
from core.app.entities.queue_entities import (

@@ -35,7 +33,6 @@ from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey
from core.workflow.graph_engine.entities.event import (
AgentLogEvent,
BaseNodeEvent,
GraphEngineEvent,
GraphRunFailedEvent,
GraphRunPartialSucceededEvent,

@@ -70,9 +67,6 @@ from core.workflow.workflow_entry import WorkflowEntry
from extensions.ext_database import db
from models.model import App
from models.workflow import Workflow
from services.workflow_draft_variable_service import (
DraftVariableSaver,
)


class WorkflowBasedAppRunner(AppRunner):

@@ -400,7 +394,6 @@ class WorkflowBasedAppRunner(AppRunner):
in_loop_id=event.in_loop_id,
)
)
self._save_draft_var_for_event(event)

elif isinstance(event, NodeRunFailedEvent):
self._publish_event(

@@ -464,7 +457,6 @@ class WorkflowBasedAppRunner(AppRunner):
in_loop_id=event.in_loop_id,
)
)
self._save_draft_var_for_event(event)

elif isinstance(event, NodeInIterationFailedEvent):
self._publish_event(

@@ -718,30 +710,3 @@ class WorkflowBasedAppRunner(AppRunner):

def _publish_event(self, event: AppQueueEvent) -> None:
self.queue_manager.publish(event, PublishFrom.APPLICATION_MANAGER)

def _save_draft_var_for_event(self, event: BaseNodeEvent):
run_result = event.route_node_state.node_run_result
if run_result is None:
return
process_data = run_result.process_data
outputs = run_result.outputs
with Session(bind=db.engine) as session, session.begin():
draft_var_saver = DraftVariableSaver(
session=session,
app_id=self._get_app_id(),
node_id=event.node_id,
node_type=event.node_type,
# FIXME(QuantumGhost): rely on private state of queue_manager is not ideal.
invoke_from=self.queue_manager._invoke_from,
node_execution_id=event.id,
enclosing_node_id=event.in_loop_id or event.in_iteration_id or None,
)
draft_var_saver.save(process_data=process_data, outputs=outputs)


def _remove_first_element_from_variable_string(key: str) -> str:
"""
Remove the first element from the prefix.
"""
prefix, remaining = key.split(".", maxsplit=1)
return remaining

@@ -19,6 +19,7 @@ from core.app.entities.task_entities import (
from core.errors.error import QuotaExceededError
from core.model_runtime.errors.invoke import InvokeAuthorizationError, InvokeError
from core.moderation.output_moderation import ModerationRule, OutputModeration
from models.enums import MessageStatus
from models.model import Message

logger = logging.getLogger(__name__)

@@ -62,7 +63,7 @@ class BasedGenerateTaskPipeline:
return err

err_desc = self._error_to_desc(err)
message.status = "error"
message.status = MessageStatus.ERROR
message.error = err_desc
return err

@@ -395,6 +395,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
message.provider_response_latency = time.perf_counter() - self._start_at
message.total_price = usage.total_price
message.currency = usage.currency
self._task_state.llm_result.usage.latency = message.provider_response_latency
message.message_metadata = self._task_state.metadata.model_dump_json()

if trace_manager:

@@ -23,6 +23,7 @@ from core.model_runtime.entities.message_entities import (
PromptMessage,
PromptMessageTool,
SystemPromptMessage,
TextPromptMessageContent,
)
from core.model_runtime.entities.model_entities import AIModelEntity, ParameterRule

@@ -170,10 +171,15 @@ def invoke_llm_with_structured_output(
system_fingerprint: Optional[str] = None
for event in llm_result:
if isinstance(event, LLMResultChunk):
prompt_messages = event.prompt_messages
system_fingerprint = event.system_fingerprint

if isinstance(event.delta.message.content, str):
result_text += event.delta.message.content
prompt_messages = event.prompt_messages
system_fingerprint = event.system_fingerprint
elif isinstance(event.delta.message.content, list):
for item in event.delta.message.content:
if isinstance(item, TextPromptMessageContent):
result_text += item.data

yield LLMResultChunkWithStructuredOutput(
model=model_schema.model,

@@ -53,6 +53,37 @@ class LLMUsage(ModelUsage):
latency=0.0,
)

@classmethod
def from_metadata(cls, metadata: dict) -> "LLMUsage":
"""
Create LLMUsage instance from metadata dictionary with default values.

Args:
metadata: Dictionary containing usage metadata

Returns:
LLMUsage instance with values from metadata or defaults
"""
total_tokens = metadata.get("total_tokens", 0)
completion_tokens = metadata.get("completion_tokens", 0)
if total_tokens > 0 and completion_tokens == 0:
completion_tokens = total_tokens

return cls(
prompt_tokens=metadata.get("prompt_tokens", 0),
completion_tokens=completion_tokens,
total_tokens=total_tokens,
prompt_unit_price=Decimal(str(metadata.get("prompt_unit_price", 0))),
completion_unit_price=Decimal(str(metadata.get("completion_unit_price", 0))),
total_price=Decimal(str(metadata.get("total_price", 0))),
currency=metadata.get("currency", "USD"),
prompt_price_unit=Decimal(str(metadata.get("prompt_price_unit", 0))),
completion_price_unit=Decimal(str(metadata.get("completion_price_unit", 0))),
prompt_price=Decimal(str(metadata.get("prompt_price", 0))),
completion_price=Decimal(str(metadata.get("completion_price", 0))),
latency=metadata.get("latency", 0.0),
)

def plus(self, other: "LLMUsage") -> "LLMUsage":
"""
Add two LLMUsage instances together.

@@ -0,0 +1,720 @@
import hashlib
import json
import logging
import os
from datetime import datetime, timedelta
from typing import Optional, Union, cast

from openinference.semconv.trace import OpenInferenceSpanKindValues, SpanAttributes
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter as GrpcOTLPSpanExporter
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as HttpOTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.sdk.trace.id_generator import RandomIdGenerator
from opentelemetry.trace import SpanContext, TraceFlags, TraceState

from core.ops.base_trace_instance import BaseTraceInstance
from core.ops.entities.config_entity import ArizeConfig, PhoenixConfig
from core.ops.entities.trace_entity import (
BaseTraceInfo,
DatasetRetrievalTraceInfo,
GenerateNameTraceInfo,
MessageTraceInfo,
ModerationTraceInfo,
SuggestedQuestionTraceInfo,
ToolTraceInfo,
TraceTaskName,
WorkflowTraceInfo,
)
from extensions.ext_database import db
from models.model import EndUser, MessageFile
from models.workflow import WorkflowNodeExecutionModel

logger = logging.getLogger(__name__)


def setup_tracer(arize_phoenix_config: ArizeConfig | PhoenixConfig) -> tuple[trace_sdk.Tracer, SimpleSpanProcessor]:
"""Configure OpenTelemetry tracer with OTLP exporter for Arize/Phoenix."""
try:
# Choose the appropriate exporter based on config type
exporter: Union[GrpcOTLPSpanExporter, HttpOTLPSpanExporter]
if isinstance(arize_phoenix_config, ArizeConfig):
arize_endpoint = f"{arize_phoenix_config.endpoint}/v1"
arize_headers = {
"api_key": arize_phoenix_config.api_key or "",
"space_id": arize_phoenix_config.space_id or "",
"authorization": f"Bearer {arize_phoenix_config.api_key or ''}",
}
exporter = GrpcOTLPSpanExporter(
endpoint=arize_endpoint,
headers=arize_headers,
timeout=30,
)
else:
phoenix_endpoint = f"{arize_phoenix_config.endpoint}/v1/traces"
phoenix_headers = {
"api_key": arize_phoenix_config.api_key or "",
"authorization": f"Bearer {arize_phoenix_config.api_key or ''}",
}
exporter = HttpOTLPSpanExporter(
endpoint=phoenix_endpoint,
headers=phoenix_headers,
timeout=30,
)

attributes = {
"openinference.project.name": arize_phoenix_config.project or "",
"model_id": arize_phoenix_config.project or "",
}
resource = Resource(attributes=attributes)
provider = trace_sdk.TracerProvider(resource=resource)
processor = SimpleSpanProcessor(
exporter,
)
provider.add_span_processor(processor)

# Create a named tracer instead of setting the global provider
tracer_name = f"arize_phoenix_tracer_{arize_phoenix_config.project}"
logger.info(f"[Arize/Phoenix] Created tracer with name: {tracer_name}")
return cast(trace_sdk.Tracer, provider.get_tracer(tracer_name)), processor
except Exception as e:
logger.error(f"[Arize/Phoenix] Failed to setup the tracer: {str(e)}", exc_info=True)
raise


def datetime_to_nanos(dt: Optional[datetime]) -> int:
"""Convert datetime to nanoseconds since epoch. If None, use current time."""
if dt is None:
dt = datetime.now()
return int(dt.timestamp() * 1_000_000_000)


def uuid_to_trace_id(string: Optional[str]) -> int:
"""Convert UUID string to a valid trace ID (16-byte integer)."""
if string is None:
string = ""
hash_object = hashlib.sha256(string.encode())

# Take the first 16 bytes (128 bits) of the hash
digest = hash_object.digest()[:16]

# Convert to integer (128 bits)
return int.from_bytes(digest, byteorder="big")


class ArizePhoenixDataTrace(BaseTraceInstance):
def __init__(
self,
arize_phoenix_config: ArizeConfig | PhoenixConfig,
):
super().__init__(arize_phoenix_config)
import logging

logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
self.arize_phoenix_config = arize_phoenix_config
self.tracer, self.processor = setup_tracer(arize_phoenix_config)
self.project = arize_phoenix_config.project
self.file_base_url = os.getenv("FILES_URL", "http://127.0.0.1:5001")

def trace(self, trace_info: BaseTraceInfo):
logger.info(f"[Arize/Phoenix] Trace: {trace_info}")
try:
if isinstance(trace_info, WorkflowTraceInfo):
self.workflow_trace(trace_info)
if isinstance(trace_info, MessageTraceInfo):
self.message_trace(trace_info)
if isinstance(trace_info, ModerationTraceInfo):
self.moderation_trace(trace_info)
if isinstance(trace_info, SuggestedQuestionTraceInfo):
self.suggested_question_trace(trace_info)
if isinstance(trace_info, DatasetRetrievalTraceInfo):
self.dataset_retrieval_trace(trace_info)
if isinstance(trace_info, ToolTraceInfo):
self.tool_trace(trace_info)
if isinstance(trace_info, GenerateNameTraceInfo):
self.generate_name_trace(trace_info)

except Exception as e:
logger.error(f"[Arize/Phoenix] Error in the trace: {str(e)}", exc_info=True)
raise

def workflow_trace(self, trace_info: WorkflowTraceInfo):
if trace_info.message_data is None:
return

workflow_metadata = {
"workflow_id": trace_info.workflow_run_id or "",
"message_id": trace_info.message_id or "",
"workflow_app_log_id": trace_info.workflow_app_log_id or "",
"status": trace_info.workflow_run_status or "",
"status_message": trace_info.error or "",
"level": "ERROR" if trace_info.error else "DEFAULT",
"total_tokens": trace_info.total_tokens or 0,
}
workflow_metadata.update(trace_info.metadata)

trace_id = uuid_to_trace_id(trace_info.message_id)
span_id = RandomIdGenerator().generate_span_id()
context = SpanContext(
trace_id=trace_id,
span_id=span_id,
is_remote=False,
trace_flags=TraceFlags(TraceFlags.SAMPLED),
trace_state=TraceState(),
)

workflow_span = self.tracer.start_span(
name=TraceTaskName.WORKFLOW_TRACE.value,
attributes={
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.workflow_run_inputs, ensure_ascii=False),
SpanAttributes.OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False),
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.CHAIN.value,
SpanAttributes.METADATA: json.dumps(workflow_metadata, ensure_ascii=False),
SpanAttributes.SESSION_ID: trace_info.conversation_id or "",
},
start_time=datetime_to_nanos(trace_info.start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
)

try:
# Process workflow nodes
for node_execution in self._get_workflow_nodes(trace_info.workflow_run_id):
created_at = node_execution.created_at or datetime.now()
elapsed_time = node_execution.elapsed_time
finished_at = created_at + timedelta(seconds=elapsed_time)

process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}

node_metadata = {
"node_id": node_execution.id,
"node_type": node_execution.node_type,
"node_status": node_execution.status,
"tenant_id": node_execution.tenant_id,
"app_id": node_execution.app_id,
"app_name": node_execution.title,
"status": node_execution.status,
"level": "ERROR" if node_execution.status != "succeeded" else "DEFAULT",
}

if node_execution.execution_metadata:
node_metadata.update(json.loads(node_execution.execution_metadata))

# Determine the correct span kind based on node type
span_kind = OpenInferenceSpanKindValues.CHAIN.value
if node_execution.node_type == "llm":
span_kind = OpenInferenceSpanKindValues.LLM.value
provider = process_data.get("model_provider")
model = process_data.get("model_name")
if provider:
node_metadata["ls_provider"] = provider
if model:
node_metadata["ls_model_name"] = model

usage = json.loads(node_execution.outputs).get("usage", {}) if node_execution.outputs else {}
if usage:
node_metadata["total_tokens"] = usage.get("total_tokens", 0)
node_metadata["prompt_tokens"] = usage.get("prompt_tokens", 0)
node_metadata["completion_tokens"] = usage.get("completion_tokens", 0)
elif node_execution.node_type == "dataset_retrieval":
span_kind = OpenInferenceSpanKindValues.RETRIEVER.value
elif node_execution.node_type == "tool":
span_kind = OpenInferenceSpanKindValues.TOOL.value
else:
span_kind = OpenInferenceSpanKindValues.CHAIN.value

node_span = self.tracer.start_span(
name=node_execution.node_type,
attributes={
SpanAttributes.INPUT_VALUE: node_execution.inputs or "{}",
SpanAttributes.OUTPUT_VALUE: node_execution.outputs or "{}",
SpanAttributes.OPENINFERENCE_SPAN_KIND: span_kind,
SpanAttributes.METADATA: json.dumps(node_metadata, ensure_ascii=False),
SpanAttributes.SESSION_ID: trace_info.conversation_id or "",
},
start_time=datetime_to_nanos(created_at),
)

try:
if node_execution.node_type == "llm":
provider = process_data.get("model_provider")
model = process_data.get("model_name")
if provider:
node_span.set_attribute(SpanAttributes.LLM_PROVIDER, provider)
if model:
node_span.set_attribute(SpanAttributes.LLM_MODEL_NAME, model)

usage = json.loads(node_execution.outputs).get("usage", {}) if node_execution.outputs else {}
if usage:
node_span.set_attribute(SpanAttributes.LLM_TOKEN_COUNT_TOTAL, usage.get("total_tokens", 0))
node_span.set_attribute(
SpanAttributes.LLM_TOKEN_COUNT_PROMPT, usage.get("prompt_tokens", 0)
)
node_span.set_attribute(
SpanAttributes.LLM_TOKEN_COUNT_COMPLETION, usage.get("completion_tokens", 0)
)
finally:
node_span.end(end_time=datetime_to_nanos(finished_at))
finally:
workflow_span.end(end_time=datetime_to_nanos(trace_info.end_time))

def message_trace(self, trace_info: MessageTraceInfo):
if trace_info.message_data is None:
return

file_list = cast(list[str], trace_info.file_list) or []
message_file_data: Optional[MessageFile] = trace_info.message_file_data

if message_file_data is not None:
file_url = f"{self.file_base_url}/{message_file_data.url}" if message_file_data else ""
file_list.append(file_url)

message_metadata = {
"message_id": trace_info.message_id or "",
"conversation_mode": str(trace_info.conversation_mode or ""),
"user_id": trace_info.message_data.from_account_id or "",
"file_list": json.dumps(file_list),
"status": trace_info.message_data.status or "",
"status_message": trace_info.error or "",
"level": "ERROR" if trace_info.error else "DEFAULT",
"total_tokens": trace_info.total_tokens or 0,
"prompt_tokens": trace_info.message_tokens or 0,
"completion_tokens": trace_info.answer_tokens or 0,
"ls_provider": trace_info.message_data.model_provider or "",
"ls_model_name": trace_info.message_data.model_id or "",
}
message_metadata.update(trace_info.metadata)

# Add end user data if available
if trace_info.message_data.from_end_user_id:
end_user_data: Optional[EndUser] = (
db.session.query(EndUser).filter(EndUser.id == trace_info.message_data.from_end_user_id).first()
)
if end_user_data is not None:
message_metadata["end_user_id"] = end_user_data.session_id

attributes = {
SpanAttributes.INPUT_VALUE: trace_info.message_data.query,
SpanAttributes.OUTPUT_VALUE: trace_info.message_data.answer,
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.CHAIN.value,
SpanAttributes.METADATA: json.dumps(message_metadata, ensure_ascii=False),
SpanAttributes.SESSION_ID: trace_info.message_data.conversation_id,
}

trace_id = uuid_to_trace_id(trace_info.message_id)
message_span_id = RandomIdGenerator().generate_span_id()
span_context = SpanContext(
trace_id=trace_id,
span_id=message_span_id,
is_remote=False,
trace_flags=TraceFlags(TraceFlags.SAMPLED),
trace_state=TraceState(),
)

message_span = self.tracer.start_span(
name=TraceTaskName.MESSAGE_TRACE.value,
attributes=attributes,
start_time=datetime_to_nanos(trace_info.start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
)

try:
if trace_info.error:
message_span.add_event(
"exception",
attributes={
"exception.message": trace_info.error,
"exception.type": "Error",
"exception.stacktrace": trace_info.error,
},
)

# Convert outputs to string based on type
if isinstance(trace_info.outputs, dict | list):
outputs_str = json.dumps(trace_info.outputs, ensure_ascii=False)
elif isinstance(trace_info.outputs, str):
outputs_str = trace_info.outputs
else:
outputs_str = str(trace_info.outputs)

llm_attributes = {
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.LLM.value,
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
SpanAttributes.OUTPUT_VALUE: outputs_str,
SpanAttributes.METADATA: json.dumps(message_metadata, ensure_ascii=False),
SpanAttributes.SESSION_ID: trace_info.message_data.conversation_id,
}

if isinstance(trace_info.inputs, list):
for i, msg in enumerate(trace_info.inputs):
if isinstance(msg, dict):
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.{i}.message.content"] = msg.get("text", "")
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.{i}.message.role"] = msg.get(
"role", "user"
)
# todo: handle assistant and tool role messages, as they don't always
# have a text field, but may have a tool_calls field instead
# e.g. 'tool_calls': [{'id': '98af3a29-b066-45a5-b4b1-46c74ddafc58',
# 'type': 'function', 'function': {'name': 'current_time', 'arguments': '{}'}}]}
elif isinstance(trace_info.inputs, dict):
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.0.message.content"] = json.dumps(trace_info.inputs)
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.0.message.role"] = "user"
elif isinstance(trace_info.inputs, str):
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.0.message.content"] = trace_info.inputs
llm_attributes[f"{SpanAttributes.LLM_INPUT_MESSAGES}.0.message.role"] = "user"

if trace_info.total_tokens is not None and trace_info.total_tokens > 0:
llm_attributes[SpanAttributes.LLM_TOKEN_COUNT_TOTAL] = trace_info.total_tokens
if trace_info.message_tokens is not None and trace_info.message_tokens > 0:
llm_attributes[SpanAttributes.LLM_TOKEN_COUNT_PROMPT] = trace_info.message_tokens
if trace_info.answer_tokens is not None and trace_info.answer_tokens > 0:
llm_attributes[SpanAttributes.LLM_TOKEN_COUNT_COMPLETION] = trace_info.answer_tokens

if trace_info.message_data.model_id is not None:
llm_attributes[SpanAttributes.LLM_MODEL_NAME] = trace_info.message_data.model_id
if trace_info.message_data.model_provider is not None:
llm_attributes[SpanAttributes.LLM_PROVIDER] = trace_info.message_data.model_provider

if trace_info.message_data and trace_info.message_data.message_metadata:
metadata_dict = json.loads(trace_info.message_data.message_metadata)
if model_params := metadata_dict.get("model_parameters"):
llm_attributes[SpanAttributes.LLM_INVOCATION_PARAMETERS] = json.dumps(model_params)

llm_span = self.tracer.start_span(
name="llm",
attributes=llm_attributes,
start_time=datetime_to_nanos(trace_info.start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
)

try:
if trace_info.error:
llm_span.add_event(
"exception",
attributes={
"exception.message": trace_info.error,
"exception.type": "Error",
"exception.stacktrace": trace_info.error,
},
)
finally:
llm_span.end(end_time=datetime_to_nanos(trace_info.end_time))
finally:
message_span.end(end_time=datetime_to_nanos(trace_info.end_time))

def moderation_trace(self, trace_info: ModerationTraceInfo):
if trace_info.message_data is None:
return

metadata = {
"message_id": trace_info.message_id,
"tool_name": "moderation",
"status": trace_info.message_data.status,
"status_message": trace_info.message_data.error or "",
"level": "ERROR" if trace_info.message_data.error else "DEFAULT",
}
metadata.update(trace_info.metadata)

trace_id = uuid_to_trace_id(trace_info.message_id)
span_id = RandomIdGenerator().generate_span_id()
context = SpanContext(
trace_id=trace_id,
span_id=span_id,
is_remote=False,
trace_flags=TraceFlags(TraceFlags.SAMPLED),
trace_state=TraceState(),
)

span = self.tracer.start_span(
name=TraceTaskName.MODERATION_TRACE.value,
attributes={
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
SpanAttributes.OUTPUT_VALUE: json.dumps(
{
"action": trace_info.action,
"flagged": trace_info.flagged,
"preset_response": trace_info.preset_response,
"inputs": trace_info.inputs,
},
ensure_ascii=False,
),
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.CHAIN.value,
SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
},
start_time=datetime_to_nanos(trace_info.start_time),
context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
)

try:
if trace_info.message_data.error:
span.add_event(
"exception",
attributes={
"exception.message": trace_info.message_data.error,
"exception.type": "Error",
"exception.stacktrace": trace_info.message_data.error,
},
)
finally:
span.end(end_time=datetime_to_nanos(trace_info.end_time))

def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo):
if trace_info.message_data is None:
return

start_time = trace_info.start_time or trace_info.message_data.created_at
|
||||
end_time = trace_info.end_time or trace_info.message_data.updated_at
|
||||
|
||||
metadata = {
|
||||
"message_id": trace_info.message_id,
|
||||
"tool_name": "suggested_question",
|
||||
"status": trace_info.status,
|
||||
"status_message": trace_info.error or "",
|
||||
"level": "ERROR" if trace_info.error else "DEFAULT",
|
||||
"total_tokens": trace_info.total_tokens,
|
||||
"ls_provider": trace_info.model_provider or "",
|
||||
"ls_model_name": trace_info.model_id or "",
|
||||
}
|
||||
metadata.update(trace_info.metadata)
|
||||
|
||||
trace_id = uuid_to_trace_id(trace_info.message_id)
|
||||
span_id = RandomIdGenerator().generate_span_id()
|
||||
context = SpanContext(
|
||||
trace_id=trace_id,
|
||||
span_id=span_id,
|
||||
is_remote=False,
|
||||
trace_flags=TraceFlags(TraceFlags.SAMPLED),
|
||||
trace_state=TraceState(),
|
||||
)
|
||||
|
||||
span = self.tracer.start_span(
|
||||
name=TraceTaskName.SUGGESTED_QUESTION_TRACE.value,
|
||||
attributes={
|
||||
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
|
||||
SpanAttributes.OUTPUT_VALUE: json.dumps(trace_info.suggested_question, ensure_ascii=False),
|
||||
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.CHAIN.value,
|
||||
SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
|
||||
},
|
||||
start_time=datetime_to_nanos(start_time),
|
||||
context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
|
||||
)
|
||||
|
||||
try:
|
||||
if trace_info.error:
|
||||
span.add_event(
|
||||
"exception",
|
||||
attributes={
|
||||
"exception.message": trace_info.error,
|
||||
"exception.type": "Error",
|
||||
"exception.stacktrace": trace_info.error,
|
||||
},
|
||||
)
|
||||
finally:
|
||||
span.end(end_time=datetime_to_nanos(end_time))
|
||||
|
||||
def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo):
|
||||
if trace_info.message_data is None:
|
||||
return
|
||||
|
||||
start_time = trace_info.start_time or trace_info.message_data.created_at
|
||||
end_time = trace_info.end_time or trace_info.message_data.updated_at
|
||||
|
||||
metadata = {
|
||||
"message_id": trace_info.message_id,
|
||||
"tool_name": "dataset_retrieval",
|
||||
"status": trace_info.message_data.status,
|
||||
"status_message": trace_info.message_data.error or "",
|
||||
"level": "ERROR" if trace_info.message_data.error else "DEFAULT",
|
||||
"ls_provider": trace_info.message_data.model_provider or "",
|
||||
"ls_model_name": trace_info.message_data.model_id or "",
|
||||
}
|
||||
metadata.update(trace_info.metadata)
|
||||
|
||||
trace_id = uuid_to_trace_id(trace_info.message_id)
|
||||
span_id = RandomIdGenerator().generate_span_id()
|
||||
context = SpanContext(
|
||||
trace_id=trace_id,
|
||||
span_id=span_id,
|
||||
is_remote=False,
|
||||
trace_flags=TraceFlags(TraceFlags.SAMPLED),
|
||||
trace_state=TraceState(),
|
||||
)
|
||||
|
||||
span = self.tracer.start_span(
|
||||
name=TraceTaskName.DATASET_RETRIEVAL_TRACE.value,
|
||||
attributes={
|
||||
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
|
||||
SpanAttributes.OUTPUT_VALUE: json.dumps({"documents": trace_info.documents}, ensure_ascii=False),
|
||||
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.RETRIEVER.value,
|
||||
SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
|
||||
"start_time": start_time.isoformat() if start_time else "",
|
||||
"end_time": end_time.isoformat() if end_time else "",
|
||||
},
|
||||
start_time=datetime_to_nanos(start_time),
|
||||
context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
|
||||
)
|
||||
|
||||
try:
|
||||
if trace_info.message_data.error:
|
||||
span.add_event(
|
||||
"exception",
|
||||
attributes={
|
||||
"exception.message": trace_info.message_data.error,
|
||||
"exception.type": "Error",
|
||||
"exception.stacktrace": trace_info.message_data.error,
|
||||
},
|
||||
)
|
||||
finally:
|
||||
span.end(end_time=datetime_to_nanos(end_time))
|
||||
|
||||
def tool_trace(self, trace_info: ToolTraceInfo):
|
||||
if trace_info.message_data is None:
|
||||
logger.warning("[Arize/Phoenix] Message data is None, skipping tool trace.")
|
||||
return
|
||||
|
||||
metadata = {
|
||||
"message_id": trace_info.message_id,
|
||||
"tool_config": json.dumps(trace_info.tool_config, ensure_ascii=False),
|
||||
}
|
||||
|
||||
trace_id = uuid_to_trace_id(trace_info.message_id)
|
||||
tool_span_id = RandomIdGenerator().generate_span_id()
|
||||
logger.info(f"[Arize/Phoenix] Creating tool trace with trace_id: {trace_id}, span_id: {tool_span_id}")
|
||||
|
||||
# Create span context with the same trace_id as the parent
|
||||
# todo: Create with the appropriate parent span context, so that the tool span is
|
||||
# a child of the appropriate span (e.g. message span)
|
||||
span_context = SpanContext(
|
||||
trace_id=trace_id,
|
||||
span_id=tool_span_id,
|
||||
is_remote=False,
|
||||
trace_flags=TraceFlags(TraceFlags.SAMPLED),
|
||||
trace_state=TraceState(),
|
||||
)
|
||||
|
||||
tool_params_str = (
|
||||
json.dumps(trace_info.tool_parameters, ensure_ascii=False)
|
||||
if isinstance(trace_info.tool_parameters, dict)
|
||||
else str(trace_info.tool_parameters)
|
||||
)
|
||||
|
||||
span = self.tracer.start_span(
|
||||
name=trace_info.tool_name,
|
||||
attributes={
|
||||
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.tool_inputs, ensure_ascii=False),
|
||||
SpanAttributes.OUTPUT_VALUE: trace_info.tool_outputs,
|
||||
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.TOOL.value,
|
||||
SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
|
||||
SpanAttributes.TOOL_NAME: trace_info.tool_name,
|
||||
SpanAttributes.TOOL_PARAMETERS: tool_params_str,
|
||||
},
|
||||
start_time=datetime_to_nanos(trace_info.start_time),
|
||||
context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
|
||||
)
|
||||
|
||||
try:
|
||||
if trace_info.error:
|
||||
span.add_event(
|
||||
"exception",
|
||||
attributes={
|
||||
"exception.message": trace_info.error,
|
||||
"exception.type": "Error",
|
||||
"exception.stacktrace": trace_info.error,
|
||||
},
|
||||
)
|
||||
finally:
|
||||
span.end(end_time=datetime_to_nanos(trace_info.end_time))
|
||||
|
||||
def generate_name_trace(self, trace_info: GenerateNameTraceInfo):
|
||||
if trace_info.message_data is None:
|
||||
return
|
||||
|
||||
metadata = {
|
||||
"project_name": self.project,
|
||||
"message_id": trace_info.message_id,
|
||||
"status": trace_info.message_data.status,
|
||||
"status_message": trace_info.message_data.error or "",
|
||||
"level": "ERROR" if trace_info.message_data.error else "DEFAULT",
|
||||
}
|
||||
metadata.update(trace_info.metadata)
|
||||
|
||||
trace_id = uuid_to_trace_id(trace_info.message_id)
|
||||
span_id = RandomIdGenerator().generate_span_id()
|
||||
context = SpanContext(
|
||||
trace_id=trace_id,
|
||||
span_id=span_id,
|
||||
is_remote=False,
|
||||
trace_flags=TraceFlags(TraceFlags.SAMPLED),
|
||||
trace_state=TraceState(),
|
||||
)
|
||||
|
||||
span = self.tracer.start_span(
|
||||
name=TraceTaskName.GENERATE_NAME_TRACE.value,
|
||||
attributes={
|
||||
SpanAttributes.INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
|
||||
SpanAttributes.OUTPUT_VALUE: json.dumps(trace_info.outputs, ensure_ascii=False),
|
||||
SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.CHAIN.value,
|
||||
SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
|
||||
SpanAttributes.SESSION_ID: trace_info.message_data.conversation_id,
|
||||
"start_time": trace_info.start_time.isoformat() if trace_info.start_time else "",
|
||||
"end_time": trace_info.end_time.isoformat() if trace_info.end_time else "",
|
||||
},
|
||||
start_time=datetime_to_nanos(trace_info.start_time),
|
||||
context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
|
||||
)
|
||||
|
||||
try:
|
||||
if trace_info.message_data.error:
|
||||
span.add_event(
|
||||
"exception",
|
||||
attributes={
|
||||
"exception.message": trace_info.message_data.error,
|
||||
"exception.type": "Error",
|
||||
"exception.stacktrace": trace_info.message_data.error,
|
||||
},
|
||||
)
|
||||
finally:
|
||||
span.end(end_time=datetime_to_nanos(trace_info.end_time))
|
||||
|
||||
def api_check(self):
|
||||
try:
|
||||
with self.tracer.start_span("api_check") as span:
|
||||
span.set_attribute("test", "true")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.info(f"[Arize/Phoenix] API check failed: {str(e)}", exc_info=True)
|
||||
raise ValueError(f"[Arize/Phoenix] API check failed: {str(e)}")
|
||||
|
||||
def get_project_url(self):
|
||||
try:
|
||||
if self.arize_phoenix_config.endpoint == "https://otlp.arize.com":
|
||||
return "https://app.arize.com/"
|
||||
else:
|
||||
return f"{self.arize_phoenix_config.endpoint}/projects/"
|
||||
except Exception as e:
|
||||
logger.info(f"[Arize/Phoenix] Get run url failed: {str(e)}", exc_info=True)
|
||||
raise ValueError(f"[Arize/Phoenix] Get run url failed: {str(e)}")
|
||||
|
||||
def _get_workflow_nodes(self, workflow_run_id: str):
|
||||
"""Helper method to get workflow nodes"""
|
||||
workflow_nodes = (
|
||||
db.session.query(
|
||||
WorkflowNodeExecutionModel.id,
|
||||
WorkflowNodeExecutionModel.tenant_id,
|
||||
WorkflowNodeExecutionModel.app_id,
|
||||
WorkflowNodeExecutionModel.title,
|
||||
WorkflowNodeExecutionModel.node_type,
|
||||
WorkflowNodeExecutionModel.status,
|
||||
WorkflowNodeExecutionModel.inputs,
|
||||
WorkflowNodeExecutionModel.outputs,
|
||||
WorkflowNodeExecutionModel.created_at,
|
||||
WorkflowNodeExecutionModel.elapsed_time,
|
||||
WorkflowNodeExecutionModel.process_data,
|
||||
WorkflowNodeExecutionModel.execution_metadata,
|
||||
)
|
||||
.filter(WorkflowNodeExecutionModel.workflow_run_id == workflow_run_id)
|
||||
.all()
|
||||
)
|
||||
return workflow_nodes
|
||||
|
|
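The tracing code above repeatedly builds a non-recording parent SpanContext so that independently created spans land on the same deterministic trace id. A minimal standalone sketch of that pattern using the public OpenTelemetry API (the trace-id derivation mirrors the `uuid_to_trace_id` helper used above; the UUID and attribute values are placeholders):

    import uuid
    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.id_generator import RandomIdGenerator
    from opentelemetry.trace import NonRecordingSpan, SpanContext, TraceFlags, TraceState

    tracer = TracerProvider().get_tracer("sketch")

    # Derive a stable 128-bit trace id from a message UUID (assumption: same idea as uuid_to_trace_id).
    trace_id = uuid.UUID("09b3e04c-f9ae-404c-ad82-290b8d7bd382").int
    parent_ctx = SpanContext(
        trace_id=trace_id,
        span_id=RandomIdGenerator().generate_span_id(),
        is_remote=False,
        trace_flags=TraceFlags(TraceFlags.SAMPLED),
        trace_state=TraceState(),
    )

    # Any span started under this non-recording parent shares the derived trace id.
    with tracer.start_as_current_span(
        "llm", context=trace.set_span_in_context(NonRecordingSpan(parent_ctx))
    ) as span:
        span.set_attribute("session.id", "conversation-123")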
@@ -4,6 +4,8 @@ from pydantic import BaseModel, ValidationInfo, field_validator


class TracingProviderEnum(StrEnum):
    ARIZE = "arize"
    PHOENIX = "phoenix"
    LANGFUSE = "langfuse"
    LANGSMITH = "langsmith"
    OPIK = "opik"

@@ -18,6 +20,69 @@ class BaseTracingConfig(BaseModel):
    ...


class ArizeConfig(BaseTracingConfig):
    """
    Model class for Arize tracing config.
    """

    api_key: str | None = None
    space_id: str | None = None
    project: str | None = None
    endpoint: str = "https://otlp.arize.com"

    @field_validator("project")
    @classmethod
    def project_validator(cls, v, info: ValidationInfo):
        if v is None or v == "":
            v = "default"

        return v

    @field_validator("endpoint")
    @classmethod
    def endpoint_validator(cls, v, info: ValidationInfo):
        if v is None or v == "":
            v = "https://otlp.arize.com"
        if not v.startswith(("https://", "http://")):
            raise ValueError("endpoint must start with https:// or http://")
        if "/" in v[8:]:
            parts = v.split("/")
            v = parts[0] + "//" + parts[2]

        return v


class PhoenixConfig(BaseTracingConfig):
    """
    Model class for Phoenix tracing config.
    """

    api_key: str | None = None
    project: str | None = None
    endpoint: str = "https://app.phoenix.arize.com"

    @field_validator("project")
    @classmethod
    def project_validator(cls, v, info: ValidationInfo):
        if v is None or v == "":
            v = "default"

        return v

    @field_validator("endpoint")
    @classmethod
    def endpoint_validator(cls, v, info: ValidationInfo):
        if v is None or v == "":
            v = "https://app.phoenix.arize.com"
        if not v.startswith(("https://", "http://")):
            raise ValueError("endpoint must start with https:// or http://")
        if "/" in v[8:]:
            parts = v.split("/")
            v = parts[0] + "//" + parts[2]

        return v


class LangfuseConfig(BaseTracingConfig):
    """
    Model class for Langfuse tracing config.
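The two endpoint validators above fall back to the provider default and then reduce any URL to its scheme and host. A standalone sketch of the same normalization (the helper name is hypothetical):

    def normalize_endpoint(v: str, default: str) -> str:
        # Fall back to the provider default when empty.
        if not v:
            v = default
        if not v.startswith(("https://", "http://")):
            raise ValueError("endpoint must start with https:// or http://")
        # Strip any path component, keeping only scheme://host.
        if "/" in v[8:]:
            parts = v.split("/")
            v = parts[0] + "//" + parts[2]
        return v

    assert normalize_endpoint("https://app.phoenix.arize.com/projects/1", "https://app.phoenix.arize.com") == "https://app.phoenix.arize.com"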
@@ -32,6 +32,7 @@ from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.nodes.enums import NodeType
from extensions.ext_database import db
from models import EndUser, WorkflowNodeExecutionTriggeredFrom
from models.enums import MessageStatus

logger = logging.getLogger(__name__)

@@ -83,6 +84,7 @@ class LangFuseDataTrace(BaseTraceInstance):
metadata=metadata,
session_id=trace_info.conversation_id,
tags=["message", "workflow"],
version=trace_info.workflow_run_version,
)
self.add_trace(langfuse_trace_data=trace_data)
workflow_span_data = LangfuseSpan(
@@ -108,6 +110,7 @@ class LangFuseDataTrace(BaseTraceInstance):
metadata=metadata,
session_id=trace_info.conversation_id,
tags=["workflow"],
version=trace_info.workflow_run_version,
)
self.add_trace(langfuse_trace_data=trace_data)

@@ -172,37 +175,7 @@ class LangFuseDataTrace(BaseTraceInstance):
}
)

# add span
if trace_info.message_id:
span_data = LangfuseSpan(
id=node_execution_id,
name=node_type,
input=inputs,
output=outputs,
trace_id=trace_id,
start_time=created_at,
end_time=finished_at,
metadata=metadata,
level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
status_message=trace_info.error or "",
parent_observation_id=trace_info.workflow_run_id,
)
else:
span_data = LangfuseSpan(
id=node_execution_id,
name=node_type,
input=inputs,
output=outputs,
trace_id=trace_id,
start_time=created_at,
end_time=finished_at,
metadata=metadata,
level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
status_message=trace_info.error or "",
)

self.add_span(langfuse_span_data=span_data)

# add generation span
if process_data and process_data.get("model_mode") == "chat":
total_token = metadata.get("total_tokens", 0)
prompt_tokens = 0
@@ -226,10 +199,10 @@ class LangFuseDataTrace(BaseTraceInstance):
)

node_generation_data = LangfuseGeneration(
name="llm",
id=node_execution_id,
name=node_name,
trace_id=trace_id,
model=process_data.get("model_name"),
parent_observation_id=node_execution_id,
start_time=created_at,
end_time=finished_at,
input=inputs,
@@ -237,11 +210,30 @@ class LangFuseDataTrace(BaseTraceInstance):
metadata=metadata,
level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
status_message=trace_info.error or "",
parent_observation_id=trace_info.workflow_run_id if trace_info.message_id else None,
usage=generation_usage,
)

self.add_generation(langfuse_generation_data=node_generation_data)

# add normal span
else:
span_data = LangfuseSpan(
id=node_execution_id,
name=node_name,
input=inputs,
output=outputs,
trace_id=trace_id,
start_time=created_at,
end_time=finished_at,
metadata=metadata,
level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
status_message=trace_info.error or "",
parent_observation_id=trace_info.workflow_run_id if trace_info.message_id else None,
)

self.add_span(langfuse_span_data=span_data)

def message_trace(self, trace_info: MessageTraceInfo, **kwargs):
# get message file data
file_list = trace_info.file_list
@@ -284,7 +276,7 @@ class LangFuseDataTrace(BaseTraceInstance):
)
self.add_trace(langfuse_trace_data=trace_data)

# start add span
# add generation
generation_usage = GenerationUsage(
input=trace_info.message_tokens,
output=trace_info.answer_tokens,
@@ -302,7 +294,7 @@ class LangFuseDataTrace(BaseTraceInstance):
input=trace_info.inputs,
output=message_data.answer,
metadata=metadata,
level=(LevelEnum.DEFAULT if message_data.status != "error" else LevelEnum.ERROR),
level=(LevelEnum.DEFAULT if message_data.status != MessageStatus.ERROR else LevelEnum.ERROR),
status_message=message_data.error or "",
usage=generation_usage,
)
@@ -348,7 +340,7 @@ class LangFuseDataTrace(BaseTraceInstance):
start_time=trace_info.start_time,
end_time=trace_info.end_time,
metadata=trace_info.metadata,
level=(LevelEnum.DEFAULT if message_data.status != "error" else LevelEnum.ERROR),
level=(LevelEnum.DEFAULT if message_data.status != MessageStatus.ERROR else LevelEnum.ERROR),
status_message=message_data.error or "",
usage=generation_usage,
)
@@ -41,6 +41,28 @@ from tasks.ops_trace_task import process_trace_tasks
class OpsTraceProviderConfigMap(dict[str, dict[str, Any]]):
    def __getitem__(self, provider: str) -> dict[str, Any]:
        match provider:
            case TracingProviderEnum.ARIZE:
                from core.ops.arize_phoenix_trace.arize_phoenix_trace import ArizePhoenixDataTrace
                from core.ops.entities.config_entity import ArizeConfig

                return {
                    "config_class": ArizeConfig,
                    "secret_keys": ["api_key", "space_id"],
                    "other_keys": ["project", "endpoint"],
                    "trace_instance": ArizePhoenixDataTrace,
                }

            case TracingProviderEnum.PHOENIX:
                from core.ops.arize_phoenix_trace.arize_phoenix_trace import ArizePhoenixDataTrace
                from core.ops.entities.config_entity import PhoenixConfig

                return {
                    "config_class": PhoenixConfig,
                    "secret_keys": ["api_key"],
                    "other_keys": ["project", "endpoint"],
                    "trace_instance": ArizePhoenixDataTrace,
                }

            case TracingProviderEnum.LANGFUSE:
                from core.ops.entities.config_entity import LangfuseConfig
                from core.ops.langfuse_trace.langfuse_trace import LangFuseDataTrace
@@ -84,7 +106,26 @@ class OpsTraceProviderConfigMap(dict[str, dict[str, Any]]):
                    "other_keys": ["project", "entity", "endpoint", "host"],
                    "trace_instance": WeaveDataTrace,
                }
            case TracingProviderEnum.ARIZE:
                from core.ops.arize_phoenix_trace.arize_phoenix_trace import ArizePhoenixDataTrace
                from core.ops.entities.config_entity import ArizeConfig

                return {
                    "config_class": ArizeConfig,
                    "secret_keys": ["api_key", "space_id"],
                    "other_keys": ["project", "endpoint"],
                    "trace_instance": ArizePhoenixDataTrace,
                }
            case TracingProviderEnum.PHOENIX:
                from core.ops.arize_phoenix_trace.arize_phoenix_trace import ArizePhoenixDataTrace
                from core.ops.entities.config_entity import PhoenixConfig

                return {
                    "config_class": PhoenixConfig,
                    "secret_keys": ["api_key"],
                    "other_keys": ["project", "endpoint"],
                    "trace_instance": ArizePhoenixDataTrace,
                }
            case _:
                raise KeyError(f"Unsupported tracing provider: {provider}")
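A short usage sketch of the mapping above; the credential values are placeholders, and constructing the trace instance directly from the config class is an assumption about how callers wire it together:

    provider_config_map = OpsTraceProviderConfigMap()
    entry = provider_config_map[TracingProviderEnum.PHOENIX]
    config_class = entry["config_class"]        # PhoenixConfig
    trace_instance = entry["trace_instance"]    # ArizePhoenixDataTrace
    config = config_class(api_key="pk-...", project="demo")  # hypothetical credentials
    tracer = trace_instance(config)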
@@ -42,4 +42,4 @@ class DynamicSelectClient(BasePluginClient):
for options in response:
return options

raise ValueError("Plugin service returned no options")
raise ValueError(f"Plugin service returned no options for parameter '{parameter}' in provider '{provider}'")
@@ -1,7 +1,6 @@
"""Document loader helpers."""

import concurrent.futures
from pathlib import Path
from typing import NamedTuple, Optional, cast


@@ -16,7 +15,7 @@ class FileEncoding(NamedTuple):
"""The language of the file."""


def detect_file_encodings(file_path: str, timeout: int = 5) -> list[FileEncoding]:
def detect_file_encodings(file_path: str, timeout: int = 5, sample_size: int = 1024 * 1024) -> list[FileEncoding]:
"""Try to detect the file encoding.

Returns a list of `FileEncoding` tuples with the detected encodings ordered
@@ -25,11 +24,16 @@ def detect_file_encodings(file_path: str, timeout: int = 5) -> list[FileEncoding
Args:
file_path: The path to the file to detect the encoding for.
timeout: The timeout in seconds for the encoding detection.
sample_size: The number of bytes to read for encoding detection. Default is 1MB.
For large files, reading only a sample is sufficient and prevents timeout.
"""
import chardet

def read_and_detect(file_path: str) -> list[dict]:
rawdata = Path(file_path).read_bytes()
with open(file_path, "rb") as f:
# Read only a sample of the file for encoding detection
# This prevents timeout on large files while still providing accurate encoding detection
rawdata = f.read(sample_size)
return cast(list[dict], chardet.detect_all(rawdata))

with concurrent.futures.ThreadPoolExecutor() as executor:
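A usage sketch for the updated helper; the file path is hypothetical, and the FileEncoding fields are assumed to mirror chardet's result keys:

    encodings = detect_file_encodings("/tmp/sample.txt", timeout=5, sample_size=1024 * 1024)
    for enc in encodings:
        print(enc.encoding, enc.confidence)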
@@ -36,8 +36,12 @@ class TextExtractor(BaseExtractor):
break
except UnicodeDecodeError:
continue
else:
raise RuntimeError(
f"Decode failed: {self._file_path}, all detected encodings failed. Original error: {e}"
)
else:
raise RuntimeError(f"Error loading {self._file_path}") from e
raise RuntimeError(f"Decode failed: {self._file_path}, specified encoding failed. Original error: {e}")
except Exception as e:
raise RuntimeError(f"Error loading {self._file_path}") from e
@@ -1010,6 +1010,9 @@ class DatasetRetrieval:
def _process_metadata_filter_func(
self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list
):
if value is None:
return

key = f"{metadata_name}_{sequence}"
key_value = f"{metadata_name}_{sequence}_value"
match condition:
@@ -31,6 +31,14 @@ class TTSTool(BuiltinTool):
model_type=ModelType.TTS,
model=model,
)
if not voice:
voices = model_instance.get_tts_voices()
if voices:
voice = voices[0].get("value")
if not voice:
raise ValueError("Sorry, no voice available.")
else:
raise ValueError("Sorry, no voice available.")
tts = model_instance.invoke_tts(
content_text=tool_parameters.get("text"),  # type: ignore
user=user_id,
@@ -4,6 +4,7 @@ from typing import Any, Optional
from core.helper.code_executor.code_executor import CodeExecutor, CodeLanguage
from core.tools.builtin_tool.tool import BuiltinTool
from core.tools.entities.tool_entities import ToolInvokeMessage
from core.tools.errors import ToolInvokeError


class SimpleCode(BuiltinTool):
@@ -25,6 +26,8 @@ class SimpleCode(BuiltinTool):
if language not in {CodeLanguage.PYTHON3, CodeLanguage.JAVASCRIPT}:
raise ValueError(f"Only python3 and javascript are supported, not {language}")

result = CodeExecutor.execute_code(language, "", code)

yield self.create_text_message(result)
try:
result = CodeExecutor.execute_code(language, "", code)
yield self.create_text_message(result)
except Exception as e:
raise ToolInvokeError(str(e))
@@ -66,11 +66,21 @@ class WorkflowNodeExecution(BaseModel):
but they are not stored in the model.
"""

# Core identification fields
id: str  # Unique identifier for this execution record
node_execution_id: Optional[str] = None  # Optional secondary ID for cross-referencing
# --------- Core identification fields ---------

# Unique identifier for this execution record, used when persisting to storage.
# Value is a UUID string (e.g., '09b3e04c-f9ae-404c-ad82-290b8d7bd382').
id: str

# Optional secondary ID for cross-referencing purposes.
#
# NOTE: For referencing the persisted record, use `id` rather than `node_execution_id`.
# While `node_execution_id` may sometimes be a UUID string, this is not guaranteed.
# In most scenarios, `id` should be used as the primary identifier.
node_execution_id: Optional[str] = None
workflow_id: str  # ID of the workflow this node belongs to
workflow_execution_id: Optional[str] = None  # ID of the specific workflow run (null for single-step debugging)
# --------- Core identification fields ends ---------

# Execution positioning and flow
index: int  # Sequence number for ordering in trace visualization
@@ -158,7 +158,10 @@ class AgentNode(ToolNode):
# variable_pool.convert_template expects a string template,
# but if passing a dict, convert to JSON string first before rendering
try:
parameter_value = json.dumps(agent_input.value, ensure_ascii=False)
if not isinstance(agent_input.value, str):
parameter_value = json.dumps(agent_input.value, ensure_ascii=False)
else:
parameter_value = str(agent_input.value)
except TypeError:
parameter_value = str(agent_input.value)
segment_group = variable_pool.convert_template(parameter_value)
@@ -166,7 +169,8 @@ class AgentNode(ToolNode):
# variable_pool.convert_template returns a string,
# so we need to convert it back to a dictionary
try:
parameter_value = json.loads(parameter_value)
if not isinstance(agent_input.value, str):
parameter_value = json.loads(parameter_value)
except json.JSONDecodeError:
parameter_value = parameter_value
else:
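The change above serializes non-string agent inputs to JSON before template rendering and parses them back afterwards. A minimal sketch of that round trip, with variable_pool.convert_template replaced by a trivial stand-in substitution for illustration:

    import json

    def render(template: str, variables: dict[str, str]) -> str:
        # Stand-in for variable_pool.convert_template(...): replace {{#key#}} markers.
        out = template
        for key, val in variables.items():
            out = out.replace("{{#" + key + "#}}", val)
        return out

    value = {"query": "{{#sys.query#}}", "top_k": 3}      # a dict-valued agent input
    rendered = render(json.dumps(value, ensure_ascii=False), {"sys.query": "hello"})
    parsed = json.loads(rendered)                          # back to a dict after rendering
    print(parsed)  # {'query': 'hello', 'top_k': 3}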
@@ -2,7 +2,6 @@ import logging
from collections.abc import Generator
from typing import cast

from core.file import FILE_MODEL_IDENTITY, File
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.graph_engine.entities.event import (
GraphEngineEvent,
@@ -201,44 +200,3 @@ class AnswerStreamProcessor(StreamProcessor):
stream_out_answer_node_ids.append(answer_node_id)

return stream_out_answer_node_ids

@classmethod
def _fetch_files_from_variable_value(cls, value: dict | list) -> list[dict]:
"""
Fetch files from variable value
:param value: variable value
:return:
"""
if not value:
return []

files = []
if isinstance(value, list):
for item in value:
file_var = cls._get_file_var_from_value(item)
if file_var:
files.append(file_var)
elif isinstance(value, dict):
file_var = cls._get_file_var_from_value(value)
if file_var:
files.append(file_var)

return files

@classmethod
def _get_file_var_from_value(cls, value: dict | list):
"""
Get file var from value
:param value: variable value
:return:
"""
if not value:
return None

if isinstance(value, dict):
if "dify_model_identity" in value and value["dify_model_identity"] == FILE_MODEL_IDENTITY:
return value
elif isinstance(value, File):
return value.to_dict()

return None
@@ -333,7 +333,7 @@ class Executor:
try:
response = getattr(ssrf_proxy, self.method.lower())(**request_args)
except (ssrf_proxy.MaxRetriesExceededError, httpx.RequestError) as e:
raise HttpRequestNodeError(str(e))
raise HttpRequestNodeError(str(e)) from e
# FIXME: fix type ignore, this maybe httpx type issue
return response  # type: ignore
@@ -490,6 +490,9 @@ class KnowledgeRetrievalNode(LLMNode):
def _process_metadata_filter_func(
self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list
):
if value is None:
return

key = f"{metadata_name}_{sequence}"
key_value = f"{metadata_name}_{sequence}_value"
match condition:
@@ -6,6 +6,7 @@ from sqlalchemy.orm import Session

from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler
from core.file import File, FileTransferMethod
from core.model_runtime.entities.llm_entities import LLMUsage
from core.plugin.impl.exc import PluginDaemonClientSideError
from core.plugin.impl.plugin import PluginInstaller
from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter
@@ -167,7 +168,9 @@ class ToolNode(BaseNode[ToolNodeData]):
if tool_input.type == "variable":
variable = variable_pool.get(tool_input.value)
if variable is None:
raise ToolParameterError(f"Variable {tool_input.value} does not exist")
if parameter.required:
raise ToolParameterError(f"Variable {tool_input.value} does not exist")
continue
parameter_value = variable.value
elif tool_input.type in {"mixed", "constant"}:
segment_group = variable_pool.convert_template(str(tool_input.value))
@@ -206,7 +209,7 @@ class ToolNode(BaseNode[ToolNodeData]):

agent_logs: list[AgentLogEvent] = []
agent_execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] = {}

llm_usage: LLMUsage | None = None
variables: dict[str, Any] = {}

for message in message_stream:
@@ -274,9 +277,10 @@ class ToolNode(BaseNode[ToolNodeData]):
elif message.type == ToolInvokeMessage.MessageType.JSON:
assert isinstance(message.message, ToolInvokeMessage.JsonMessage)
if self.node_type == NodeType.AGENT:
msg_metadata = message.message.json_object.pop("execution_metadata", {})
msg_metadata: dict[str, Any] = message.message.json_object.pop("execution_metadata", {})
llm_usage = LLMUsage.from_metadata(msg_metadata)
agent_execution_metadata = {
key: value
WorkflowNodeExecutionMetadataKey(key): value
for key, value in msg_metadata.items()
if key in WorkflowNodeExecutionMetadataKey.__members__.values()
}
@@ -375,6 +379,7 @@ class ToolNode(BaseNode[ToolNodeData]):
WorkflowNodeExecutionMetadataKey.AGENT_LOG: agent_logs,
},
inputs=parameters_for_log,
llm_usage=llm_usage,
)
)
@@ -0,0 +1,32 @@
import abc
from collections.abc import Mapping
from typing import Any, Protocol

from sqlalchemy.orm import Session

from core.workflow.nodes.enums import NodeType


class DraftVariableSaver(Protocol):
    @abc.abstractmethod
    def save(self, process_data: Mapping[str, Any] | None, outputs: Mapping[str, Any] | None):
        pass


class DraftVariableSaverFactory(Protocol):
    @abc.abstractmethod
    def __call__(
        self,
        session: Session,
        app_id: str,
        node_id: str,
        node_type: NodeType,
        node_execution_id: str,
        enclosing_node_id: str | None = None,
    ) -> "DraftVariableSaver":
        pass


class NoopDraftVariableSaver(DraftVariableSaver):
    def save(self, process_data: Mapping[str, Any] | None, outputs: Mapping[str, Any] | None):
        pass
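A sketch of a factory that satisfies DraftVariableSaverFactory structurally; the logging saver below is hypothetical and only illustrates the call shape:

    class LoggingDraftVariableSaver:
        def __init__(self, node_id: str):
            self._node_id = node_id

        def save(self, process_data, outputs):
            # Hypothetical saver: just report what would be persisted.
            print(f"node {self._node_id}: outputs={outputs}")


    def logging_saver_factory(
        session, app_id, node_id, node_type, node_execution_id, enclosing_node_id=None
    ) -> DraftVariableSaver:
        return LoggingDraftVariableSaver(node_id=node_id)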
@@ -27,6 +27,7 @@ from core.workflow.enums import SystemVariableKey
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_entry import WorkflowEntry
from libs.datetime_utils import naive_utc_now


@dataclass
@@ -160,12 +161,13 @@ class WorkflowCycleManager:
exceptions_count: int = 0,
) -> WorkflowExecution:
workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
now = naive_utc_now()

workflow_execution.status = WorkflowExecutionStatus(status.value)
workflow_execution.error_message = error_message
workflow_execution.total_tokens = total_tokens
workflow_execution.total_steps = total_steps
workflow_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
workflow_execution.finished_at = now
workflow_execution.exceptions_count = exceptions_count

# Use the instance repository to find running executions for a workflow run
@@ -174,7 +176,6 @@ class WorkflowCycleManager:
)

# Update the domain models
now = datetime.now(UTC).replace(tzinfo=None)
for node_execution in running_node_executions:
if node_execution.node_execution_id:
# Update the domain model
@@ -12,14 +12,14 @@ def init_app(app: DifyApp):
@app.after_request
def after_request(response):
"""Add Version headers to the response."""
response.headers.add("X-Version", dify_config.CURRENT_VERSION)
response.headers.add("X-Version", dify_config.project.version)
response.headers.add("X-Env", dify_config.DEPLOY_ENV)
return response

@app.route("/health")
def health():
return Response(
json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.CURRENT_VERSION}),
json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.project.version}),
status=200,
content_type="application/json",
)
@@ -49,7 +49,7 @@ def init_app(app: DifyApp):
logging.getLogger().addHandler(exception_handler)

def init_flask_instrumentor(app: DifyApp):
meter = get_meter("http_metrics", version=dify_config.CURRENT_VERSION)
meter = get_meter("http_metrics", version=dify_config.project.version)
_http_response_counter = meter.create_counter(
"http.server.response.count",
description="Total number of HTTP responses by status code, method and target",
@@ -163,7 +163,7 @@ def init_app(app: DifyApp):
resource = Resource(
attributes={
ResourceAttributes.SERVICE_NAME: dify_config.APPLICATION_NAME,
ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}",
ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}",
ResourceAttributes.PROCESS_PID: os.getpid(),
ResourceAttributes.DEPLOYMENT_ENVIRONMENT: f"{dify_config.DEPLOY_ENV}-{dify_config.EDITION}",
ResourceAttributes.HOST_NAME: socket.gethostname(),
@@ -35,6 +35,6 @@ def init_app(app: DifyApp):
traces_sample_rate=dify_config.SENTRY_TRACES_SAMPLE_RATE,
profiles_sample_rate=dify_config.SENTRY_PROFILES_SAMPLE_RATE,
environment=dify_config.DEPLOY_ENV,
release=f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}",
release=f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}",
before_send=before_send,
)
@@ -0,0 +1,30 @@
from pathlib import Path


def search_file_upwards(
    base_dir_path: Path,
    target_file_name: str,
    max_search_parent_depth: int,
) -> Path:
    """
    Find a target file in the current directory or its parent directories up to a specified depth.
    :param base_dir_path: Starting directory path to search from.
    :param target_file_name: Name of the file to search for.
    :param max_search_parent_depth: Maximum number of parent directories to search upwards.
    :return: Path of the file if found; raises ValueError if it is not found within the search depth.
    """
    current_path = base_dir_path.resolve()
    for _ in range(max_search_parent_depth):
        candidate_path = current_path / target_file_name
        if candidate_path.is_file():
            return candidate_path
        parent_path = current_path.parent
        if parent_path == current_path:  # reached the root directory
            break
        else:
            current_path = parent_path

    raise ValueError(
        f"File '{target_file_name}' not found in the directory '{base_dir_path.resolve()}' or its parent directories"
        f" in depth of {max_search_parent_depth}."
    )
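A usage sketch for the helper above, walking up from the calling module's directory:

    from pathlib import Path

    # Starting from this module's directory, look for pyproject.toml at most two levels up.
    pyproject_path = search_file_upwards(
        base_dir_path=Path(__file__).parent,
        target_file_name="pyproject.toml",
        max_search_parent_depth=2,
    )
    print(pyproject_path)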
@@ -140,7 +140,7 @@ class Dataset(Base):
def word_count(self):
return (
db.session.query(Document)
.with_entities(func.coalesce(func.sum(Document.word_count)))
.with_entities(func.coalesce(func.sum(Document.word_count), 0))
.filter(Document.dataset_id == self.id)
.scalar()
)
@@ -448,7 +448,7 @@ class Document(Base):
def hit_count(self):
return (
db.session.query(DocumentSegment)
.with_entities(func.coalesce(func.sum(DocumentSegment.hit_count)))
.with_entities(func.coalesce(func.sum(DocumentSegment.hit_count), 0))
.filter(DocumentSegment.document_id == self.id)
.scalar()
)
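The `func.coalesce(..., 0)` change above makes these aggregates return 0 instead of None when no rows match. A standalone SQLAlchemy sketch of the same pattern (in-memory SQLite with a hypothetical table):

    from sqlalchemy import Column, Integer, create_engine, func
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Doc(Base):
        __tablename__ = "docs"
        id = Column(Integer, primary_key=True)
        word_count = Column(Integer)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        total = session.query(func.coalesce(func.sum(Doc.word_count), 0)).scalar()
        print(total)  # 0, even though the table is empty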
@@ -21,3 +21,12 @@ class DraftVariableType(StrEnum):
    NODE = "node"
    SYS = "sys"
    CONVERSATION = "conversation"


class MessageStatus(StrEnum):
    """
    Message Status Enum
    """

    NORMAL = "normal"
    ERROR = "error"
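A minimal sketch of comparing against the new enum instead of a bare string (the stored value is a placeholder):

    status = "error"  # value as stored on a Message row
    if status == MessageStatus.ERROR:  # a StrEnum member compares equal to its string value
        print("message failed")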
@@ -676,7 +676,7 @@ class Conversation(Base):
if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
if value["transfer_method"] == FileTransferMethod.TOOL_FILE:
value["tool_file_id"] = value["related_id"]
elif value["transfer_method"] == FileTransferMethod.LOCAL_FILE:
elif value["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]:
value["upload_file_id"] = value["related_id"]
inputs[key] = file_factory.build_from_mapping(mapping=value, tenant_id=value["tenant_id"])
elif isinstance(value, list) and all(
@@ -686,7 +686,7 @@ class Conversation(Base):
for item in value:
if item["transfer_method"] == FileTransferMethod.TOOL_FILE:
item["tool_file_id"] = item["related_id"]
elif item["transfer_method"] == FileTransferMethod.LOCAL_FILE:
elif item["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]:
item["upload_file_id"] = item["related_id"]
inputs[key].append(file_factory.build_from_mapping(mapping=item, tenant_id=item["tenant_id"]))

@@ -946,7 +946,7 @@ class Message(Base):
if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
if value["transfer_method"] == FileTransferMethod.TOOL_FILE:
value["tool_file_id"] = value["related_id"]
elif value["transfer_method"] == FileTransferMethod.LOCAL_FILE:
elif value["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]:
value["upload_file_id"] = value["related_id"]
inputs[key] = file_factory.build_from_mapping(mapping=value, tenant_id=value["tenant_id"])
elif isinstance(value, list) and all(
@@ -956,7 +956,7 @@ class Message(Base):
for item in value:
if item["transfer_method"] == FileTransferMethod.TOOL_FILE:
item["tool_file_id"] = item["related_id"]
elif item["transfer_method"] == FileTransferMethod.LOCAL_FILE:
elif item["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]:
item["upload_file_id"] = item["related_id"]
inputs[key].append(file_factory.build_from_mapping(mapping=item, tenant_id=item["tenant_id"]))
return inputs
@@ -1,9 +1,10 @@
[project]
name = "dify-api"
dynamic = ["version"]
version = "1.5.1"
requires-python = ">=3.11,<3.13"

dependencies = [
"arize-phoenix-otel~=0.9.2",
"authlib==1.3.1",
"azure-identity==1.16.1",
"beautifulsoup4==4.12.2",
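With the version now pinned under [project], it can be read back with the standard library; a short sketch (the path is assumed to be relative to the api directory):

    import tomllib
    from pathlib import Path

    data = tomllib.loads(Path("pyproject.toml").read_text(encoding="utf-8"))
    print(data["project"]["version"])  # e.g. "1.5.1"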
@@ -889,7 +889,7 @@ class RegisterService:

TenantService.create_owner_tenant_if_not_exist(account=account, is_setup=True)

dify_setup = DifySetup(version=dify_config.CURRENT_VERSION)
dify_setup = DifySetup(version=dify_config.project.version)
db.session.add(dify_setup)
db.session.commit()
except Exception as e:
@@ -1,13 +1,17 @@
import io
import logging
import uuid
from collections.abc import Generator
from typing import Optional

from flask import Response, stream_with_context
from werkzeug.datastructures import FileStorage

from constants import AUDIO_EXTENSIONS
from core.model_manager import ModelManager
from core.model_runtime.entities.model_entities import ModelType
from extensions.ext_database import db
from models.enums import MessageStatus
from models.model import App, AppMode, AppModelConfig, Message
from services.errors.audio import (
AudioTooLargeServiceError,
@@ -16,6 +20,7 @@ from services.errors.audio import (
ProviderNotSupportTextToSpeechServiceError,
UnsupportedAudioTypeServiceError,
)
from services.workflow_service import WorkflowService

FILE_SIZE = 30
FILE_SIZE_LIMIT = FILE_SIZE * 1024 * 1024
@@ -74,35 +79,36 @@ class AudioService:
voice: Optional[str] = None,
end_user: Optional[str] = None,
message_id: Optional[str] = None,
is_draft: bool = False,
):
from collections.abc import Generator

from flask import Response, stream_with_context

from app import app
from extensions.ext_database import db

def invoke_tts(text_content: str, app_model: App, voice: Optional[str] = None):
def invoke_tts(text_content: str, app_model: App, voice: Optional[str] = None, is_draft: bool = False):
with app.app_context():
if app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}:
workflow = app_model.workflow
if workflow is None:
raise ValueError("TTS is not enabled")
if voice is None:
if app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}:
if is_draft:
workflow = WorkflowService().get_draft_workflow(app_model=app_model)
else:
workflow = app_model.workflow
if (
workflow is None
or "text_to_speech" not in workflow.features_dict
or not workflow.features_dict["text_to_speech"].get("enabled")
):
raise ValueError("TTS is not enabled")

features_dict = workflow.features_dict
if "text_to_speech" not in features_dict or not features_dict["text_to_speech"].get("enabled"):
raise ValueError("TTS is not enabled")
voice = workflow.features_dict["text_to_speech"].get("voice")
else:
if not is_draft:
if app_model.app_model_config is None:
raise ValueError("AppModelConfig not found")
text_to_speech_dict = app_model.app_model_config.text_to_speech_dict

voice = features_dict["text_to_speech"].get("voice") if voice is None else voice
else:
if app_model.app_model_config is None:
raise ValueError("AppModelConfig not found")
text_to_speech_dict = app_model.app_model_config.text_to_speech_dict
if not text_to_speech_dict.get("enabled"):
raise ValueError("TTS is not enabled")

if not text_to_speech_dict.get("enabled"):
raise ValueError("TTS is not enabled")

voice = text_to_speech_dict.get("voice") if voice is None else voice
voice = text_to_speech_dict.get("voice")

model_manager = ModelManager()
model_instance = model_manager.get_default_model_instance(
@@ -132,18 +138,18 @@ class AudioService:
message = db.session.query(Message).filter(Message.id == message_id).first()
if message is None:
return None
if message.answer == "" and message.status == "normal":
if message.answer == "" and message.status == MessageStatus.NORMAL:
return None

else:
response = invoke_tts(message.answer, app_model=app_model, voice=voice)
response = invoke_tts(text_content=message.answer, app_model=app_model, voice=voice, is_draft=is_draft)
if isinstance(response, Generator):
return Response(stream_with_context(response), content_type="audio/mpeg")
return response
else:
if text is None:
raise ValueError("Text is required")
response = invoke_tts(text, app_model, voice)
response = invoke_tts(text_content=text, app_model=app_model, voice=voice, is_draft=is_draft)
if isinstance(response, Generator):
return Response(stream_with_context(response), content_type="audio/mpeg")
return response
@@ -34,6 +34,24 @@ class OpsService:
)
new_decrypt_tracing_config = OpsTraceManager.obfuscated_decrypt_token(tracing_provider, decrypt_tracing_config)

if tracing_provider == "arize" and (
"project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url")
):
try:
project_url = OpsTraceManager.get_trace_config_project_url(decrypt_tracing_config, tracing_provider)
new_decrypt_tracing_config.update({"project_url": project_url})
except Exception:
new_decrypt_tracing_config.update({"project_url": "https://app.arize.com/"})

if tracing_provider == "phoenix" and (
"project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url")
):
try:
project_url = OpsTraceManager.get_trace_config_project_url(decrypt_tracing_config, tracing_provider)
new_decrypt_tracing_config.update({"project_url": project_url})
except Exception:
new_decrypt_tracing_config.update({"project_url": "https://app.phoenix.arize.com/projects/"})

if tracing_provider == "langfuse" and (
"project_key" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_key")
):
@@ -107,7 +125,9 @@ class OpsService:
return {"error": "Invalid Credentials"}

# get project url
if tracing_provider == "langfuse":
if tracing_provider in ("arize", "phoenix"):
project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider)
elif tracing_provider == "langfuse":
project_key = OpsTraceManager.get_trace_config_project_key(tracing_config, tracing_provider)
project_url = "{host}/project/{key}".format(host=tracing_config.get("host"), key=project_key)
elif tracing_provider in ("langsmith", "opik"):
@ -154,7 +154,7 @@ class WorkflowDraftVariableService:
|
|||
variables = (
|
||||
# Do not load the `value` field.
|
||||
query.options(orm.defer(WorkflowDraftVariable.value))
|
||||
.order_by(WorkflowDraftVariable.id.desc())
|
||||
.order_by(WorkflowDraftVariable.created_at.desc())
|
||||
.limit(limit)
|
||||
.offset((page - 1) * limit)
|
||||
.all()
|
||||
|
|
@ -168,7 +168,7 @@ class WorkflowDraftVariableService:
|
|||
WorkflowDraftVariable.node_id == node_id,
|
||||
)
|
||||
query = self._session.query(WorkflowDraftVariable).filter(*criteria)
|
||||
variables = query.order_by(WorkflowDraftVariable.id.desc()).all()
|
||||
variables = query.order_by(WorkflowDraftVariable.created_at.desc()).all()
|
||||
return WorkflowDraftVariableList(variables=variables)
|
||||
|
||||
def list_node_variables(self, app_id: str, node_id: str) -> WorkflowDraftVariableList:
|
||||
|
|
@ -235,7 +235,9 @@ class WorkflowDraftVariableService:
|
|||
self._session.flush()
|
||||
return variable
|
||||
|
||||
def _reset_node_var(self, workflow: Workflow, variable: WorkflowDraftVariable) -> WorkflowDraftVariable | None:
|
||||
def _reset_node_var_or_sys_var(
|
||||
self, workflow: Workflow, variable: WorkflowDraftVariable
|
||||
) -> WorkflowDraftVariable | None:
|
||||
# If a variable does not allow updating, it makes no sence to resetting it.
|
||||
if not variable.editable:
|
||||
return variable
|
||||
|
|
@ -259,28 +261,35 @@ class WorkflowDraftVariableService:
|
|||
self._session.flush()
|
||||
return None
|
||||
|
||||
# Get node type for proper value extraction
|
||||
node_config = workflow.get_node_config_by_id(variable.node_id)
|
||||
node_type = workflow.get_node_type_from_node_config(node_config)
|
||||
|
||||
outputs_dict = node_exec.outputs_dict or {}
|
||||
# a sentinel value used to check the absent of the output variable key.
|
||||
absent = object()
|
||||
|
||||
# Note: Based on the implementation in `_build_from_variable_assigner_mapping`,
|
||||
# VariableAssignerNode (both v1 and v2) can only create conversation draft variables.
|
||||
# For consistency, we should simply return when processing VARIABLE_ASSIGNER nodes.
|
||||
#
|
||||
# This implementation must remain synchronized with the `_build_from_variable_assigner_mapping`
|
||||
# and `save` methods.
|
||||
if node_type == NodeType.VARIABLE_ASSIGNER:
|
||||
return variable
|
||||
if variable.get_variable_type() == DraftVariableType.NODE:
|
||||
# Get node type for proper value extraction
|
||||
node_config = workflow.get_node_config_by_id(variable.node_id)
|
||||
node_type = workflow.get_node_type_from_node_config(node_config)
|
||||
|
||||
if variable.name not in outputs_dict:
|
||||
# Note: Based on the implementation in `_build_from_variable_assigner_mapping`,
|
||||
# VariableAssignerNode (both v1 and v2) can only create conversation draft variables.
|
||||
# For consistency, we should simply return when processing VARIABLE_ASSIGNER nodes.
|
||||
#
|
||||
# This implementation must remain synchronized with the `_build_from_variable_assigner_mapping`
|
||||
# and `save` methods.
|
||||
if node_type == NodeType.VARIABLE_ASSIGNER:
|
||||
return variable
|
||||
output_value = outputs_dict.get(variable.name, absent)
|
||||
else:
|
||||
output_value = outputs_dict.get(f"sys.{variable.name}", absent)
|
||||
|
||||
# We cannot use `is None` to check the existence of an output variable here as
|
||||
# the value of the output may be `None`.
|
||||
if output_value is absent:
|
||||
# If variable not found in execution data, delete the variable
|
||||
self._session.delete(instance=variable)
|
||||
self._session.flush()
|
||||
return None
|
||||
value = outputs_dict[variable.name]
|
||||
value_seg = WorkflowDraftVariable.build_segment_with_type(variable.value_type, value)
|
||||
value_seg = WorkflowDraftVariable.build_segment_with_type(variable.value_type, output_value)
|
||||
# Extract variable value using unified logic
|
||||
variable.set_value(value_seg)
|
||||
variable.last_edited_at = None # Reset to indicate this is a reset operation
|
||||
|
|
@ -291,10 +300,8 @@ class WorkflowDraftVariableService:
|
|||
variable_type = variable.get_variable_type()
|
||||
if variable_type == DraftVariableType.CONVERSATION:
|
||||
return self._reset_conv_var(workflow, variable)
|
||||
elif variable_type == DraftVariableType.NODE:
|
||||
return self._reset_node_var(workflow, variable)
|
||||
else:
|
||||
raise VariableResetError(f"cannot reset system variable, variable_id={variable.id}")
|
||||
return self._reset_node_var_or_sys_var(workflow, variable)
|
||||
|
||||
def delete_variable(self, variable: WorkflowDraftVariable):
|
||||
self._session.delete(variable)
|
||||
|
|
@ -439,6 +446,9 @@ def _batch_upsert_draft_varaible(
|
|||
stmt = stmt.on_conflict_do_update(
|
||||
index_elements=WorkflowDraftVariable.unique_app_id_node_id_name(),
|
||||
set_={
|
||||
# Refresh creation timestamp to ensure updated variables
|
||||
# appear first in chronologically sorted result sets.
|
||||
"created_at": stmt.excluded.created_at,
|
||||
"updated_at": stmt.excluded.updated_at,
|
||||
"last_edited_at": stmt.excluded.last_edited_at,
|
||||
"description": stmt.excluded.description,
|
||||
|
|
@ -525,9 +535,6 @@ class DraftVariableSaver:
|
|||
# The type of the current node (see NodeType).
|
||||
_node_type: NodeType
|
||||
|
||||
# Indicates how the workflow execution was triggered (see InvokeFrom).
|
||||
_invoke_from: InvokeFrom
|
||||
|
||||
#
|
||||
_node_execution_id: str
|
||||
|
||||
|
|
@ -546,15 +553,16 @@ class DraftVariableSaver:
|
|||
app_id: str,
node_id: str,
node_type: NodeType,
invoke_from: InvokeFrom,
node_execution_id: str,
enclosing_node_id: str | None = None,
):
# Important: `node_execution_id` parameter refers to the primary key (`id`) of the
# WorkflowNodeExecutionModel/WorkflowNodeExecution, not their `node_execution_id`
# field. These are distinct database fields with different purposes.
self._session = session
self._app_id = app_id
self._node_id = node_id
self._node_type = node_type
self._invoke_from = invoke_from
self._node_execution_id = node_execution_id
self._enclosing_node_id = enclosing_node_id
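
As the comment above stresses, the `node_execution_id` argument is the execution row's primary key, not its `node_execution_id` column. A hypothetical call site illustrating the wiring (it mirrors the `WorkflowService` hunk further down; the surrounding objects are assumptions):

saver = DraftVariableSaver(
    session=session,
    app_id=app_model.id,
    node_id=workflow_node_execution.node_id,
    node_type=NodeType(workflow_node_execution.node_type),
    invoke_from=InvokeFrom.DEBUGGER,
    enclosing_node_id=enclosing_node_id,
    # primary key of the execution record, NOT its `node_execution_id` column
    node_execution_id=node_execution.id,
)
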
@@ -570,9 +578,6 @@ class DraftVariableSaver:
)

def _should_save_output_variables_for_draft(self) -> bool:
# Only save output variables for debugging execution of workflow.
if self._invoke_from != InvokeFrom.DEBUGGER:
return False
if self._enclosing_node_id is not None and self._node_type != NodeType.VARIABLE_ASSIGNER:
# Currently we do not save output variables for nodes inside loop or iteration.
return False
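
Restated as a free function for readability (the real logic is the method above), the gate keeps draft variables only for debugger-triggered runs and skips nodes nested inside a loop or iteration, with variable assigners as the one exception:

def should_save_for_draft(invoke_from: InvokeFrom, node_type: NodeType, enclosing_node_id: str | None) -> bool:
    if invoke_from != InvokeFrom.DEBUGGER:
        return False
    if enclosing_node_id is not None and node_type != NodeType.VARIABLE_ASSIGNER:
        return False
    return True
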
@@ -12,7 +12,6 @@ from sqlalchemy.orm import Session
from core.app.app_config.entities import VariableEntityType
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file import File
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.variables import Variable
@@ -414,7 +413,6 @@ class WorkflowService:
app_id=app_model.id,
node_id=workflow_node_execution.node_id,
node_type=NodeType(workflow_node_execution.node_type),
invoke_from=InvokeFrom.DEBUGGER,
enclosing_node_id=enclosing_node_id,
node_execution_id=node_execution.id,
)
@@ -0,0 +1,259 @@
from collections.abc import Mapping, Sequence
|
||||
|
||||
from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter
|
||||
from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType
|
||||
from core.variables.segments import ArrayFileSegment, FileSegment
|
||||
|
||||
|
||||
class TestWorkflowResponseConverterFetchFilesFromVariableValue:
|
||||
"""Test class for WorkflowResponseConverter._fetch_files_from_variable_value method"""
|
||||
|
||||
def create_test_file(self, file_id: str = "test_file_1") -> File:
|
||||
"""Create a test File object"""
|
||||
return File(
|
||||
id=file_id,
|
||||
tenant_id="test_tenant",
|
||||
type=FileType.DOCUMENT,
|
||||
transfer_method=FileTransferMethod.LOCAL_FILE,
|
||||
related_id="related_123",
|
||||
filename=f"{file_id}.txt",
|
||||
extension=".txt",
|
||||
mime_type="text/plain",
|
||||
size=1024,
|
||||
storage_key="storage_key_123",
|
||||
)
|
||||
|
||||
def create_file_dict(self, file_id: str = "test_file_dict") -> dict:
|
||||
"""Create a file dictionary with correct dify_model_identity"""
|
||||
return {
|
||||
"dify_model_identity": FILE_MODEL_IDENTITY,
|
||||
"id": file_id,
|
||||
"tenant_id": "test_tenant",
|
||||
"type": "document",
|
||||
"transfer_method": "local_file",
|
||||
"related_id": "related_456",
|
||||
"filename": f"{file_id}.txt",
|
||||
"extension": ".txt",
|
||||
"mime_type": "text/plain",
|
||||
"size": 2048,
|
||||
"url": "http://example.com/file.txt",
|
||||
}
|
||||
|
||||
def test_fetch_files_from_variable_value_with_none(self):
|
||||
"""Test with None input"""
|
||||
# The method signature expects Union[dict, list, Segment], but implementation handles None
|
||||
# We'll test the actual behavior by passing an empty dict instead
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(None) # type: ignore
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_empty_dict(self):
|
||||
"""Test with empty dictionary"""
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value({})
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_empty_list(self):
|
||||
"""Test with empty list"""
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value([])
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_file_segment(self):
|
||||
"""Test with valid FileSegment"""
|
||||
test_file = self.create_test_file("segment_file")
|
||||
file_segment = FileSegment(value=test_file)
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(file_segment)
|
||||
|
||||
assert len(result) == 1
|
||||
assert isinstance(result[0], dict)
|
||||
assert result[0]["id"] == "segment_file"
|
||||
assert result[0]["dify_model_identity"] == FILE_MODEL_IDENTITY
|
||||
|
||||
def test_fetch_files_from_variable_value_with_array_file_segment_single(self):
|
||||
"""Test with ArrayFileSegment containing single file"""
|
||||
test_file = self.create_test_file("array_file_1")
|
||||
array_segment = ArrayFileSegment(value=[test_file])
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment)
|
||||
|
||||
assert len(result) == 1
|
||||
assert isinstance(result[0], dict)
|
||||
assert result[0]["id"] == "array_file_1"
|
||||
|
||||
def test_fetch_files_from_variable_value_with_array_file_segment_multiple(self):
|
||||
"""Test with ArrayFileSegment containing multiple files"""
|
||||
test_file_1 = self.create_test_file("array_file_1")
|
||||
test_file_2 = self.create_test_file("array_file_2")
|
||||
array_segment = ArrayFileSegment(value=[test_file_1, test_file_2])
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment)
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["id"] == "array_file_1"
|
||||
assert result[1]["id"] == "array_file_2"
|
||||
|
||||
def test_fetch_files_from_variable_value_with_array_file_segment_empty(self):
|
||||
"""Test with ArrayFileSegment containing empty array"""
|
||||
array_segment = ArrayFileSegment(value=[])
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_list_of_file_dicts(self):
|
||||
"""Test with list containing file dictionaries"""
|
||||
file_dict_1 = self.create_file_dict("list_file_1")
|
||||
file_dict_2 = self.create_file_dict("list_file_2")
|
||||
test_list = [file_dict_1, file_dict_2]
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["id"] == "list_file_1"
|
||||
assert result[1]["id"] == "list_file_2"
|
||||
|
||||
def test_fetch_files_from_variable_value_with_list_of_file_objects(self):
|
||||
"""Test with list containing File objects"""
|
||||
file_obj_1 = self.create_test_file("list_obj_1")
|
||||
file_obj_2 = self.create_test_file("list_obj_2")
|
||||
test_list = [file_obj_1, file_obj_2]
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["id"] == "list_obj_1"
|
||||
assert result[1]["id"] == "list_obj_2"
|
||||
|
||||
def test_fetch_files_from_variable_value_with_list_mixed_valid_invalid(self):
|
||||
"""Test with list containing mix of valid files and invalid items"""
|
||||
file_dict = self.create_file_dict("mixed_file")
|
||||
invalid_dict = {"not_a_file": "value"}
|
||||
test_list = [file_dict, invalid_dict, "string_item", 123]
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0]["id"] == "mixed_file"
|
||||
|
||||
def test_fetch_files_from_variable_value_with_list_nested_structures(self):
|
||||
"""Test with list containing nested structures"""
|
||||
file_dict = self.create_file_dict("nested_file")
|
||||
nested_list = [file_dict, ["inner_list"]]
|
||||
test_list = [nested_list, {"nested": "dict"}]
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)
|
||||
|
||||
# Should not process nested structures in list items
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_dict_incorrect_identity(self):
|
||||
"""Test with dictionary having incorrect dify_model_identity"""
|
||||
invalid_dict = {"dify_model_identity": "wrong_identity", "id": "invalid_file", "filename": "test.txt"}
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_dict_missing_identity(self):
|
||||
"""Test with dictionary missing dify_model_identity"""
|
||||
invalid_dict = {"id": "no_identity_file", "filename": "test.txt"}
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_dict_file_object(self):
|
||||
"""Test with dictionary containing File object"""
|
||||
file_obj = self.create_test_file("dict_obj_file")
|
||||
test_dict = {"file_key": file_obj}
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(test_dict)
|
||||
|
||||
# Should not extract File objects from dict values
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_mixed_data_types(self):
|
||||
"""Test with various mixed data types"""
|
||||
mixed_data = {"string": "text", "number": 42, "boolean": True, "null": None, "dify_model_identity": "wrong"}
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(mixed_data)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_invalid_objects(self):
|
||||
"""Test with invalid objects that are not supported types"""
|
||||
# Test with an invalid dict that doesn't match expected patterns
|
||||
invalid_dict = {"custom_key": "custom_value"}
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_string_input(self):
|
||||
"""Test with string input (unsupported type)"""
|
||||
# Since method expects Union[dict, list, Segment], test with empty list instead
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value([])
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_number_input(self):
|
||||
"""Test with number input (unsupported type)"""
|
||||
# Test with list containing numbers (should be ignored)
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value([42, "string", None])
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_return_type_is_sequence(self):
|
||||
"""Test that return type is Sequence[Mapping[str, Any]]"""
|
||||
file_dict = self.create_file_dict("type_test_file")
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(file_dict)
|
||||
|
||||
assert isinstance(result, Sequence)
|
||||
assert len(result) == 1
|
||||
assert isinstance(result[0], Mapping)
|
||||
assert all(isinstance(key, str) for key in result[0])
|
||||
|
||||
def test_fetch_files_from_variable_value_preserves_file_properties(self):
|
||||
"""Test that all file properties are preserved in the result"""
|
||||
original_file = self.create_test_file("property_test")
|
||||
file_segment = FileSegment(value=original_file)
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(file_segment)
|
||||
|
||||
assert len(result) == 1
|
||||
file_dict = result[0]
|
||||
assert file_dict["id"] == "property_test"
|
||||
assert file_dict["tenant_id"] == "test_tenant"
|
||||
assert file_dict["type"] == "document"
|
||||
assert file_dict["transfer_method"] == "local_file"
|
||||
assert file_dict["filename"] == "property_test.txt"
|
||||
assert file_dict["extension"] == ".txt"
|
||||
assert file_dict["mime_type"] == "text/plain"
|
||||
assert file_dict["size"] == 1024
|
||||
|
||||
def test_fetch_files_from_variable_value_with_complex_nested_scenario(self):
|
||||
"""Test complex scenario with nested valid and invalid data"""
|
||||
file_dict = self.create_file_dict("complex_file")
|
||||
file_obj = self.create_test_file("complex_obj")
|
||||
|
||||
# Complex nested structure
|
||||
complex_data = [
|
||||
file_dict, # Valid file dict
|
||||
file_obj, # Valid file object
|
||||
{ # Invalid dict
|
||||
"not_file": "data",
|
||||
"nested": {"deep": "value"},
|
||||
},
|
||||
[ # Nested list (should be ignored)
|
||||
self.create_file_dict("nested_file")
|
||||
],
|
||||
"string", # Invalid string
|
||||
None, # None value
|
||||
42, # Invalid number
|
||||
]
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(complex_data)
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["id"] == "complex_file"
|
||||
assert result[1]["id"] == "complex_obj"
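
Taken together, these tests pin down the contract of `_fetch_files_from_variable_value`: file mappings are extracted from `FileSegment` and `ArrayFileSegment` values, from `File` objects in a flat list, and from dicts carrying the `dify_model_identity` marker; nested structures and anything else yield nothing. A rough sketch that would satisfy the assertions, not the actual implementation (`to_dict()` stands in for however `File` serializes itself):

def fetch_files_sketch(value) -> list[dict]:
    if isinstance(value, FileSegment):
        return [value.value.to_dict()]
    if isinstance(value, ArrayFileSegment):
        return [f.to_dict() for f in value.value]
    if isinstance(value, list):
        found = []
        for item in value:
            if isinstance(item, File):
                found.append(item.to_dict())
            elif isinstance(item, dict) and item.get("dify_model_identity") == FILE_MODEL_IDENTITY:
                found.append(item)
        return found
    if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
        return [value]
    return []
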
@@ -6,12 +6,11 @@ from unittest.mock import Mock, patch
import pytest
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from core.variables.types import SegmentType
|
||||
from core.variables import StringSegment
|
||||
from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID
|
||||
from core.workflow.nodes import NodeType
|
||||
from models.enums import DraftVariableType
|
||||
from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel
|
||||
from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel, is_system_variable_editable
|
||||
from services.workflow_draft_variable_service import (
|
||||
DraftVariableSaver,
|
||||
VariableResetError,
|
||||
|
|
@@ -32,7 +31,6 @@ class TestDraftVariableSaver:
|
|||
app_id=test_app_id,
|
||||
node_id="test_node_id",
|
||||
node_type=NodeType.START,
|
||||
invoke_from=InvokeFrom.DEBUGGER,
|
||||
node_execution_id="test_execution_id",
|
||||
)
|
||||
assert saver._should_variable_be_visible("123_456", NodeType.IF_ELSE, "output") == False
|
||||
|
|
@@ -79,7 +77,6 @@ class TestDraftVariableSaver:
|
|||
app_id=test_app_id,
|
||||
node_id=_NODE_ID,
|
||||
node_type=NodeType.START,
|
||||
invoke_from=InvokeFrom.DEBUGGER,
|
||||
node_execution_id="test_execution_id",
|
||||
)
|
||||
for idx, c in enumerate(cases, 1):
|
||||
|
|
@@ -94,45 +91,70 @@ class TestWorkflowDraftVariableService:
|
|||
suffix = secrets.token_hex(6)
|
||||
return f"test_app_id_{suffix}"
|
||||
|
||||
def _create_test_workflow(self, app_id: str) -> Workflow:
|
||||
"""Create a real Workflow instance for testing"""
|
||||
return Workflow.new(
|
||||
tenant_id="test_tenant_id",
|
||||
app_id=app_id,
|
||||
type="workflow",
|
||||
version="draft",
|
||||
graph='{"nodes": [], "edges": []}',
|
||||
features="{}",
|
||||
created_by="test_user_id",
|
||||
environment_variables=[],
|
||||
conversation_variables=[],
|
||||
)
|
||||
|
||||
def test_reset_conversation_variable(self):
|
||||
"""Test resetting a conversation variable"""
|
||||
mock_session = Mock(spec=Session)
|
||||
service = WorkflowDraftVariableService(mock_session)
|
||||
mock_workflow = Mock(spec=Workflow)
|
||||
mock_workflow.app_id = self._get_test_app_id()
|
||||
|
||||
# Create mock variable
|
||||
mock_variable = Mock(spec=WorkflowDraftVariable)
|
||||
mock_variable.get_variable_type.return_value = DraftVariableType.CONVERSATION
|
||||
mock_variable.id = "var-id"
|
||||
mock_variable.name = "test_var"
|
||||
test_app_id = self._get_test_app_id()
|
||||
workflow = self._create_test_workflow(test_app_id)
|
||||
|
||||
# Create real conversation variable
|
||||
test_value = StringSegment(value="test_value")
|
||||
variable = WorkflowDraftVariable.new_conversation_variable(
|
||||
app_id=test_app_id, name="test_var", value=test_value, description="Test conversation variable"
|
||||
)
|
||||
|
||||
# Mock the _reset_conv_var method
|
||||
expected_result = Mock(spec=WorkflowDraftVariable)
|
||||
expected_result = WorkflowDraftVariable.new_conversation_variable(
|
||||
app_id=test_app_id,
|
||||
name="test_var",
|
||||
value=StringSegment(value="reset_value"),
|
||||
)
|
||||
with patch.object(service, "_reset_conv_var", return_value=expected_result) as mock_reset_conv:
|
||||
result = service.reset_variable(mock_workflow, mock_variable)
|
||||
result = service.reset_variable(workflow, variable)
|
||||
|
||||
mock_reset_conv.assert_called_once_with(mock_workflow, mock_variable)
|
||||
mock_reset_conv.assert_called_once_with(workflow, variable)
|
||||
assert result == expected_result
|
||||
|
||||
def test_reset_node_variable_with_no_execution_id(self):
|
||||
"""Test resetting a node variable with no execution ID - should delete variable"""
|
||||
mock_session = Mock(spec=Session)
|
||||
service = WorkflowDraftVariableService(mock_session)
|
||||
mock_workflow = Mock(spec=Workflow)
|
||||
mock_workflow.app_id = self._get_test_app_id()
|
||||
|
||||
# Create mock variable with no execution ID
|
||||
mock_variable = Mock(spec=WorkflowDraftVariable)
|
||||
mock_variable.get_variable_type.return_value = DraftVariableType.NODE
|
||||
mock_variable.node_execution_id = None
|
||||
mock_variable.id = "var-id"
|
||||
mock_variable.name = "test_var"
|
||||
test_app_id = self._get_test_app_id()
|
||||
workflow = self._create_test_workflow(test_app_id)
|
||||
|
||||
result = service._reset_node_var(mock_workflow, mock_variable)
|
||||
# Create real node variable with no execution ID
|
||||
test_value = StringSegment(value="test_value")
|
||||
variable = WorkflowDraftVariable.new_node_variable(
|
||||
app_id=test_app_id,
|
||||
node_id="test_node_id",
|
||||
name="test_var",
|
||||
value=test_value,
|
||||
node_execution_id="exec-id", # Set initially
|
||||
)
|
||||
# Manually set to None to simulate the test condition
|
||||
variable.node_execution_id = None
|
||||
|
||||
result = service._reset_node_var_or_sys_var(workflow, variable)
|
||||
|
||||
# Should delete the variable and return None
|
||||
mock_session.delete.assert_called_once_with(instance=mock_variable)
|
||||
mock_session.delete.assert_called_once_with(instance=variable)
|
||||
mock_session.flush.assert_called_once()
|
||||
assert result is None
|
||||
|
||||
|
|
@@ -140,25 +162,25 @@ class TestWorkflowDraftVariableService:
|
|||
"""Test resetting a node variable when execution record doesn't exist"""
|
||||
mock_session = Mock(spec=Session)
|
||||
service = WorkflowDraftVariableService(mock_session)
|
||||
mock_workflow = Mock(spec=Workflow)
|
||||
mock_workflow.app_id = self._get_test_app_id()
|
||||
|
||||
# Create mock variable with execution ID
|
||||
mock_variable = Mock(spec=WorkflowDraftVariable)
|
||||
mock_variable.get_variable_type.return_value = DraftVariableType.NODE
|
||||
mock_variable.node_execution_id = "exec-id"
|
||||
mock_variable.id = "var-id"
|
||||
mock_variable.name = "test_var"
|
||||
test_app_id = self._get_test_app_id()
|
||||
workflow = self._create_test_workflow(test_app_id)
|
||||
|
||||
# Create real node variable with execution ID
|
||||
test_value = StringSegment(value="test_value")
|
||||
variable = WorkflowDraftVariable.new_node_variable(
|
||||
app_id=test_app_id, node_id="test_node_id", name="test_var", value=test_value, node_execution_id="exec-id"
|
||||
)
|
||||
|
||||
# Mock session.scalars to return None (no execution record found)
|
||||
mock_scalars = Mock()
|
||||
mock_scalars.first.return_value = None
|
||||
mock_session.scalars.return_value = mock_scalars
|
||||
|
||||
result = service._reset_node_var(mock_workflow, mock_variable)
|
||||
result = service._reset_node_var_or_sys_var(workflow, variable)
|
||||
|
||||
# Should delete the variable and return None
|
||||
mock_session.delete.assert_called_once_with(instance=mock_variable)
|
||||
mock_session.delete.assert_called_once_with(instance=variable)
|
||||
mock_session.flush.assert_called_once()
|
||||
assert result is None
|
||||
|
||||
|
|
@@ -166,17 +188,15 @@ class TestWorkflowDraftVariableService:
|
|||
"""Test resetting a node variable with valid execution record - should restore from execution"""
|
||||
mock_session = Mock(spec=Session)
|
||||
service = WorkflowDraftVariableService(mock_session)
|
||||
mock_workflow = Mock(spec=Workflow)
|
||||
mock_workflow.app_id = self._get_test_app_id()
|
||||
|
||||
# Create mock variable with execution ID
|
||||
mock_variable = Mock(spec=WorkflowDraftVariable)
|
||||
mock_variable.get_variable_type.return_value = DraftVariableType.NODE
|
||||
mock_variable.node_execution_id = "exec-id"
|
||||
mock_variable.id = "var-id"
|
||||
mock_variable.name = "test_var"
|
||||
mock_variable.node_id = "node-id"
|
||||
mock_variable.value_type = SegmentType.STRING
|
||||
test_app_id = self._get_test_app_id()
|
||||
workflow = self._create_test_workflow(test_app_id)
|
||||
|
||||
# Create real node variable with execution ID
|
||||
test_value = StringSegment(value="original_value")
|
||||
variable = WorkflowDraftVariable.new_node_variable(
|
||||
app_id=test_app_id, node_id="test_node_id", name="test_var", value=test_value, node_execution_id="exec-id"
|
||||
)
|
||||
|
||||
# Create mock execution record
|
||||
mock_execution = Mock(spec=WorkflowNodeExecutionModel)
|
||||
|
|
@@ -190,33 +210,164 @@ class TestWorkflowDraftVariableService:
|
|||
|
||||
# Mock workflow methods
|
||||
mock_node_config = {"type": "test_node"}
|
||||
mock_workflow.get_node_config_by_id.return_value = mock_node_config
|
||||
mock_workflow.get_node_type_from_node_config.return_value = NodeType.LLM
|
||||
with (
|
||||
patch.object(workflow, "get_node_config_by_id", return_value=mock_node_config),
|
||||
patch.object(workflow, "get_node_type_from_node_config", return_value=NodeType.LLM),
|
||||
):
|
||||
result = service._reset_node_var_or_sys_var(workflow, variable)
|
||||
|
||||
result = service._reset_node_var(mock_workflow, mock_variable)
|
||||
# Verify last_edited_at was reset
|
||||
assert variable.last_edited_at is None
|
||||
# Verify session.flush was called
|
||||
mock_session.flush.assert_called()
|
||||
|
||||
# Verify variable.set_value was called with the correct value
|
||||
mock_variable.set_value.assert_called_once()
|
||||
# Verify last_edited_at was reset
|
||||
assert mock_variable.last_edited_at is None
|
||||
# Verify session.flush was called
|
||||
mock_session.flush.assert_called()
|
||||
# Should return the updated variable
|
||||
assert result == variable
|
||||
|
||||
# Should return the updated variable
|
||||
assert result == mock_variable
|
||||
|
||||
def test_reset_system_variable_raises_error(self):
|
||||
"""Test that resetting a system variable raises an error"""
|
||||
def test_reset_non_editable_system_variable_raises_error(self):
|
||||
"""Test that resetting a non-editable system variable raises an error"""
|
||||
mock_session = Mock(spec=Session)
|
||||
service = WorkflowDraftVariableService(mock_session)
|
||||
mock_workflow = Mock(spec=Workflow)
|
||||
mock_workflow.app_id = self._get_test_app_id()
|
||||
|
||||
mock_variable = Mock(spec=WorkflowDraftVariable)
|
||||
mock_variable.get_variable_type.return_value = DraftVariableType.SYS # Not a valid enum value for this test
|
||||
mock_variable.id = "var-id"
|
||||
test_app_id = self._get_test_app_id()
|
||||
workflow = self._create_test_workflow(test_app_id)
|
||||
|
||||
with pytest.raises(VariableResetError) as exc_info:
|
||||
service.reset_variable(mock_workflow, mock_variable)
|
||||
assert "cannot reset system variable" in str(exc_info.value)
|
||||
assert "variable_id=var-id" in str(exc_info.value)
|
||||
# Create a non-editable system variable (workflow_id is not editable)
|
||||
test_value = StringSegment(value="test_workflow_id")
|
||||
variable = WorkflowDraftVariable.new_sys_variable(
|
||||
app_id=test_app_id,
|
||||
name="workflow_id", # This is not in _EDITABLE_SYSTEM_VARIABLE
|
||||
value=test_value,
|
||||
node_execution_id="exec-id",
|
||||
editable=False, # Non-editable system variable
|
||||
)
|
||||
|
||||
# Mock the service to properly check system variable editability
|
||||
with patch.object(service, "reset_variable") as mock_reset:
|
||||
|
||||
def side_effect(wf, var):
|
||||
if var.get_variable_type() == DraftVariableType.SYS and not is_system_variable_editable(var.name):
|
||||
raise VariableResetError(f"cannot reset system variable, variable_id={var.id}")
|
||||
return var
|
||||
|
||||
mock_reset.side_effect = side_effect
|
||||
|
||||
with pytest.raises(VariableResetError) as exc_info:
|
||||
service.reset_variable(workflow, variable)
|
||||
assert "cannot reset system variable" in str(exc_info.value)
|
||||
assert f"variable_id={variable.id}" in str(exc_info.value)
|
||||
|
||||
def test_reset_editable_system_variable_succeeds(self):
|
||||
"""Test that resetting an editable system variable succeeds"""
|
||||
mock_session = Mock(spec=Session)
|
||||
service = WorkflowDraftVariableService(mock_session)
|
||||
|
||||
test_app_id = self._get_test_app_id()
|
||||
workflow = self._create_test_workflow(test_app_id)
|
||||
|
||||
# Create an editable system variable (files is editable)
|
||||
test_value = StringSegment(value="[]")
|
||||
variable = WorkflowDraftVariable.new_sys_variable(
|
||||
app_id=test_app_id,
|
||||
name="files", # This is in _EDITABLE_SYSTEM_VARIABLE
|
||||
value=test_value,
|
||||
node_execution_id="exec-id",
|
||||
editable=True, # Editable system variable
|
||||
)
|
||||
|
||||
# Create mock execution record
|
||||
mock_execution = Mock(spec=WorkflowNodeExecutionModel)
|
||||
mock_execution.outputs_dict = {"sys.files": "[]"}
|
||||
|
||||
# Mock session.scalars to return the execution record
|
||||
mock_scalars = Mock()
|
||||
mock_scalars.first.return_value = mock_execution
|
||||
mock_session.scalars.return_value = mock_scalars
|
||||
|
||||
result = service._reset_node_var_or_sys_var(workflow, variable)
|
||||
|
||||
# Should succeed and return the variable
|
||||
assert result == variable
|
||||
assert variable.last_edited_at is None
|
||||
mock_session.flush.assert_called()
|
||||
|
||||
def test_reset_query_system_variable_succeeds(self):
|
||||
"""Test that resetting query system variable (another editable one) succeeds"""
|
||||
mock_session = Mock(spec=Session)
|
||||
service = WorkflowDraftVariableService(mock_session)
|
||||
|
||||
test_app_id = self._get_test_app_id()
|
||||
workflow = self._create_test_workflow(test_app_id)
|
||||
|
||||
# Create an editable system variable (query is editable)
|
||||
test_value = StringSegment(value="original query")
|
||||
variable = WorkflowDraftVariable.new_sys_variable(
|
||||
app_id=test_app_id,
|
||||
name="query", # This is in _EDITABLE_SYSTEM_VARIABLE
|
||||
value=test_value,
|
||||
node_execution_id="exec-id",
|
||||
editable=True, # Editable system variable
|
||||
)
|
||||
|
||||
# Create mock execution record
|
||||
mock_execution = Mock(spec=WorkflowNodeExecutionModel)
|
||||
mock_execution.outputs_dict = {"sys.query": "reset query"}
|
||||
|
||||
# Mock session.scalars to return the execution record
|
||||
mock_scalars = Mock()
|
||||
mock_scalars.first.return_value = mock_execution
|
||||
mock_session.scalars.return_value = mock_scalars
|
||||
|
||||
result = service._reset_node_var_or_sys_var(workflow, variable)
|
||||
|
||||
# Should succeed and return the variable
|
||||
assert result == variable
|
||||
assert variable.last_edited_at is None
|
||||
mock_session.flush.assert_called()
|
||||
|
||||
def test_system_variable_editability_check(self):
|
||||
"""Test the system variable editability function directly"""
|
||||
# Test editable system variables
|
||||
assert is_system_variable_editable("files") == True
|
||||
assert is_system_variable_editable("query") == True
|
||||
|
||||
# Test non-editable system variables
|
||||
assert is_system_variable_editable("workflow_id") == False
|
||||
assert is_system_variable_editable("conversation_id") == False
|
||||
assert is_system_variable_editable("user_id") == False
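
These assertions suggest `is_system_variable_editable` is a simple membership check against an allow-list of user-editable system variables. A hedged sketch; the `_EDITABLE_SYSTEM_VARIABLE` name is taken from the comments earlier in this diff, and any members beyond `query` and `files` are an assumption:

_EDITABLE_SYSTEM_VARIABLE = frozenset({"query", "files"})

def is_system_variable_editable(name: str) -> bool:
    return name in _EDITABLE_SYSTEM_VARIABLE
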
|
||||
|
||||
def test_workflow_draft_variable_factory_methods(self):
|
||||
"""Test that factory methods create proper instances"""
|
||||
test_app_id = self._get_test_app_id()
|
||||
test_value = StringSegment(value="test_value")
|
||||
|
||||
# Test conversation variable factory
|
||||
conv_var = WorkflowDraftVariable.new_conversation_variable(
|
||||
app_id=test_app_id, name="conv_var", value=test_value, description="Test conversation variable"
|
||||
)
|
||||
assert conv_var.get_variable_type() == DraftVariableType.CONVERSATION
|
||||
assert conv_var.editable == True
|
||||
assert conv_var.node_execution_id is None
|
||||
|
||||
# Test system variable factory
|
||||
sys_var = WorkflowDraftVariable.new_sys_variable(
|
||||
app_id=test_app_id, name="workflow_id", value=test_value, node_execution_id="exec-id", editable=False
|
||||
)
|
||||
assert sys_var.get_variable_type() == DraftVariableType.SYS
|
||||
assert sys_var.editable == False
|
||||
assert sys_var.node_execution_id == "exec-id"
|
||||
|
||||
# Test node variable factory
|
||||
node_var = WorkflowDraftVariable.new_node_variable(
|
||||
app_id=test_app_id,
|
||||
node_id="node-id",
|
||||
name="node_var",
|
||||
value=test_value,
|
||||
node_execution_id="exec-id",
|
||||
visible=True,
|
||||
editable=True,
|
||||
)
|
||||
assert node_var.get_variable_type() == DraftVariableType.NODE
|
||||
assert node_var.visible == True
|
||||
assert node_var.editable == True
|
||||
assert node_var.node_execution_id == "exec-id"
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,465 @@
|
|||
from decimal import Decimal
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from core.llm_generator.output_parser.errors import OutputParserError
|
||||
from core.llm_generator.output_parser.structured_output import invoke_llm_with_structured_output
|
||||
from core.model_runtime.entities.llm_entities import (
|
||||
LLMResult,
|
||||
LLMResultChunk,
|
||||
LLMResultChunkDelta,
|
||||
LLMResultChunkWithStructuredOutput,
|
||||
LLMResultWithStructuredOutput,
|
||||
LLMUsage,
|
||||
)
|
||||
from core.model_runtime.entities.message_entities import (
|
||||
AssistantPromptMessage,
|
||||
SystemPromptMessage,
|
||||
TextPromptMessageContent,
|
||||
UserPromptMessage,
|
||||
)
|
||||
from core.model_runtime.entities.model_entities import AIModelEntity, ModelType
|
||||
|
||||
|
||||
def create_mock_usage(prompt_tokens: int = 10, completion_tokens: int = 5) -> LLMUsage:
|
||||
"""Create a mock LLMUsage with all required fields"""
|
||||
return LLMUsage(
|
||||
prompt_tokens=prompt_tokens,
|
||||
prompt_unit_price=Decimal("0.001"),
|
||||
prompt_price_unit=Decimal("1"),
|
||||
prompt_price=Decimal(str(prompt_tokens)) * Decimal("0.001"),
|
||||
completion_tokens=completion_tokens,
|
||||
completion_unit_price=Decimal("0.002"),
|
||||
completion_price_unit=Decimal("1"),
|
||||
completion_price=Decimal(str(completion_tokens)) * Decimal("0.002"),
|
||||
total_tokens=prompt_tokens + completion_tokens,
|
||||
total_price=Decimal(str(prompt_tokens)) * Decimal("0.001") + Decimal(str(completion_tokens)) * Decimal("0.002"),
|
||||
currency="USD",
|
||||
latency=1.5,
|
||||
)
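
A quick, illustrative sanity check of the helper's arithmetic:

usage = create_mock_usage(prompt_tokens=10, completion_tokens=5)
assert usage.total_tokens == 15
assert usage.total_price == Decimal("0.02")  # 10 * 0.001 + 5 * 0.002
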
|
||||
|
||||
|
||||
def get_model_entity(provider: str, model_name: str, support_structure_output: bool = False) -> AIModelEntity:
|
||||
"""Create a mock AIModelEntity for testing"""
|
||||
model_schema = MagicMock()
|
||||
model_schema.model = model_name
|
||||
model_schema.provider = provider
|
||||
model_schema.model_type = ModelType.LLM
|
||||
model_schema.model_provider = provider
|
||||
model_schema.model_name = model_name
|
||||
model_schema.support_structure_output = support_structure_output
|
||||
model_schema.parameter_rules = []
|
||||
|
||||
return model_schema
|
||||
|
||||
|
||||
def get_model_instance() -> MagicMock:
|
||||
"""Create a mock ModelInstance for testing"""
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.provider = "openai"
|
||||
mock_instance.credentials = {}
|
||||
return mock_instance
|
||||
|
||||
|
||||
def test_structured_output_parser():
|
||||
"""Test cases for invoke_llm_with_structured_output function"""
|
||||
|
||||
testcases = [
|
||||
# Test case 1: Model with native structured output support, non-streaming
|
||||
{
|
||||
"name": "native_structured_output_non_streaming",
|
||||
"provider": "openai",
|
||||
"model_name": "gpt-4o",
|
||||
"support_structure_output": True,
|
||||
"stream": False,
|
||||
"json_schema": {"type": "object", "properties": {"name": {"type": "string"}}},
|
||||
"expected_llm_response": LLMResult(
|
||||
model="gpt-4o",
|
||||
message=AssistantPromptMessage(content='{"name": "test"}'),
|
||||
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
|
||||
),
|
||||
"expected_result_type": LLMResultWithStructuredOutput,
|
||||
"should_raise": False,
|
||||
},
|
||||
# Test case 2: Model with native structured output support, streaming
|
||||
{
|
||||
"name": "native_structured_output_streaming",
|
||||
"provider": "openai",
|
||||
"model_name": "gpt-4o",
|
||||
"support_structure_output": True,
|
||||
"stream": True,
|
||||
"json_schema": {"type": "object", "properties": {"name": {"type": "string"}}},
|
||||
"expected_llm_response": [
|
||||
LLMResultChunk(
|
||||
model="gpt-4o",
|
||||
prompt_messages=[UserPromptMessage(content="test")],
|
||||
system_fingerprint="test",
|
||||
delta=LLMResultChunkDelta(
|
||||
index=0,
|
||||
message=AssistantPromptMessage(content='{"name":'),
|
||||
usage=create_mock_usage(prompt_tokens=10, completion_tokens=2),
|
||||
),
|
||||
),
|
||||
LLMResultChunk(
|
||||
model="gpt-4o",
|
||||
prompt_messages=[UserPromptMessage(content="test")],
|
||||
system_fingerprint="test",
|
||||
delta=LLMResultChunkDelta(
|
||||
index=0,
|
||||
message=AssistantPromptMessage(content=' "test"}'),
|
||||
usage=create_mock_usage(prompt_tokens=10, completion_tokens=3),
|
||||
),
|
||||
),
|
||||
],
|
||||
"expected_result_type": "generator",
|
||||
"should_raise": False,
|
||||
},
|
||||
# Test case 3: Model without native structured output support, non-streaming
|
||||
{
|
||||
"name": "prompt_based_structured_output_non_streaming",
|
||||
"provider": "anthropic",
|
||||
"model_name": "claude-3-sonnet",
|
||||
"support_structure_output": False,
|
||||
"stream": False,
|
||||
"json_schema": {"type": "object", "properties": {"answer": {"type": "string"}}},
|
||||
"expected_llm_response": LLMResult(
|
||||
model="claude-3-sonnet",
|
||||
message=AssistantPromptMessage(content='{"answer": "test response"}'),
|
||||
usage=create_mock_usage(prompt_tokens=15, completion_tokens=8),
|
||||
),
|
||||
"expected_result_type": LLMResultWithStructuredOutput,
|
||||
"should_raise": False,
|
||||
},
|
||||
# Test case 4: Model without native structured output support, streaming
|
||||
{
|
||||
"name": "prompt_based_structured_output_streaming",
|
||||
"provider": "anthropic",
|
||||
"model_name": "claude-3-sonnet",
|
||||
"support_structure_output": False,
|
||||
"stream": True,
|
||||
"json_schema": {"type": "object", "properties": {"answer": {"type": "string"}}},
|
||||
"expected_llm_response": [
|
||||
LLMResultChunk(
|
||||
model="claude-3-sonnet",
|
||||
prompt_messages=[UserPromptMessage(content="test")],
|
||||
system_fingerprint="test",
|
||||
delta=LLMResultChunkDelta(
|
||||
index=0,
|
||||
message=AssistantPromptMessage(content='{"answer": "test'),
|
||||
usage=create_mock_usage(prompt_tokens=15, completion_tokens=3),
|
||||
),
|
||||
),
|
||||
LLMResultChunk(
|
||||
model="claude-3-sonnet",
|
||||
prompt_messages=[UserPromptMessage(content="test")],
|
||||
system_fingerprint="test",
|
||||
delta=LLMResultChunkDelta(
|
||||
index=0,
|
||||
message=AssistantPromptMessage(content=' response"}'),
|
||||
usage=create_mock_usage(prompt_tokens=15, completion_tokens=5),
|
||||
),
|
||||
),
|
||||
],
|
||||
"expected_result_type": "generator",
|
||||
"should_raise": False,
|
||||
},
|
||||
# Test case 5: Streaming with list content
|
||||
{
|
||||
"name": "streaming_with_list_content",
|
||||
"provider": "openai",
|
||||
"model_name": "gpt-4o",
|
||||
"support_structure_output": True,
|
||||
"stream": True,
|
||||
"json_schema": {"type": "object", "properties": {"data": {"type": "string"}}},
|
||||
"expected_llm_response": [
|
||||
LLMResultChunk(
|
||||
model="gpt-4o",
|
||||
prompt_messages=[UserPromptMessage(content="test")],
|
||||
system_fingerprint="test",
|
||||
delta=LLMResultChunkDelta(
|
||||
index=0,
|
||||
message=AssistantPromptMessage(
|
||||
content=[
|
||||
TextPromptMessageContent(data='{"data":'),
|
||||
]
|
||||
),
|
||||
usage=create_mock_usage(prompt_tokens=10, completion_tokens=2),
|
||||
),
|
||||
),
|
||||
LLMResultChunk(
|
||||
model="gpt-4o",
|
||||
prompt_messages=[UserPromptMessage(content="test")],
|
||||
system_fingerprint="test",
|
||||
delta=LLMResultChunkDelta(
|
||||
index=0,
|
||||
message=AssistantPromptMessage(
|
||||
content=[
|
||||
TextPromptMessageContent(data=' "value"}'),
|
||||
]
|
||||
),
|
||||
usage=create_mock_usage(prompt_tokens=10, completion_tokens=3),
|
||||
),
|
||||
),
|
||||
],
|
||||
"expected_result_type": "generator",
|
||||
"should_raise": False,
|
||||
},
|
||||
# Test case 6: Error case - non-string LLM response content (non-streaming)
|
||||
{
|
||||
"name": "error_non_string_content_non_streaming",
|
||||
"provider": "openai",
|
||||
"model_name": "gpt-4o",
|
||||
"support_structure_output": True,
|
||||
"stream": False,
|
||||
"json_schema": {"type": "object", "properties": {"name": {"type": "string"}}},
|
||||
"expected_llm_response": LLMResult(
|
||||
model="gpt-4o",
|
||||
message=AssistantPromptMessage(content=None), # Non-string content
|
||||
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
|
||||
),
|
||||
"expected_result_type": None,
|
||||
"should_raise": True,
|
||||
"expected_error": OutputParserError,
|
||||
},
|
||||
# Test case 7: JSON repair scenario
|
||||
{
|
||||
"name": "json_repair_scenario",
|
||||
"provider": "openai",
|
||||
"model_name": "gpt-4o",
|
||||
"support_structure_output": True,
|
||||
"stream": False,
|
||||
"json_schema": {"type": "object", "properties": {"name": {"type": "string"}}},
|
||||
"expected_llm_response": LLMResult(
|
||||
model="gpt-4o",
|
||||
message=AssistantPromptMessage(content='{"name": "test"'), # Invalid JSON - missing closing brace
|
||||
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
|
||||
),
|
||||
"expected_result_type": LLMResultWithStructuredOutput,
|
||||
"should_raise": False,
|
||||
},
|
||||
# Test case 8: Model with parameter rules for response format
|
||||
{
|
||||
"name": "model_with_parameter_rules",
|
||||
"provider": "openai",
|
||||
"model_name": "gpt-4o",
|
||||
"support_structure_output": True,
|
||||
"stream": False,
|
||||
"json_schema": {"type": "object", "properties": {"result": {"type": "string"}}},
|
||||
"parameter_rules": [
|
||||
MagicMock(name="response_format", options=["json_schema"], required=False),
|
||||
],
|
||||
"expected_llm_response": LLMResult(
|
||||
model="gpt-4o",
|
||||
message=AssistantPromptMessage(content='{"result": "success"}'),
|
||||
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
|
||||
),
|
||||
"expected_result_type": LLMResultWithStructuredOutput,
|
||||
"should_raise": False,
|
||||
},
|
||||
# Test case 9: Model without native support but with JSON response format rules
|
||||
{
|
||||
"name": "non_native_with_json_rules",
|
||||
"provider": "anthropic",
|
||||
"model_name": "claude-3-sonnet",
|
||||
"support_structure_output": False,
|
||||
"stream": False,
|
||||
"json_schema": {"type": "object", "properties": {"output": {"type": "string"}}},
|
||||
"parameter_rules": [
|
||||
MagicMock(name="response_format", options=["JSON"], required=False),
|
||||
],
|
||||
"expected_llm_response": LLMResult(
|
||||
model="claude-3-sonnet",
|
||||
message=AssistantPromptMessage(content='{"output": "result"}'),
|
||||
usage=create_mock_usage(prompt_tokens=15, completion_tokens=8),
|
||||
),
|
||||
"expected_result_type": LLMResultWithStructuredOutput,
|
||||
"should_raise": False,
|
||||
},
|
||||
]
|
||||
|
||||
for case in testcases:
|
||||
print(f"Running test case: {case['name']}")
|
||||
|
||||
# Setup model entity
|
||||
model_schema = get_model_entity(case["provider"], case["model_name"], case["support_structure_output"])
|
||||
|
||||
# Add parameter rules if specified
|
||||
if "parameter_rules" in case:
|
||||
model_schema.parameter_rules = case["parameter_rules"]
|
||||
|
||||
# Setup model instance
|
||||
model_instance = get_model_instance()
|
||||
model_instance.invoke_llm.return_value = case["expected_llm_response"]
|
||||
|
||||
# Setup prompt messages
|
||||
prompt_messages = [
|
||||
SystemPromptMessage(content="You are a helpful assistant."),
|
||||
UserPromptMessage(content="Generate a response according to the schema."),
|
||||
]
|
||||
|
||||
if case["should_raise"]:
|
||||
# Test error cases
|
||||
with pytest.raises(case["expected_error"]): # noqa: PT012
|
||||
if case["stream"]:
|
||||
result_generator = invoke_llm_with_structured_output(
|
||||
provider=case["provider"],
|
||||
model_schema=model_schema,
|
||||
model_instance=model_instance,
|
||||
prompt_messages=prompt_messages,
|
||||
json_schema=case["json_schema"],
|
||||
stream=case["stream"],
|
||||
)
|
||||
# Consume the generator to trigger the error
|
||||
list(result_generator)
|
||||
else:
|
||||
invoke_llm_with_structured_output(
|
||||
provider=case["provider"],
|
||||
model_schema=model_schema,
|
||||
model_instance=model_instance,
|
||||
prompt_messages=prompt_messages,
|
||||
json_schema=case["json_schema"],
|
||||
stream=case["stream"],
|
||||
)
|
||||
else:
|
||||
# Test successful cases
|
||||
with patch("core.llm_generator.output_parser.structured_output.json_repair.loads") as mock_json_repair:
|
||||
# Configure json_repair mock for cases that need it
|
||||
if case["name"] == "json_repair_scenario":
|
||||
mock_json_repair.return_value = {"name": "test"}
|
||||
|
||||
result = invoke_llm_with_structured_output(
|
||||
provider=case["provider"],
|
||||
model_schema=model_schema,
|
||||
model_instance=model_instance,
|
||||
prompt_messages=prompt_messages,
|
||||
json_schema=case["json_schema"],
|
||||
stream=case["stream"],
|
||||
model_parameters={"temperature": 0.7, "max_tokens": 100},
|
||||
user="test_user",
|
||||
)
|
||||
|
||||
if case["expected_result_type"] == "generator":
|
||||
# Test streaming results
|
||||
assert hasattr(result, "__iter__")
|
||||
chunks = list(result)
|
||||
assert len(chunks) > 0
|
||||
|
||||
# Verify all chunks are LLMResultChunkWithStructuredOutput
|
||||
for chunk in chunks[:-1]: # All except last
|
||||
assert isinstance(chunk, LLMResultChunkWithStructuredOutput)
|
||||
assert chunk.model == case["model_name"]
|
||||
|
||||
# Last chunk should have structured output
|
||||
last_chunk = chunks[-1]
|
||||
assert isinstance(last_chunk, LLMResultChunkWithStructuredOutput)
|
||||
assert last_chunk.structured_output is not None
|
||||
assert isinstance(last_chunk.structured_output, dict)
|
||||
else:
|
||||
# Test non-streaming results
|
||||
assert isinstance(result, case["expected_result_type"])
|
||||
assert result.model == case["model_name"]
|
||||
assert result.structured_output is not None
|
||||
assert isinstance(result.structured_output, dict)
|
||||
|
||||
# Verify model_instance.invoke_llm was called with correct parameters
|
||||
model_instance.invoke_llm.assert_called_once()
|
||||
call_args = model_instance.invoke_llm.call_args
|
||||
|
||||
assert call_args.kwargs["stream"] == case["stream"]
|
||||
assert call_args.kwargs["user"] == "test_user"
|
||||
assert "temperature" in call_args.kwargs["model_parameters"]
|
||||
assert "max_tokens" in call_args.kwargs["model_parameters"]
|
||||
|
||||
|
||||
def test_parse_structured_output_edge_cases():
|
||||
"""Test edge cases for structured output parsing"""
|
||||
|
||||
# Test case with list that contains dict (reasoning model scenario)
|
||||
testcase_list_with_dict = {
|
||||
"name": "list_with_dict_parsing",
|
||||
"provider": "deepseek",
|
||||
"model_name": "deepseek-r1",
|
||||
"support_structure_output": False,
|
||||
"stream": False,
|
||||
"json_schema": {"type": "object", "properties": {"thought": {"type": "string"}}},
|
||||
"expected_llm_response": LLMResult(
|
||||
model="deepseek-r1",
|
||||
message=AssistantPromptMessage(content='[{"thought": "reasoning process"}, "other content"]'),
|
||||
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
|
||||
),
|
||||
"expected_result_type": LLMResultWithStructuredOutput,
|
||||
"should_raise": False,
|
||||
}
|
||||
|
||||
# Setup for list parsing test
|
||||
model_schema = get_model_entity(
|
||||
testcase_list_with_dict["provider"],
|
||||
testcase_list_with_dict["model_name"],
|
||||
testcase_list_with_dict["support_structure_output"],
|
||||
)
|
||||
|
||||
model_instance = get_model_instance()
|
||||
model_instance.invoke_llm.return_value = testcase_list_with_dict["expected_llm_response"]
|
||||
|
||||
prompt_messages = [UserPromptMessage(content="Test reasoning")]
|
||||
|
||||
with patch("core.llm_generator.output_parser.structured_output.json_repair.loads") as mock_json_repair:
|
||||
# Mock json_repair to return a list with dict
|
||||
mock_json_repair.return_value = [{"thought": "reasoning process"}, "other content"]
|
||||
|
||||
result = invoke_llm_with_structured_output(
|
||||
provider=testcase_list_with_dict["provider"],
|
||||
model_schema=model_schema,
|
||||
model_instance=model_instance,
|
||||
prompt_messages=prompt_messages,
|
||||
json_schema=testcase_list_with_dict["json_schema"],
|
||||
stream=testcase_list_with_dict["stream"],
|
||||
)
|
||||
|
||||
assert isinstance(result, LLMResultWithStructuredOutput)
|
||||
assert result.structured_output == {"thought": "reasoning process"}
|
||||
|
||||
|
||||
def test_model_specific_schema_preparation():
|
||||
"""Test schema preparation for different model types"""
|
||||
|
||||
# Test Gemini model
|
||||
gemini_case = {
|
||||
"provider": "google",
|
||||
"model_name": "gemini-pro",
|
||||
"support_structure_output": True,
|
||||
"stream": False,
|
||||
"json_schema": {"type": "object", "properties": {"result": {"type": "boolean"}}, "additionalProperties": False},
|
||||
}
|
||||
|
||||
model_schema = get_model_entity(
|
||||
gemini_case["provider"], gemini_case["model_name"], gemini_case["support_structure_output"]
|
||||
)
|
||||
|
||||
model_instance = get_model_instance()
|
||||
model_instance.invoke_llm.return_value = LLMResult(
|
||||
model="gemini-pro",
|
||||
message=AssistantPromptMessage(content='{"result": "true"}'),
|
||||
usage=create_mock_usage(prompt_tokens=10, completion_tokens=5),
|
||||
)
|
||||
|
||||
prompt_messages = [UserPromptMessage(content="Test")]
|
||||
|
||||
result = invoke_llm_with_structured_output(
|
||||
provider=gemini_case["provider"],
|
||||
model_schema=model_schema,
|
||||
model_instance=model_instance,
|
||||
prompt_messages=prompt_messages,
|
||||
json_schema=gemini_case["json_schema"],
|
||||
stream=gemini_case["stream"],
|
||||
)
|
||||
|
||||
assert isinstance(result, LLMResultWithStructuredOutput)
|
||||
|
||||
# Verify model_instance.invoke_llm was called and check the schema preparation
|
||||
model_instance.invoke_llm.assert_called_once()
|
||||
call_args = model_instance.invoke_llm.call_args
|
||||
|
||||
# For Gemini, the schema should not have additionalProperties and boolean should be converted to string
|
||||
assert "json_schema" in call_args.kwargs["model_parameters"]
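
The cases above exercise two paths: models with native structured-output support receive the JSON schema through their model parameters, while other models are steered toward JSON by prompt, and malformed output is recovered with `json_repair`. A simplified sketch of the parsing half, inferred from these tests rather than copied from the implementation:

import json_repair

from core.llm_generator.output_parser.errors import OutputParserError

def parse_structured_output_sketch(raw_text: str) -> dict:
    parsed = json_repair.loads(raw_text)  # tolerates truncated or slightly broken JSON
    if isinstance(parsed, list):
        # Some reasoning models wrap the object in a list; keep the first dict found.
        parsed = next((item for item in parsed if isinstance(item, dict)), None)
    if not isinstance(parsed, dict):
        raise OutputParserError(f"failed to parse structured output from: {raw_text!r}")
    return parsed
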
|
||||
api/uv.lock: 4359 lines changed (file diff suppressed because it is too large)

@@ -7,4 +7,4 @@ cd "$SCRIPT_DIR/.."
# run mypy checks
uv run --directory api --dev --with pip \
python -m mypy --install-types --non-interactive ./
python -m mypy --install-types --non-interactive --exclude venv ./

@@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
|
|||
services:
|
||||
# API service
|
||||
api:
|
||||
image: langgenius/dify-api:1.5.0
|
||||
image: langgenius/dify-api:1.5.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
|
|
@@ -31,7 +31,7 @@ services:
|
|||
# worker service
|
||||
# The Celery worker for processing the queue.
|
||||
worker:
|
||||
image: langgenius/dify-api:1.5.0
|
||||
image: langgenius/dify-api:1.5.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
|
|
@@ -57,7 +57,7 @@ services:
|
|||
|
||||
# Frontend web application.
|
||||
web:
|
||||
image: langgenius/dify-web:1.5.0
|
||||
image: langgenius/dify-web:1.5.1
|
||||
restart: always
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
|
|
@@ -142,7 +142,7 @@ services:
|
|||
|
||||
# plugin daemon
|
||||
plugin_daemon:
|
||||
image: langgenius/dify-plugin-daemon:0.1.2-local
|
||||
image: langgenius/dify-plugin-daemon:0.1.3-local
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
|
|
@@ -168,7 +168,7 @@ services:
|
|||
PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
|
||||
PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
|
||||
S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
|
||||
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-}
|
||||
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
|
||||
S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
|
||||
S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
|
||||
AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-}
|
||||
|
|
|
|||
|
|
@@ -71,7 +71,7 @@ services:
|
|||
|
||||
# plugin daemon
|
||||
plugin_daemon:
|
||||
image: langgenius/dify-plugin-daemon:0.1.2-local
|
||||
image: langgenius/dify-plugin-daemon:0.1.3-local
|
||||
restart: always
|
||||
env_file:
|
||||
- ./middleware.env
|
||||
|
|
|
|||
|
|
@@ -517,7 +517,7 @@ x-shared-env: &shared-api-worker-env
|
|||
services:
|
||||
# API service
|
||||
api:
|
||||
image: langgenius/dify-api:1.5.0
|
||||
image: langgenius/dify-api:1.5.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
|
|
@@ -546,7 +546,7 @@ services:
|
|||
# worker service
|
||||
# The Celery worker for processing the queue.
|
||||
worker:
|
||||
image: langgenius/dify-api:1.5.0
|
||||
image: langgenius/dify-api:1.5.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
|
|
@@ -572,7 +572,7 @@ services:
|
|||
|
||||
# Frontend web application.
|
||||
web:
|
||||
image: langgenius/dify-web:1.5.0
|
||||
image: langgenius/dify-web:1.5.1
|
||||
restart: always
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
|
|
@@ -657,7 +657,7 @@ services:
|
|||
|
||||
# plugin daemon
|
||||
plugin_daemon:
|
||||
image: langgenius/dify-plugin-daemon:0.1.2-local
|
||||
image: langgenius/dify-plugin-daemon:0.1.3-local
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
|
|
@@ -683,7 +683,7 @@ services:
|
|||
PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
|
||||
PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
|
||||
S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
|
||||
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-}
|
||||
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
|
||||
S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
|
||||
S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
|
||||
AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-}
|
||||
|
|
|
|||
|
|
@@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next'
|
|||
import { useBoolean } from 'ahooks'
|
||||
import TracingIcon from './tracing-icon'
|
||||
import ProviderPanel from './provider-panel'
|
||||
import type { LangFuseConfig, LangSmithConfig, OpikConfig, WeaveConfig } from './type'
|
||||
import type { ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type'
|
||||
import { TracingProvider } from './type'
|
||||
import ProviderConfigModal from './provider-config-modal'
|
||||
import Indicator from '@/app/components/header/indicator'
|
||||
|
|
@@ -23,11 +23,13 @@ export type PopupProps = {
|
|||
onStatusChange: (enabled: boolean) => void
|
||||
chosenProvider: TracingProvider | null
|
||||
onChooseProvider: (provider: TracingProvider) => void
|
||||
arizeConfig: ArizeConfig | null
|
||||
phoenixConfig: PhoenixConfig | null
|
||||
langSmithConfig: LangSmithConfig | null
|
||||
langFuseConfig: LangFuseConfig | null
|
||||
opikConfig: OpikConfig | null
|
||||
weaveConfig: WeaveConfig | null
|
||||
onConfigUpdated: (provider: TracingProvider, payload: LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig) => void
|
||||
onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig) => void
|
||||
onConfigRemoved: (provider: TracingProvider) => void
|
||||
}
|
||||
|
||||
|
|
@@ -38,6 +40,8 @@ const ConfigPopup: FC<PopupProps> = ({
|
|||
onStatusChange,
|
||||
chosenProvider,
|
||||
onChooseProvider,
|
||||
arizeConfig,
|
||||
phoenixConfig,
|
||||
langSmithConfig,
|
||||
langFuseConfig,
|
||||
opikConfig,
|
||||
|
|
@@ -65,7 +69,7 @@ const ConfigPopup: FC<PopupProps> = ({
|
|||
}
|
||||
}, [onChooseProvider])
|
||||
|
||||
const handleConfigUpdated = useCallback((payload: LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig) => {
|
||||
const handleConfigUpdated = useCallback((payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig) => {
|
||||
onConfigUpdated(currentProvider!, payload)
|
||||
hideConfigModal()
|
||||
}, [currentProvider, hideConfigModal, onConfigUpdated])
|
||||
|
|
@@ -75,8 +79,8 @@ const ConfigPopup: FC<PopupProps> = ({
|
|||
hideConfigModal()
|
||||
}, [currentProvider, hideConfigModal, onConfigRemoved])
|
||||
|
||||
const providerAllConfigured = langSmithConfig && langFuseConfig && opikConfig && weaveConfig
|
||||
const providerAllNotConfigured = !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig
|
||||
const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig
|
||||
const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig
|
||||
|
||||
const switchContent = (
|
||||
<Switch
|
||||
|
|
@@ -86,6 +90,32 @@ const ConfigPopup: FC<PopupProps> = ({
|
|||
disabled={providerAllNotConfigured}
|
||||
/>
|
||||
)
|
||||
const arizePanel = (
|
||||
<ProviderPanel
|
||||
type={TracingProvider.arize}
|
||||
readOnly={readOnly}
|
||||
config={arizeConfig}
|
||||
hasConfigured={!!arizeConfig}
|
||||
onConfig={handleOnConfig(TracingProvider.arize)}
|
||||
isChosen={chosenProvider === TracingProvider.arize}
|
||||
onChoose={handleOnChoose(TracingProvider.arize)}
|
||||
key="arize-provider-panel"
|
||||
/>
|
||||
)
|
||||
|
||||
const phoenixPanel = (
|
||||
<ProviderPanel
|
||||
type={TracingProvider.phoenix}
|
||||
readOnly={readOnly}
|
||||
config={phoenixConfig}
|
||||
hasConfigured={!!phoenixConfig}
|
||||
onConfig={handleOnConfig(TracingProvider.phoenix)}
|
||||
isChosen={chosenProvider === TracingProvider.phoenix}
|
||||
onChoose={handleOnChoose(TracingProvider.phoenix)}
|
||||
key="phoenix-provider-panel"
|
||||
/>
|
||||
)
|
||||
|
||||
const langSmithPanel = (
|
||||
<ProviderPanel
|
||||
type={TracingProvider.langSmith}
|
||||
|
|
@@ -152,12 +182,24 @@ const ConfigPopup: FC<PopupProps> = ({
|
|||
if (weaveConfig)
|
||||
configuredPanels.push(weavePanel)
|
||||
|
||||
if (arizeConfig)
|
||||
configuredPanels.push(arizePanel)
|
||||
|
||||
if (phoenixConfig)
|
||||
configuredPanels.push(phoenixPanel)
|
||||
|
||||
return configuredPanels
|
||||
}
|
||||
|
||||
const moreProviderPanel = () => {
|
||||
const notConfiguredPanels: JSX.Element[] = []
|
||||
|
||||
if (!arizeConfig)
|
||||
notConfiguredPanels.push(arizePanel)
|
||||
|
||||
if (!phoenixConfig)
|
||||
notConfiguredPanels.push(phoenixPanel)
|
||||
|
||||
if (!langFuseConfig)
|
||||
notConfiguredPanels.push(langfusePanel)
|
||||
|
||||
|
|
@@ -174,6 +216,10 @@ const ConfigPopup: FC<PopupProps> = ({
|
|||
}
|
||||
|
||||
const configuredProviderConfig = () => {
|
||||
if (currentProvider === TracingProvider.arize)
|
||||
return arizeConfig
|
||||
if (currentProvider === TracingProvider.phoenix)
|
||||
return phoenixConfig
|
||||
if (currentProvider === TracingProvider.langSmith)
|
||||
return langSmithConfig
|
||||
if (currentProvider === TracingProvider.langfuse)
|
||||
|
|
@ -220,22 +266,24 @@ const ConfigPopup: FC<PopupProps> = ({
|
|||
? (
|
||||
<>
|
||||
<div className='system-xs-medium-uppercase text-text-tertiary'>{t(`${I18N_PREFIX}.configProviderTitle.${providerAllConfigured ? 'configured' : 'notConfigured'}`)}</div>
|
||||
<div className='mt-2 space-y-2'>
|
||||
<div className='mt-2 max-h-96 space-y-2 overflow-y-auto'>
|
||||
{langfusePanel}
|
||||
{langSmithPanel}
|
||||
{opikPanel}
|
||||
{weavePanel}
|
||||
{arizePanel}
|
||||
{phoenixPanel}
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
: (
|
||||
<>
|
||||
<div className='system-xs-medium-uppercase text-text-tertiary'>{t(`${I18N_PREFIX}.configProviderTitle.configured`)}</div>
|
||||
<div className='mt-2 space-y-2'>
|
||||
<div className='mt-2 max-h-40 space-y-2 overflow-y-auto'>
|
||||
{configuredProviderPanel()}
|
||||
</div>
|
||||
<div className='system-xs-medium-uppercase mt-3 text-text-tertiary'>{t(`${I18N_PREFIX}.configProviderTitle.moreProvider`)}</div>
|
||||
<div className='mt-2 space-y-2'>
|
||||
<div className='mt-2 max-h-40 space-y-2 overflow-y-auto'>
|
||||
{moreProviderPanel()}
|
||||
</div>
|
||||
</>
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
import { TracingProvider } from './type'

export const docURL = {
  [TracingProvider.arize]: 'https://docs.arize.com/arize',
  [TracingProvider.phoenix]: 'https://docs.arize.com/phoenix',
  [TracingProvider.langSmith]: 'https://docs.smith.langchain.com/',
  [TracingProvider.langfuse]: 'https://docs.langfuse.com',
  [TracingProvider.opik]: 'https://www.comet.com/docs/opik/tracing/integrations/dify#setup-instructions',
|
|
|
|||
|
|
@ -7,12 +7,12 @@ import {
|
|||
import { useTranslation } from 'react-i18next'
|
||||
import { usePathname } from 'next/navigation'
|
||||
import { useBoolean } from 'ahooks'
|
||||
import type { LangFuseConfig, LangSmithConfig, OpikConfig, WeaveConfig } from './type'
|
||||
import type { ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type'
|
||||
import { TracingProvider } from './type'
|
||||
import TracingIcon from './tracing-icon'
|
||||
import ConfigButton from './config-button'
|
||||
import cn from '@/utils/classnames'
|
||||
import { LangfuseIcon, LangsmithIcon, OpikIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing'
|
||||
import { ArizeIcon, LangfuseIcon, LangsmithIcon, OpikIcon, PhoenixIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing'
|
||||
import Indicator from '@/app/components/header/indicator'
|
||||
import { fetchTracingConfig as doFetchTracingConfig, fetchTracingStatus, updateTracingStatus } from '@/service/apps'
|
||||
import type { TracingStatus } from '@/models/app'
|
||||
|
|
@ -62,24 +62,31 @@ const Panel: FC = () => {
|
|||
}
|
||||
const inUseTracingProvider: TracingProvider | null = tracingStatus?.tracing_provider || null
|
||||
|
||||
const InUseProviderIcon
|
||||
= inUseTracingProvider === TracingProvider.langSmith
|
||||
? LangsmithIcon
|
||||
: inUseTracingProvider === TracingProvider.langfuse
|
||||
? LangfuseIcon
|
||||
: inUseTracingProvider === TracingProvider.opik
|
||||
? OpikIcon
|
||||
: inUseTracingProvider === TracingProvider.weave
|
||||
? WeaveIcon
|
||||
: LangsmithIcon
|
||||
const providerIconMap: Record<TracingProvider, React.FC<{ className?: string }>> = {
|
||||
[TracingProvider.arize]: ArizeIcon,
|
||||
[TracingProvider.phoenix]: PhoenixIcon,
|
||||
[TracingProvider.langSmith]: LangsmithIcon,
|
||||
[TracingProvider.langfuse]: LangfuseIcon,
|
||||
[TracingProvider.opik]: OpikIcon,
|
||||
[TracingProvider.weave]: WeaveIcon,
|
||||
}
|
||||
const InUseProviderIcon = inUseTracingProvider ? providerIconMap[inUseTracingProvider] : undefined
|
||||
|
||||
const [arizeConfig, setArizeConfig] = useState<ArizeConfig | null>(null)
|
||||
const [phoenixConfig, setPhoenixConfig] = useState<PhoenixConfig | null>(null)
|
||||
const [langSmithConfig, setLangSmithConfig] = useState<LangSmithConfig | null>(null)
|
||||
const [langFuseConfig, setLangFuseConfig] = useState<LangFuseConfig | null>(null)
|
||||
const [opikConfig, setOpikConfig] = useState<OpikConfig | null>(null)
|
||||
const [weaveConfig, setWeaveConfig] = useState<WeaveConfig | null>(null)
|
||||
const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig)
|
||||
const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig)
|
||||
|
||||
const fetchTracingConfig = async () => {
|
||||
const { tracing_config: arizeConfig, has_not_configured: arizeHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.arize })
|
||||
if (!arizeHasNotConfig)
|
||||
setArizeConfig(arizeConfig as ArizeConfig)
|
||||
const { tracing_config: phoenixConfig, has_not_configured: phoenixHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.phoenix })
|
||||
if (!phoenixHasNotConfig)
|
||||
setPhoenixConfig(phoenixConfig as PhoenixConfig)
|
||||
const { tracing_config: langSmithConfig, has_not_configured: langSmithHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.langSmith })
|
||||
if (!langSmithHasNotConfig)
|
||||
setLangSmithConfig(langSmithConfig as LangSmithConfig)
|
||||
|
|
@ -97,7 +104,11 @@ const Panel: FC = () => {
|
|||
const handleTracingConfigUpdated = async (provider: TracingProvider) => {
|
||||
// call api to hide secret key value
|
||||
const { tracing_config } = await doFetchTracingConfig({ appId, provider })
|
||||
if (provider === TracingProvider.langSmith)
|
||||
if (provider === TracingProvider.arize)
|
||||
setArizeConfig(tracing_config as ArizeConfig)
|
||||
else if (provider === TracingProvider.phoenix)
|
||||
setPhoenixConfig(tracing_config as PhoenixConfig)
|
||||
else if (provider === TracingProvider.langSmith)
|
||||
setLangSmithConfig(tracing_config as LangSmithConfig)
|
||||
else if (provider === TracingProvider.langfuse)
|
||||
setLangFuseConfig(tracing_config as LangFuseConfig)
|
||||
|
|
@ -108,7 +119,11 @@ const Panel: FC = () => {
|
|||
}
|
||||
|
||||
const handleTracingConfigRemoved = (provider: TracingProvider) => {
|
||||
if (provider === TracingProvider.langSmith)
|
||||
if (provider === TracingProvider.arize)
|
||||
setArizeConfig(null)
|
||||
else if (provider === TracingProvider.phoenix)
|
||||
setPhoenixConfig(null)
|
||||
else if (provider === TracingProvider.langSmith)
|
||||
setLangSmithConfig(null)
|
||||
else if (provider === TracingProvider.langfuse)
|
||||
setLangFuseConfig(null)
|
||||
|
|
@ -170,6 +185,8 @@ const Panel: FC = () => {
|
|||
onStatusChange={handleTracingEnabledChange}
|
||||
chosenProvider={inUseTracingProvider}
|
||||
onChooseProvider={handleChooseProvider}
|
||||
arizeConfig={arizeConfig}
|
||||
phoenixConfig={phoenixConfig}
|
||||
langSmithConfig={langSmithConfig}
|
||||
langFuseConfig={langFuseConfig}
|
||||
opikConfig={opikConfig}
|
||||
|
|
@ -205,6 +222,8 @@ const Panel: FC = () => {
|
|||
onStatusChange={handleTracingEnabledChange}
|
||||
chosenProvider={inUseTracingProvider}
|
||||
onChooseProvider={handleChooseProvider}
|
||||
arizeConfig={arizeConfig}
|
||||
phoenixConfig={phoenixConfig}
|
||||
langSmithConfig={langSmithConfig}
|
||||
langFuseConfig={langFuseConfig}
|
||||
opikConfig={opikConfig}
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ import React, { useCallback, useState } from 'react'
|
|||
import { useTranslation } from 'react-i18next'
|
||||
import { useBoolean } from 'ahooks'
|
||||
import Field from './field'
|
||||
import type { LangFuseConfig, LangSmithConfig, OpikConfig, WeaveConfig } from './type'
|
||||
import type { ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type'
|
||||
import { TracingProvider } from './type'
|
||||
import { docURL } from './config'
|
||||
import {
|
||||
|
|
@ -22,15 +22,28 @@ import Divider from '@/app/components/base/divider'
|
|||
type Props = {
|
||||
appId: string
|
||||
type: TracingProvider
|
||||
payload?: LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | null
|
||||
payload?: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | null
|
||||
onRemoved: () => void
|
||||
onCancel: () => void
|
||||
onSaved: (payload: LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig) => void
|
||||
onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig) => void
|
||||
onChosen: (provider: TracingProvider) => void
|
||||
}
|
||||
|
||||
const I18N_PREFIX = 'app.tracing.configProvider'
|
||||
|
||||
const arizeConfigTemplate = {
|
||||
api_key: '',
|
||||
space_id: '',
|
||||
project: '',
|
||||
endpoint: '',
|
||||
}
|
||||
|
||||
const phoenixConfigTemplate = {
|
||||
api_key: '',
|
||||
project: '',
|
||||
endpoint: '',
|
||||
}
|
||||
|
||||
const langSmithConfigTemplate = {
|
||||
api_key: '',
|
||||
project: '',
|
||||
|
|
@ -71,11 +84,17 @@ const ProviderConfigModal: FC<Props> = ({
|
|||
const isEdit = !!payload
|
||||
const isAdd = !isEdit
|
||||
const [isSaving, setIsSaving] = useState(false)
|
||||
const [config, setConfig] = useState<LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig>((() => {
|
||||
const [config, setConfig] = useState<ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig>((() => {
|
||||
if (isEdit)
|
||||
return payload
|
||||
|
||||
if (type === TracingProvider.langSmith)
|
||||
if (type === TracingProvider.arize)
|
||||
return arizeConfigTemplate
|
||||
|
||||
else if (type === TracingProvider.phoenix)
|
||||
return phoenixConfigTemplate
|
||||
|
||||
else if (type === TracingProvider.langSmith)
|
||||
return langSmithConfigTemplate
|
||||
|
||||
else if (type === TracingProvider.langfuse)
|
||||
|
|
@ -115,6 +134,24 @@ const ProviderConfigModal: FC<Props> = ({
|
|||
|
||||
const checkValid = useCallback(() => {
|
||||
let errorMessage = ''
|
||||
if (type === TracingProvider.arize) {
|
||||
const postData = config as ArizeConfig
|
||||
if (!postData.api_key)
|
||||
errorMessage = t('common.errorMsg.fieldRequired', { field: 'API Key' })
|
||||
if (!postData.space_id)
|
||||
errorMessage = t('common.errorMsg.fieldRequired', { field: 'Space ID' })
|
||||
if (!errorMessage && !postData.project)
|
||||
errorMessage = t('common.errorMsg.fieldRequired', { field: t(`${I18N_PREFIX}.project`) })
|
||||
}
|
||||
|
||||
if (type === TracingProvider.phoenix) {
|
||||
const postData = config as PhoenixConfig
|
||||
if (!postData.api_key)
|
||||
errorMessage = t('common.errorMsg.fieldRequired', { field: 'API Key' })
|
||||
if (!errorMessage && !postData.project)
|
||||
errorMessage = t('common.errorMsg.fieldRequired', { field: t(`${I18N_PREFIX}.project`) })
|
||||
}
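The Arize and Phoenix branches above repeat the same required-field pattern. A minimal sketch of how those checks could be factored into one helper; the `requiredField` name and its use here are illustrative only, not part of this change:

```ts
// Illustrative helper, not part of this change: returns the first
// missing-field error message, or an empty string when all fields are set.
const requiredField = (
  t: (key: string, opts?: Record<string, unknown>) => string,
  fields: Array<{ label: string, value?: string }>,
): string => {
  const missing = fields.find(({ value }) => !value)
  return missing ? t('common.errorMsg.fieldRequired', { field: missing.label }) : ''
}

// Example with the Arize config shape introduced in this change:
// errorMessage = requiredField(t, [
//   { label: 'API Key', value: (config as ArizeConfig).api_key },
//   { label: 'Space ID', value: (config as ArizeConfig).space_id },
//   { label: t(`${I18N_PREFIX}.project`), value: (config as ArizeConfig).project },
// ])
```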
|
||||
|
||||
if (type === TracingProvider.langSmith) {
|
||||
const postData = config as LangSmithConfig
|
||||
if (!postData.api_key)
|
||||
|
|
@ -195,6 +232,68 @@ const ProviderConfigModal: FC<Props> = ({
|
|||
</div>
|
||||
|
||||
<div className='space-y-4'>
|
||||
{type === TracingProvider.arize && (
|
||||
<>
|
||||
<Field
|
||||
label='API Key'
|
||||
labelClassName='!text-sm'
|
||||
isRequired
|
||||
value={(config as ArizeConfig).api_key}
|
||||
onChange={handleConfigChange('api_key')}
|
||||
placeholder={t(`${I18N_PREFIX}.placeholder`, { key: 'API Key' })!}
|
||||
/>
|
||||
<Field
|
||||
label='Space ID'
|
||||
labelClassName='!text-sm'
|
||||
isRequired
|
||||
value={(config as ArizeConfig).space_id}
|
||||
onChange={handleConfigChange('space_id')}
|
||||
placeholder={t(`${I18N_PREFIX}.placeholder`, { key: 'Space ID' })!}
|
||||
/>
|
||||
<Field
|
||||
label={t(`${I18N_PREFIX}.project`)!}
|
||||
labelClassName='!text-sm'
|
||||
isRequired
|
||||
value={(config as ArizeConfig).project}
|
||||
onChange={handleConfigChange('project')}
|
||||
placeholder={t(`${I18N_PREFIX}.placeholder`, { key: t(`${I18N_PREFIX}.project`) })!}
|
||||
/>
|
||||
<Field
|
||||
label='Endpoint'
|
||||
labelClassName='!text-sm'
|
||||
value={(config as ArizeConfig).endpoint}
|
||||
onChange={handleConfigChange('endpoint')}
|
||||
placeholder={'https://otlp.arize.com'}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
{type === TracingProvider.phoenix && (
|
||||
<>
|
||||
<Field
|
||||
label='API Key'
|
||||
labelClassName='!text-sm'
|
||||
isRequired
|
||||
value={(config as PhoenixConfig).api_key}
|
||||
onChange={handleConfigChange('api_key')}
|
||||
placeholder={t(`${I18N_PREFIX}.placeholder`, { key: 'API Key' })!}
|
||||
/>
|
||||
<Field
|
||||
label={t(`${I18N_PREFIX}.project`)!}
|
||||
labelClassName='!text-sm'
|
||||
isRequired
|
||||
value={(config as PhoenixConfig).project}
|
||||
onChange={handleConfigChange('project')}
|
||||
placeholder={t(`${I18N_PREFIX}.placeholder`, { key: t(`${I18N_PREFIX}.project`) })!}
|
||||
/>
|
||||
<Field
|
||||
label='Endpoint'
|
||||
labelClassName='!text-sm'
|
||||
value={(config as PhoenixConfig).endpoint}
|
||||
onChange={handleConfigChange('endpoint')}
|
||||
placeholder={'https://app.phoenix.arize.com'}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
{type === TracingProvider.weave && (
|
||||
<>
|
||||
<Field
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import {
|
|||
import { useTranslation } from 'react-i18next'
|
||||
import { TracingProvider } from './type'
|
||||
import cn from '@/utils/classnames'
|
||||
import { LangfuseIconBig, LangsmithIconBig, OpikIconBig, WeaveIconBig } from '@/app/components/base/icons/src/public/tracing'
|
||||
import { ArizeIconBig, LangfuseIconBig, LangsmithIconBig, OpikIconBig, PhoenixIconBig, WeaveIconBig } from '@/app/components/base/icons/src/public/tracing'
|
||||
import { Eye as View } from '@/app/components/base/icons/src/vender/solid/general'
|
||||
|
||||
const I18N_PREFIX = 'app.tracing'
|
||||
|
|
@ -24,6 +24,8 @@ type Props = {
|
|||
|
||||
const getIcon = (type: TracingProvider) => {
|
||||
return ({
|
||||
[TracingProvider.arize]: ArizeIconBig,
|
||||
[TracingProvider.phoenix]: PhoenixIconBig,
|
||||
[TracingProvider.langSmith]: LangsmithIconBig,
|
||||
[TracingProvider.langfuse]: LangfuseIconBig,
|
||||
[TracingProvider.opik]: OpikIconBig,
|
||||
|
|
|
|||
|
|
@ -1,10 +1,25 @@
export enum TracingProvider {
  arize = 'arize',
  phoenix = 'phoenix',
  langSmith = 'langsmith',
  langfuse = 'langfuse',
  opik = 'opik',
  weave = 'weave',
}

export type ArizeConfig = {
  api_key: string
  space_id: string
  project: string
  endpoint: string
}

export type PhoenixConfig = {
  api_key: string
  project: string
  endpoint: string
}

export type LangSmithConfig = {
  api_key: string
  project: string
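For reference, a minimal sketch of config objects matching the new `ArizeConfig` and `PhoenixConfig` shapes. The values are placeholders, not working credentials; the endpoints mirror the placeholders shown in the provider config modal.

```ts
// Placeholder values only; real keys, space IDs and endpoints come from the
// provider consoles linked in docURL above.
const exampleArizeConfig: ArizeConfig = {
  api_key: 'your-arize-api-key',
  space_id: 'your-arize-space-id',
  project: 'my-dify-app',
  endpoint: 'https://otlp.arize.com',
}

const examplePhoenixConfig: PhoenixConfig = {
  api_key: 'your-phoenix-api-key',
  project: 'my-dify-app',
  endpoint: 'https://app.phoenix.arize.com',
}
```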
|
|
|
|||
|
|
@ -36,6 +36,7 @@ import AccessControl from '@/app/components/app/app-access-control'
|
|||
import { AccessMode } from '@/models/access-control'
|
||||
import { useGlobalPublicStore } from '@/context/global-public-context'
|
||||
import { formatTime } from '@/utils/time'
|
||||
import { useGetUserCanAccessApp } from '@/service/access-control'
|
||||
|
||||
export type AppCardProps = {
|
||||
app: App
|
||||
|
|
@ -190,6 +191,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
|
|||
}, [onRefresh, mutateApps, setShowAccessControl])
|
||||
|
||||
const Operations = (props: HtmlContentProps) => {
|
||||
const { data: userCanAccessApp, isLoading: isGettingUserCanAccessApp } = useGetUserCanAccessApp({ appId: app?.id, enabled: (!!props?.open && systemFeatures.webapp_auth.enabled) })
|
||||
const onMouseLeave = async () => {
|
||||
props.onClose?.()
|
||||
}
|
||||
|
|
@ -267,10 +269,14 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
|
|||
</button>
|
||||
</>
|
||||
)}
|
||||
<Divider className="my-1" />
|
||||
<button className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickInstalledApp}>
|
||||
<span className='system-sm-regular text-text-secondary'>{t('app.openInExplore')}</span>
|
||||
</button>
|
||||
{
|
||||
(isGettingUserCanAccessApp || !userCanAccessApp?.result) ? null : <>
|
||||
<Divider className="my-1" />
|
||||
<button className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickInstalledApp}>
|
||||
<span className='system-sm-regular text-text-secondary'>{t('app.openInExplore')}</span>
|
||||
</button>
|
||||
</>
|
||||
}
|
||||
<Divider className="my-1" />
|
||||
{
|
||||
systemFeatures.webapp_auth.enabled && isCurrentWorkspaceEditor && <>
|
||||
|
|
@ -333,7 +339,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
|
|||
<div className='flex items-center gap-1 text-[10px] font-medium leading-[18px] text-text-tertiary'>
|
||||
<div className='truncate' title={app.author_name}>{app.author_name}</div>
|
||||
<div>·</div>
|
||||
<div className='truncate'>{EditTimeText}</div>
|
||||
<div className='truncate' title={EditTimeText}>{EditTimeText}</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className='flex h-5 w-5 shrink-0 items-center justify-center'>
|
||||
|
|
|
|||
|
|
@ -1124,6 +1124,129 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
|
|||
|
||||
<hr className='ml-0 mr-0' />
|
||||
|
||||
<Heading
|
||||
url='/datasets/{dataset_id}/documents/{document_id}'
|
||||
method='GET'
|
||||
title='Get Document Detail'
|
||||
name='#get-document-detail'
|
||||
/>
|
||||
<Row>
|
||||
<Col>
|
||||
Get the details of a document.
|
||||
### Path
|
||||
- `dataset_id` (string) Dataset ID
|
||||
- `document_id` (string) Document ID
|
||||
|
||||
### Query
|
||||
- `metadata` (string) Metadata filter. Accepts `all`, `only`, or `without`. Defaults to `all`.
|
||||
|
||||
### Response
|
||||
Returns the details of the document.
|
||||
</Col>
|
||||
<Col sticky>
|
||||
### Request Example
|
||||
<CodeGroup title="Request" tag="GET" label="/datasets/{dataset_id}/documents/{document_id}" targetCode={`curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \\\n-H 'Authorization: Bearer {api_key}'`}>
|
||||
```bash {{ title: 'cURL' }}
|
||||
curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \
|
||||
-H 'Authorization: Bearer {api_key}'
|
||||
```
|
||||
</CodeGroup>
|
||||
|
||||
### Response Example
|
||||
<CodeGroup title="Response">
|
||||
```json {{ title: 'Response' }}
|
||||
{
|
||||
"id": "f46ae30c-5c11-471b-96d0-464f5f32a7b2",
|
||||
"position": 1,
|
||||
"data_source_type": "upload_file",
|
||||
"data_source_info": {
|
||||
"upload_file": {
|
||||
...
|
||||
}
|
||||
},
|
||||
"dataset_process_rule_id": "24b99906-845e-499f-9e3c-d5565dd6962c",
|
||||
"dataset_process_rule": {
|
||||
"mode": "hierarchical",
|
||||
"rules": {
|
||||
"pre_processing_rules": [
|
||||
{
|
||||
"id": "remove_extra_spaces",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"id": "remove_urls_emails",
|
||||
"enabled": false
|
||||
}
|
||||
],
|
||||
"segmentation": {
|
||||
"separator": "**********page_ending**********",
|
||||
"max_tokens": 1024,
|
||||
"chunk_overlap": 0
|
||||
},
|
||||
"parent_mode": "paragraph",
|
||||
"subchunk_segmentation": {
|
||||
"separator": "\n",
|
||||
"max_tokens": 512,
|
||||
"chunk_overlap": 0
|
||||
}
|
||||
}
|
||||
},
|
||||
"document_process_rule": {
|
||||
"id": "24b99906-845e-499f-9e3c-d5565dd6962c",
|
||||
"dataset_id": "48a0db76-d1a9-46c1-ae35-2baaa919a8a9",
|
||||
"mode": "hierarchical",
|
||||
"rules": {
|
||||
"pre_processing_rules": [
|
||||
{
|
||||
"id": "remove_extra_spaces",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"id": "remove_urls_emails",
|
||||
"enabled": false
|
||||
}
|
||||
],
|
||||
"segmentation": {
|
||||
"separator": "**********page_ending**********",
|
||||
"max_tokens": 1024,
|
||||
"chunk_overlap": 0
|
||||
},
|
||||
"parent_mode": "paragraph",
|
||||
"subchunk_segmentation": {
|
||||
"separator": "\n",
|
||||
"max_tokens": 512,
|
||||
"chunk_overlap": 0
|
||||
}
|
||||
}
|
||||
},
|
||||
"name": "xxxx",
|
||||
"created_from": "web",
|
||||
"created_by": "17f71940-a7b5-4c77-b60f-2bd645c1ffa0",
|
||||
"created_at": 1750464191,
|
||||
"tokens": null,
|
||||
"indexing_status": "waiting",
|
||||
"completed_at": null,
|
||||
"updated_at": 1750464191,
|
||||
"indexing_latency": null,
|
||||
"error": null,
|
||||
"enabled": true,
|
||||
"disabled_at": null,
|
||||
"disabled_by": null,
|
||||
"archived": false,
|
||||
"segment_count": 0,
|
||||
"average_segment_length": 0,
|
||||
"hit_count": null,
|
||||
"display_status": "queuing",
|
||||
"doc_form": "hierarchical_model",
|
||||
"doc_language": "Chinese Simplified"
|
||||
}
|
||||
```
|
||||
</CodeGroup>
|
||||
</Col>
|
||||
</Row>
|
||||
___
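For the `metadata` query parameter described above, here is a minimal TypeScript sketch of the same request. The base URL stands in for `props.apiBaseUrl`, and the dataset ID, document ID, and API key are placeholders.

```ts
// Minimal sketch using fetch; all identifiers below are placeholders.
const API_BASE_URL = 'https://api.dify.ai/v1' // assumption: stands in for props.apiBaseUrl
const DATASET_ID = 'your-dataset-id'
const DOCUMENT_ID = 'your-document-id'
const API_KEY = 'your-api-key'

async function getDocumentDetail(metadata: 'all' | 'only' | 'without' = 'all') {
  const res = await fetch(
    `${API_BASE_URL}/datasets/${DATASET_ID}/documents/${DOCUMENT_ID}?metadata=${metadata}`,
    { headers: { Authorization: `Bearer ${API_KEY}` } },
  )
  if (!res.ok)
    throw new Error(`Request failed with status ${res.status}`)
  return res.json()
}
```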
|
||||
<hr className='ml-0 mr-0' />
|
||||
|
||||
<Heading
|
||||
url='/datasets/{dataset_id}/documents/status/{action}'
|
||||
method='PATCH'
|
||||
|
|
|
|||
|
|
@ -881,6 +881,130 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
|
|||
|
||||
<hr className='ml-0 mr-0' />
|
||||
|
||||
<Heading
|
||||
url='/datasets/{dataset_id}/documents/{document_id}'
|
||||
method='GET'
|
||||
title='ドキュメントの詳細を取得'
|
||||
name='#get-document-detail'
|
||||
/>
|
||||
<Row>
|
||||
<Col>
|
||||
ドキュメントの詳細を取得.
|
||||
### Path
|
||||
- `dataset_id` (string) ナレッジベースID
|
||||
- `document_id` (string) ドキュメントID
|
||||
|
||||
### Query
|
||||
- `metadata` (string) metadataのフィルター条件 `all`、`only`、または`without`。デフォルトは `all`。
|
||||
|
||||
### Response
|
||||
ナレッジベースドキュメントの詳細を返す.
|
||||
</Col>
|
||||
<Col sticky>
|
||||
### Request Example
|
||||
<CodeGroup title="Request" tag="GET" label="/datasets/{dataset_id}/documents/{document_id}" targetCode={`curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \\\n-H 'Authorization: Bearer {api_key}'`}>
|
||||
```bash {{ title: 'cURL' }}
|
||||
curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \
|
||||
-H 'Authorization: Bearer {api_key}'
|
||||
```
|
||||
</CodeGroup>
|
||||
|
||||
### Response Example
|
||||
<CodeGroup title="Response">
|
||||
```json {{ title: 'Response' }}
|
||||
{
|
||||
"id": "f46ae30c-5c11-471b-96d0-464f5f32a7b2",
|
||||
"position": 1,
|
||||
"data_source_type": "upload_file",
|
||||
"data_source_info": {
|
||||
"upload_file": {
|
||||
...
|
||||
}
|
||||
},
|
||||
"dataset_process_rule_id": "24b99906-845e-499f-9e3c-d5565dd6962c",
|
||||
"dataset_process_rule": {
|
||||
"mode": "hierarchical",
|
||||
"rules": {
|
||||
"pre_processing_rules": [
|
||||
{
|
||||
"id": "remove_extra_spaces",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"id": "remove_urls_emails",
|
||||
"enabled": false
|
||||
}
|
||||
],
|
||||
"segmentation": {
|
||||
"separator": "**********page_ending**********",
|
||||
"max_tokens": 1024,
|
||||
"chunk_overlap": 0
|
||||
},
|
||||
"parent_mode": "paragraph",
|
||||
"subchunk_segmentation": {
|
||||
"separator": "\n",
|
||||
"max_tokens": 512,
|
||||
"chunk_overlap": 0
|
||||
}
|
||||
}
|
||||
},
|
||||
"document_process_rule": {
|
||||
"id": "24b99906-845e-499f-9e3c-d5565dd6962c",
|
||||
"dataset_id": "48a0db76-d1a9-46c1-ae35-2baaa919a8a9",
|
||||
"mode": "hierarchical",
|
||||
"rules": {
|
||||
"pre_processing_rules": [
|
||||
{
|
||||
"id": "remove_extra_spaces",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"id": "remove_urls_emails",
|
||||
"enabled": false
|
||||
}
|
||||
],
|
||||
"segmentation": {
|
||||
"separator": "**********page_ending**********",
|
||||
"max_tokens": 1024,
|
||||
"chunk_overlap": 0
|
||||
},
|
||||
"parent_mode": "paragraph",
|
||||
"subchunk_segmentation": {
|
||||
"separator": "\n",
|
||||
"max_tokens": 512,
|
||||
"chunk_overlap": 0
|
||||
}
|
||||
}
|
||||
},
|
||||
"name": "xxxx",
|
||||
"created_from": "web",
|
||||
"created_by": "17f71940-a7b5-4c77-b60f-2bd645c1ffa0",
|
||||
"created_at": 1750464191,
|
||||
"tokens": null,
|
||||
"indexing_status": "waiting",
|
||||
"completed_at": null,
|
||||
"updated_at": 1750464191,
|
||||
"indexing_latency": null,
|
||||
"error": null,
|
||||
"enabled": true,
|
||||
"disabled_at": null,
|
||||
"disabled_by": null,
|
||||
"archived": false,
|
||||
"segment_count": 0,
|
||||
"average_segment_length": 0,
|
||||
"hit_count": null,
|
||||
"display_status": "queuing",
|
||||
"doc_form": "hierarchical_model",
|
||||
"doc_language": "Chinese Simplified"
|
||||
}
|
||||
```
|
||||
</CodeGroup>
|
||||
</Col>
|
||||
</Row>
|
||||
___
|
||||
<hr className='ml-0 mr-0' />
|
||||
|
||||
|
||||
<Heading
|
||||
url='/datasets/{dataset_id}/documents/status/{action}'
|
||||
method='PATCH'
|
||||
|
|
|
|||
|
|
@ -1131,6 +1131,130 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
|
|||
|
||||
<hr className='ml-0 mr-0' />
|
||||
|
||||
<Heading
|
||||
url='/datasets/{dataset_id}/documents/{document_id}'
|
||||
method='GET'
|
||||
title='获取文档详情'
|
||||
name='#get-document-detail'
|
||||
/>
|
||||
<Row>
|
||||
<Col>
|
||||
获取文档详情.
|
||||
### Path
|
||||
- `dataset_id` (string) 知识库 ID
|
||||
- `document_id` (string) 文档 ID
|
||||
|
||||
### Query
|
||||
- `metadata` (string) metadata 过滤条件 `all`, `only`, 或者 `without`. 默认是 `all`.
|
||||
|
||||
### Response
|
||||
返回知识库文档的详情.
|
||||
</Col>
|
||||
<Col sticky>
|
||||
### Request Example
|
||||
<CodeGroup title="Request" tag="GET" label="/datasets/{dataset_id}/documents/{document_id}" targetCode={`curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \\\n-H 'Authorization: Bearer {api_key}'`}>
|
||||
```bash {{ title: 'cURL' }}
|
||||
curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \
|
||||
-H 'Authorization: Bearer {api_key}'
|
||||
```
|
||||
</CodeGroup>
|
||||
|
||||
### Response Example
|
||||
<CodeGroup title="Response">
|
||||
```json {{ title: 'Response' }}
|
||||
{
|
||||
"id": "f46ae30c-5c11-471b-96d0-464f5f32a7b2",
|
||||
"position": 1,
|
||||
"data_source_type": "upload_file",
|
||||
"data_source_info": {
|
||||
"upload_file": {
|
||||
...
|
||||
}
|
||||
},
|
||||
"dataset_process_rule_id": "24b99906-845e-499f-9e3c-d5565dd6962c",
|
||||
"dataset_process_rule": {
|
||||
"mode": "hierarchical",
|
||||
"rules": {
|
||||
"pre_processing_rules": [
|
||||
{
|
||||
"id": "remove_extra_spaces",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"id": "remove_urls_emails",
|
||||
"enabled": false
|
||||
}
|
||||
],
|
||||
"segmentation": {
|
||||
"separator": "**********page_ending**********",
|
||||
"max_tokens": 1024,
|
||||
"chunk_overlap": 0
|
||||
},
|
||||
"parent_mode": "paragraph",
|
||||
"subchunk_segmentation": {
|
||||
"separator": "\n",
|
||||
"max_tokens": 512,
|
||||
"chunk_overlap": 0
|
||||
}
|
||||
}
|
||||
},
|
||||
"document_process_rule": {
|
||||
"id": "24b99906-845e-499f-9e3c-d5565dd6962c",
|
||||
"dataset_id": "48a0db76-d1a9-46c1-ae35-2baaa919a8a9",
|
||||
"mode": "hierarchical",
|
||||
"rules": {
|
||||
"pre_processing_rules": [
|
||||
{
|
||||
"id": "remove_extra_spaces",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"id": "remove_urls_emails",
|
||||
"enabled": false
|
||||
}
|
||||
],
|
||||
"segmentation": {
|
||||
"separator": "**********page_ending**********",
|
||||
"max_tokens": 1024,
|
||||
"chunk_overlap": 0
|
||||
},
|
||||
"parent_mode": "paragraph",
|
||||
"subchunk_segmentation": {
|
||||
"separator": "\n",
|
||||
"max_tokens": 512,
|
||||
"chunk_overlap": 0
|
||||
}
|
||||
}
|
||||
},
|
||||
"name": "xxxx",
|
||||
"created_from": "web",
|
||||
"created_by": "17f71940-a7b5-4c77-b60f-2bd645c1ffa0",
|
||||
"created_at": 1750464191,
|
||||
"tokens": null,
|
||||
"indexing_status": "waiting",
|
||||
"completed_at": null,
|
||||
"updated_at": 1750464191,
|
||||
"indexing_latency": null,
|
||||
"error": null,
|
||||
"enabled": true,
|
||||
"disabled_at": null,
|
||||
"disabled_by": null,
|
||||
"archived": false,
|
||||
"segment_count": 0,
|
||||
"average_segment_length": 0,
|
||||
"hit_count": null,
|
||||
"display_status": "queuing",
|
||||
"doc_form": "hierarchical_model",
|
||||
"doc_language": "Chinese Simplified"
|
||||
}
|
||||
```
|
||||
</CodeGroup>
|
||||
</Col>
|
||||
</Row>
|
||||
___
|
||||
<hr className='ml-0 mr-0' />
|
||||
|
||||
|
||||
<Heading
|
||||
url='/datasets/{dataset_id}/documents/status/{action}'
|
||||
method='PATCH'
|
||||
|
|
|
|||
|
|
@ -25,10 +25,13 @@ const Layout: FC<{
    }

    let appCode: string | null = null
    if (redirectUrl)
      appCode = redirectUrl?.split('/').pop() || null
    else
    if (redirectUrl) {
      const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
      appCode = url.pathname.split('/').pop() || null
    }
    else {
      appCode = pathname.split('/').pop() || null
    }

    if (!appCode)
      return
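The change above matters because `redirect_url` arrives percent-encoded, so splitting the raw query value on `/` can return an encoded fragment instead of the app code. A minimal sketch of the difference, using a made-up redirect value:

```ts
// Made-up example: an encoded redirect_url as it appears in the query string.
const redirectUrl = '%2Fchat%2Fabc123'

// Old approach: splitting the raw value yields the still-encoded tail.
const naive = redirectUrl.split('/').pop() // '%2Fchat%2Fabc123' — not an app code

// New approach: decode first, then let URL parse the pathname.
const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
const appCode = url.pathname.split('/').pop() // 'abc123'
```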
|
||||
|
|
|
|||
|
|
@ -25,7 +25,10 @@ export default function CheckCode() {
|
|||
const redirectUrl = searchParams.get('redirect_url')
|
||||
|
||||
const getAppCodeFromRedirectUrl = useCallback(() => {
|
||||
const appCode = redirectUrl?.split('/').pop()
|
||||
if (!redirectUrl)
|
||||
return null
|
||||
const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
|
||||
const appCode = url.pathname.split('/').pop()
|
||||
if (!appCode)
|
||||
return null
|
||||
|
||||
|
|
@ -62,7 +65,7 @@ export default function CheckCode() {
|
|||
localStorage.setItem('webapp_access_token', ret.data.access_token)
|
||||
const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: ret.data.access_token })
|
||||
await setAccessToken(appCode, tokenResp.access_token)
|
||||
router.replace(redirectUrl)
|
||||
router.replace(decodeURIComponent(redirectUrl))
|
||||
}
|
||||
}
|
||||
catch (error) { console.error(error) }
|
||||
|
|
|
|||
|
|
@ -23,7 +23,10 @@ const ExternalMemberSSOAuth = () => {
|
|||
}
|
||||
|
||||
const getAppCodeFromRedirectUrl = useCallback(() => {
|
||||
const appCode = redirectUrl?.split('/').pop()
|
||||
if (!redirectUrl)
|
||||
return null
|
||||
const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
|
||||
const appCode = url.pathname.split('/').pop()
|
||||
if (!appCode)
|
||||
return null
|
||||
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
'use client'
|
||||
import Link from 'next/link'
|
||||
import { useCallback, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
|
|
@ -33,7 +34,10 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut
|
|||
const redirectUrl = searchParams.get('redirect_url')
|
||||
|
||||
const getAppCodeFromRedirectUrl = useCallback(() => {
|
||||
const appCode = redirectUrl?.split('/').pop()
|
||||
if (!redirectUrl)
|
||||
return null
|
||||
const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
|
||||
const appCode = url.pathname.split('/').pop()
|
||||
if (!appCode)
|
||||
return null
|
||||
|
||||
|
|
@ -87,7 +91,7 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut
|
|||
localStorage.setItem('webapp_access_token', res.data.access_token)
|
||||
const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: res.data.access_token })
|
||||
await setAccessToken(appCode, tokenResp.access_token)
|
||||
router.replace(redirectUrl)
|
||||
router.replace(decodeURIComponent(redirectUrl))
|
||||
}
|
||||
else {
|
||||
Toast.notify({
|
||||
|
|
|
|||
|
|
@ -23,7 +23,10 @@ const SSOAuth: FC<SSOAuthProps> = ({
|
|||
|
||||
const redirectUrl = searchParams.get('redirect_url')
|
||||
const getAppCodeFromRedirectUrl = useCallback(() => {
|
||||
const appCode = redirectUrl?.split('/').pop()
|
||||
if (!redirectUrl)
|
||||
return null
|
||||
const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
|
||||
const appCode = url.pathname.split('/').pop()
|
||||
if (!appCode)
|
||||
return null
|
||||
|
||||
|
|
|
|||
|
|
@ -46,7 +46,10 @@ const WebSSOForm: FC = () => {
|
|||
}
|
||||
|
||||
const getAppCodeFromRedirectUrl = useCallback(() => {
|
||||
const appCode = redirectUrl?.split('/').pop()
|
||||
if (!redirectUrl)
|
||||
return null
|
||||
const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
|
||||
const appCode = url.pathname.split('/').pop()
|
||||
if (!appCode)
|
||||
return null
|
||||
|
||||
|
|
@ -63,20 +66,20 @@ const WebSSOForm: FC = () => {
|
|||
localStorage.setItem('webapp_access_token', tokenFromUrl)
|
||||
const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: tokenFromUrl })
|
||||
await setAccessToken(appCode, tokenResp.access_token)
|
||||
router.replace(redirectUrl)
|
||||
router.replace(decodeURIComponent(redirectUrl))
|
||||
return
|
||||
}
|
||||
if (appCode && redirectUrl && localStorage.getItem('webapp_access_token')) {
|
||||
const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: localStorage.getItem('webapp_access_token') })
|
||||
await setAccessToken(appCode, tokenResp.access_token)
|
||||
router.replace(redirectUrl)
|
||||
router.replace(decodeURIComponent(redirectUrl))
|
||||
}
|
||||
})()
|
||||
}, [getAppCodeFromRedirectUrl, redirectUrl, router, tokenFromUrl, message])
|
||||
|
||||
useEffect(() => {
|
||||
if (webAppAccessMode && webAppAccessMode === AccessMode.PUBLIC && redirectUrl)
|
||||
router.replace(redirectUrl)
|
||||
router.replace(decodeURIComponent(redirectUrl))
|
||||
}, [webAppAccessMode, router, redirectUrl])
|
||||
|
||||
if (tokenFromUrl) {
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@
|
|||
import type { FC } from 'react'
|
||||
import React, { useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import cn from '@/utils/classnames'
|
||||
import OperationBtn from '@/app/components/app/configuration/base/operation-btn'
|
||||
import {
|
||||
PortalToFollowElem,
|
||||
|
|
@ -28,11 +27,11 @@ type ItemProps = {
|
|||
const SelectItem: FC<ItemProps> = ({ text, type, value, Icon, onClick }) => {
|
||||
return (
|
||||
<div
|
||||
className='flex h-8 cursor-pointer items-center rounded-lg px-3 hover:bg-gray-50'
|
||||
className='flex h-8 cursor-pointer items-center rounded-lg px-3 hover:bg-state-base-hover'
|
||||
onClick={() => onClick(value)}
|
||||
>
|
||||
{Icon ? <Icon className='h-4 w-4 text-gray-500' /> : <InputVarTypeIcon type={type!} className='h-4 w-4 text-gray-500' />}
|
||||
<div className='ml-2 truncate text-xs text-gray-600'>{text}</div>
|
||||
{Icon ? <Icon className='h-4 w-4 text-text-secondary' /> : <InputVarTypeIcon type={type!} className='h-4 w-4 text-text-secondary' />}
|
||||
<div className='ml-2 truncate text-xs text-text-primary'>{text}</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
|
@ -57,17 +56,17 @@ const SelectVarType: FC<Props> = ({
|
|||
}}
|
||||
>
|
||||
<PortalToFollowElemTrigger onClick={() => setOpen(v => !v)}>
|
||||
<OperationBtn type='add' className={cn(open && 'bg-gray-200')} />
|
||||
<OperationBtn type='add' />
|
||||
</PortalToFollowElemTrigger>
|
||||
<PortalToFollowElemContent style={{ zIndex: 1000 }}>
|
||||
<div className='min-w-[192px] rounded-lg border border-gray-200 bg-white shadow-lg'>
|
||||
<div className='min-w-[192px] rounded-lg border border-components-panel-border bg-components-panel-bg-blur shadow-lg backdrop-blur-sm'>
|
||||
<div className='p-1'>
|
||||
<SelectItem type={InputVarType.textInput} value='string' text={t('appDebug.variableConfig.string')} onClick={handleChange}></SelectItem>
|
||||
<SelectItem type={InputVarType.paragraph} value='paragraph' text={t('appDebug.variableConfig.paragraph')} onClick={handleChange}></SelectItem>
|
||||
<SelectItem type={InputVarType.select} value='select' text={t('appDebug.variableConfig.select')} onClick={handleChange}></SelectItem>
|
||||
<SelectItem type={InputVarType.number} value='number' text={t('appDebug.variableConfig.number')} onClick={handleChange}></SelectItem>
|
||||
</div>
|
||||
<div className='h-[1px] bg-gray-100'></div>
|
||||
<div className='h-[1px] border-t border-components-panel-border'></div>
|
||||
<div className='p-1'>
|
||||
<SelectItem Icon={ApiConnection} value='api' text={t('appDebug.variableConfig.apiBasedVar')} onClick={handleChange}></SelectItem>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -80,6 +80,8 @@ import {
|
|||
import PluginDependency from '@/app/components/workflow/plugin-dependency'
|
||||
import { supportFunctionCall } from '@/utils/tool-call'
|
||||
import { MittProvider } from '@/context/mitt-context'
|
||||
import { fetchAndMergeValidCompletionParams } from '@/utils/completion-params'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
|
||||
type PublishConfig = {
|
||||
modelConfig: ModelConfig
|
||||
|
|
@ -453,7 +455,21 @@ const Configuration: FC = () => {
      ...visionConfig,
      enabled: supportVision,
    }, true)
    setCompletionParams({})

    try {
      const { params: filtered, removedDetails } = await fetchAndMergeValidCompletionParams(
        provider,
        modelId,
        completionParams,
      )
      if (Object.keys(removedDetails).length)
        Toast.notify({ type: 'warning', message: `${t('common.modelProvider.parametersInvalidRemoved')}: ${Object.entries(removedDetails).map(([k, reason]) => `${k} (${reason})`).join(', ')}` })
      setCompletionParams(filtered)
    }
    catch (e) {
      Toast.notify({ type: 'error', message: t('common.error') })
      setCompletionParams({})
    }
  }
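A rough sketch of what the merge step is assumed to do with `removedDetails`: keep only the params the newly selected model supports and report why each dropped key was removed. The names and rule shape below are assumptions, not the actual `fetchAndMergeValidCompletionParams` implementation.

```ts
// Assumed behaviour: filter the previous params against the keys the new
// model supports, collecting a reason for every dropped key.
type CompletionParams = Record<string, unknown>

function mergeValidParams(
  previous: CompletionParams,
  supportedKeys: string[],
): { params: CompletionParams, removedDetails: Record<string, string> } {
  const params: CompletionParams = {}
  const removedDetails: Record<string, string> = {}
  for (const [key, value] of Object.entries(previous)) {
    if (supportedKeys.includes(key))
      params[key] = value
    else
      removedDetails[key] = 'Not supported by the selected model'
  }
  return { params, removedDetails }
}
```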
|
||||
|
||||
const isShowVisionConfig = !!currModel?.features?.includes(ModelFeatureEnum.vision)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,27 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import { RiRefreshLine } from '@remixicon/react'
import cn from '@/utils/classnames'
import TooltipPlus from '@/app/components/base/tooltip'

type Props = {
  className?: string,
  popupContent?: string,
  onClick: () => void
}

const SyncButton: FC<Props> = ({
  className,
  popupContent = '',
  onClick,
}) => {
  return (
    <TooltipPlus popupContent={popupContent}>
      <div className={cn(className, 'cursor-pointer select-none rounded-md p-1 hover:bg-state-base-hover')} onClick={onClick}>
        <RiRefreshLine className='h-4 w-4 text-text-tertiary' />
      </div>
    </TooltipPlus>
  )
}
export default React.memo(SyncButton)
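A usage sketch for the new component; the import path, tooltip text, and refresh handler below are placeholders, not taken from this change.

```tsx
// Hypothetical parent component showing how SyncButton might be wired up.
import SyncButton from './sync-button' // assumed relative path to the new component

const DatasetHeader = () => (
  <div className='flex items-center gap-1'>
    <span>My dataset</span>
    <SyncButton popupContent='Sync from source' onClick={() => console.log('refresh triggered')} />
  </div>
)

export default DatasetHeader
```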
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
<svg id="Layer_2" data-name="Layer 2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 111 24" width="111" height="24">
|
||||
<g id="Layer_1-2" data-name="Layer 1">
|
||||
<rect style="fill: none;" y="0" width="111" height="24"/>
|
||||
<g id="Arize_-_standard" data-name="Arize - standard">
|
||||
<g>
|
||||
<path d="M100.94,14.55h-11.29c0,.13-.01.23.02.31.53,1.55,1.54,2.59,3.19,2.88,1.7.29,3.22,0,4.3-1.52.09-.13.28-.27.43-.28.99-.02,1.99-.01,2.99-.01-.16,2.69-3.89,4.98-7.6,4.7-3.99-.3-6.91-3.79-6.58-7.88.37-4.62,3.67-7.31,8.37-6.85,4.05.4,6.68,4.04,6.19,8.64ZM89.71,11.66h7.96c-.43-1.88-2.04-3.02-4.17-2.97-1.87.04-3.48,1.3-3.78,2.97Z"/>
|
||||
<path style="fill: #ff008c;" d="M20.81,2.53c.59-.03,1.11.26,1.49.8,3.05,4.38,6.09,8.77,9.13,13.16.63.91.43,1.99-.43,2.59-.84.59-1.97.35-2.62-.57-2.1-3.01-4.19-6.04-6.28-9.05-.04-.06-.08-.11-.12-.17-.83-1.17-1.65-1.18-2.47,0-2.11,3.05-4.23,6.09-6.34,9.14-.35.5-.77.88-1.4.95-.77.08-1.39-.19-1.79-.86-.41-.69-.38-1.38.07-2.03,1.01-1.47,2.04-2.93,3.06-4.4,2.01-2.89,4.03-5.77,6.03-8.67.39-.56.88-.9,1.67-.89Z"/>
|
||||
<path d="M52.84,20.43h-3.02v-1.16c-.1.02-.16.01-.19.03-2.46,1.73-5.09,1.88-7.68.51-2.61-1.38-3.71-3.77-3.68-6.67.03-3.26,1.82-5.96,4.64-6.86,2.35-.75,4.64-.67,6.69.93.04.03.09.03.2.07v-1.18h3.03v14.31ZM41.36,13.32c.01.17.02.46.05.75.22,2.09,1.76,3.58,3.91,3.76,2.1.18,3.8-.95,4.38-2.96.14-.47.2-.98.22-1.47.13-3.22-2.25-5.27-5.33-4.61-1.92.41-3.19,2.14-3.23,4.53Z"/>
|
||||
<path d="M80.41,8.9h-7.44v-2.77h11.64c0,.81.02,1.61-.01,2.41,0,.17-.19.35-.32.51-2.28,2.68-4.57,5.36-6.85,8.04-.11.13-.21.26-.38.47h7.53v2.86h-11.74c0-.82-.02-1.62.01-2.42,0-.16.17-.33.28-.47,2.32-2.74,4.64-5.47,6.96-8.21.09-.1.16-.21.32-.42Z"/>
|
||||
<path d="M59.39,20.54h-3.03V6.22h3.03v1.54c.8-.48,1.55-1.01,2.37-1.41.85-.41,1.79-.41,2.77-.35v2.94c-.16.01-.3.03-.45.03-2.37.03-4.01,1.36-4.51,3.69-.12.57-.16,1.16-.16,1.74-.02,1.85,0,3.71,0,5.56v.57Z"/>
|
||||
<path d="M67.1,6.09h2.99v14.33h-2.99V6.09Z"/>
|
||||
<path style="fill: #ff008c;" d="M20.73,19.53c1.25,0,2.24.96,2.25,2.19.01,1.24-1.02,2.29-2.24,2.28-1.23-.01-2.21-1.01-2.22-2.24,0-1.25.96-2.22,2.21-2.22Z"/>
|
||||
<path d="M70.7,2.11c0,1.19-.92,2.14-2.09,2.15-1.19.01-2.16-.95-2.16-2.14C66.46.95,67.4,0,68.58,0c1.18,0,2.12.93,2.12,2.11Z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.2 KiB |
|
|
@ -0,0 +1,17 @@
|
|||
<svg id="Layer_2" data-name="Layer 2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 74 16" width="74" height="16">
|
||||
<g id="Layer_1-2" data-name="Layer 1">
|
||||
<rect style="fill: none;" y="0" width="74" height="16"/>
|
||||
<g id="Arize_-_standard" data-name="Arize - standard">
|
||||
<g>
|
||||
<path d="M67.29,9.7h-7.52c0,.08,0,.15.01.21.35,1.03,1.03,1.73,2.13,1.92,1.13.19,2.14,0,2.86-1.01.06-.09.19-.18.29-.19.66-.02,1.33,0,1.99,0-.1,1.79-2.59,3.32-5.07,3.14-2.66-.2-4.61-2.53-4.39-5.25.25-3.08,2.44-4.88,5.58-4.56,2.7.27,4.45,2.69,4.12,5.76ZM59.81,7.77h5.3c-.28-1.25-1.36-2.01-2.78-1.98-1.25.03-2.32.87-2.52,1.98Z"/>
|
||||
<path style="fill: #ff008c;" d="M13.87,1.69c.4-.02.74.17.99.54,2.03,2.92,4.06,5.85,6.08,8.77.42.61.28,1.33-.29,1.73-.56.39-1.31.24-1.74-.38-1.4-2.01-2.79-4.02-4.19-6.04-.03-.04-.05-.08-.08-.11-.55-.78-1.1-.78-1.64,0-1.41,2.03-2.82,4.06-4.23,6.09-.23.34-.52.59-.93.63-.51.06-.92-.13-1.19-.57-.28-.46-.25-.92.05-1.35.68-.98,1.36-1.96,2.04-2.93,1.34-1.93,2.68-3.85,4.02-5.78.26-.37.59-.6,1.11-.59Z"/>
|
||||
<path d="M35.23,13.62h-2.01v-.77c-.07.01-.1,0-.13.02-1.64,1.16-3.39,1.25-5.12.34-1.74-.92-2.47-2.51-2.45-4.45.02-2.17,1.22-3.97,3.1-4.57,1.57-.5,3.09-.45,4.46.62.02.02.06.02.14.05v-.79h2.02v9.54ZM27.57,8.88c0,.11.01.31.03.5.14,1.39,1.18,2.39,2.61,2.51,1.4.12,2.53-.63,2.92-1.97.09-.32.14-.65.15-.98.09-2.15-1.5-3.51-3.56-3.07-1.28.27-2.13,1.43-2.15,3.02Z"/>
|
||||
<path d="M53.61,5.93h-4.96v-1.85h7.76c0,.54.01,1.07,0,1.61,0,.12-.12.24-.21.34-1.52,1.79-3.05,3.57-4.57,5.36-.07.09-.14.18-.26.32h5.02v1.91h-7.83c0-.54-.01-1.08,0-1.61,0-.11.11-.22.19-.31,1.55-1.83,3.1-3.65,4.64-5.47.06-.07.11-.14.21-.28Z"/>
|
||||
<path d="M39.6,13.69h-2.02V4.15h2.02v1.03c.54-.32,1.04-.68,1.58-.94.57-.28,1.19-.27,1.85-.23v1.96c-.1,0-.2.02-.3.02-1.58.02-2.68.9-3.01,2.46-.08.38-.11.77-.11,1.16-.01,1.24,0,2.47,0,3.71v.38Z"/>
|
||||
<path d="M44.74,4.06h1.99v9.56h-1.99V4.06Z"/>
|
||||
<path style="fill: #ff008c;" d="M13.82,13.02c.84,0,1.49.64,1.5,1.46,0,.83-.68,1.53-1.5,1.52-.82,0-1.47-.67-1.48-1.5,0-.83.64-1.48,1.47-1.48Z"/>
|
||||
<path d="M47.13,1.41c0,.8-.61,1.43-1.39,1.43-.8,0-1.44-.63-1.44-1.43,0-.78.63-1.41,1.42-1.41.79,0,1.41.62,1.41,1.41Z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.2 KiB |
|
|
@ -0,0 +1,97 @@
|
|||
<svg id="Layer_2" data-name="Layer 2" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 111 24" width="111" height="24">
|
||||
<defs>
|
||||
<linearGradient id="linear-gradient" x1="9.07" y1="1.47" x2="19.77" y2="20" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#11bab5"/>
|
||||
<stop offset=".5" stop-color="#00adee"/>
|
||||
<stop offset="1" stop-color="#0094c5"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="linear-gradient-2" x1="19.48" y1="16.3" x2="10.46" y2=".67" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#fcfdff" stop-opacity="0"/>
|
||||
<stop offset=".12" stop-color="#fcfdff" stop-opacity=".17"/>
|
||||
<stop offset=".3" stop-color="#fcfdff" stop-opacity=".42"/>
|
||||
<stop offset=".47" stop-color="#fcfdff" stop-opacity=".63"/>
|
||||
<stop offset=".64" stop-color="#fcfdff" stop-opacity=".79"/>
|
||||
<stop offset=".78" stop-color="#fcfdff" stop-opacity=".9"/>
|
||||
<stop offset=".91" stop-color="#fcfdff" stop-opacity=".97"/>
|
||||
<stop offset="1" stop-color="#fcfdff"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="linear-gradient-3" x1="16.82" y1="16.21" x2="10.32" y2="4.94" xlink:href="#linear-gradient-2"/>
|
||||
<linearGradient id="linear-gradient-4" x1="15.92" y1="17.03" x2="12.29" y2="10.74" xlink:href="#linear-gradient-2"/>
|
||||
<linearGradient id="linear-gradient-5" x1="16.08" y1="18.3" x2="13.82" y2="14.38" xlink:href="#linear-gradient-2"/>
|
||||
<linearGradient id="linear-gradient-6" x1="12.26" y1="17.94" x2="12.26" y2="22.07" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#231f20"/>
|
||||
<stop offset=".03" stop-color="#231f20" stop-opacity=".9"/>
|
||||
<stop offset=".22" stop-color="#231f20" stop-opacity=".4"/>
|
||||
<stop offset=".42" stop-color="#231f20" stop-opacity=".1"/>
|
||||
<stop offset=".65" stop-color="#231f20" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="Fade_to_Black_2" data-name="Fade to Black 2" x1="16.89" y1="17.55" x2="16.89" y2="19.66" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#231f20"/>
|
||||
<stop offset="1" stop-color="#231f20" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="linear-gradient-7" x1="17.91" y1="12.1" x2="17.91" y2="10.38" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#231f20"/>
|
||||
<stop offset=".03" stop-color="#231f20" stop-opacity=".95"/>
|
||||
<stop offset=".51" stop-color="#231f20" stop-opacity=".26"/>
|
||||
<stop offset=".81" stop-color="#231f20" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="linear-gradient-8" x1="21.67" y1="13.37" x2="21.67" y2="9.21" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#231f20"/>
|
||||
<stop offset=".18" stop-color="#231f20" stop-opacity=".48"/>
|
||||
<stop offset=".38" stop-color="#231f20" stop-opacity=".12"/>
|
||||
<stop offset=".58" stop-color="#231f20" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="Fade_to_Black_2-2" data-name="Fade to Black 2" x1="25.54" y1="16.65" x2="24.1" y2="14.16" xlink:href="#Fade_to_Black_2"/>
|
||||
<linearGradient id="linear-gradient-9" x1="26.7" y1="15.97" x2="24.55" y2="12.24" xlink:href="#linear-gradient-2"/>
|
||||
<linearGradient id="linear-gradient-10" x1="21.11" y1="15.34" x2="24.24" y2="13.53" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#231f20"/>
|
||||
<stop offset=".31" stop-color="#231f20" stop-opacity=".5"/>
|
||||
<stop offset=".67" stop-color="#231f20" stop-opacity=".13"/>
|
||||
<stop offset="1" stop-color="#231f20" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<g id="Layer_1-2" data-name="Layer 1">
|
||||
<rect style="fill: none;" y=".04" width="111" height="24"/>
|
||||
<g id="Phoenix_horiz_-_gradient" data-name="Phoenix horiz - gradient">
|
||||
<path style="fill: url(#linear-gradient);" d="M26.87,15c-.06-.28-.2-.53-.42-.76-.08-.09-.18-.17-.28-.25-.05-.04-.1-.07-.15-.1-.07-.04-.14-.08-.21-.12-.28-.14-.51-.29-.72-.44-.08-.06-.17-.13-.24-.19-.19-.16-.31-.3-.4-.45-.03-.05-.06-.1-.08-.15-.01-.02-.03-.04-.05-.05-.03-.02-.08-.04-.12-.03-.07.01-.12.07-.13.13,0,.05,0,.11,0,.17,0,.03-.01.07-.04.08,0,0-.01,0-.02,0-.04,0-.08.03-.11.07-.03.04-.03.08-.02.13,0,.04.02.06.03.09v.02s.02.03.03.04c.02.04.04.08.07.12.05.07.1.14.17.22.02.02.02.06,0,.09-.02.03-.05.04-.08.04-.37-.05-.7-.13-1-.25-.04-.01-.07-.03-.11-.04-.28-.12.22-.61.45-.96,1-1.53,1.2-3.29,1.21-5.07,0-1.72-.18-3.41-.52-5.08h0c-.22-1.4-.41-1.93-.53-2.13-.03-.05-.05-.08-.08-.1-.03-.02-.05-.01-.05-.01-.06,0-.14.06-.23.15-.37.39-.35.86-.25,1.34.47,2.21.82,4.43.66,6.7-.11,1.56-.4,3.07-1.5,4.3-.07.07-.14.14-.21.21-.08.08-.21.24-.3.17-.13-.09-.01-.27.03-.36.64-1.5.82-3.07.73-4.69,0-.04,0-.07,0-.11h0s0-.06,0-.09c-.02-.21-.04-.43-.06-.64-.28-2.56-.61-2.73-.61-2.73h0s-.05-.03-.09-.04c-.17-.04-.27.12-.35.23-.35.52-.19,1.07-.08,1.62.39,1.92.4,3.82-.28,5.69-.06.17-.14.34-.21.5-.08.17-.14.39-.42.3-.26-.09-.19-.29-.16-.47.09-.47.16-.93.2-1.4h0s0-.09.01-.13c0-.08.01-.16.02-.24,0-.07,0-.13.01-.2,0-.03,0-.07,0-.1.05-1.48-.2-2.16-.29-2.36-.01-.02-.02-.05-.03-.07h0s0,0,0,0c-.1-.15-.26-.13-.4,0-.26.25-.4.56-.33.93.19,1.1.08,2.2-.13,3.28-.03.15-.09.41-.4.34-.21-.05-.26-.14-.27-.32,0-.14,0-.28,0-.42,0,0,0,0,0-.01h0s0-.04,0-.06h0s0-.02,0-.03c0-.19-.02-.38-.05-.57,0-.02,0-.04,0-.05-.12-1.07-.27-1.17-.27-1.17-.1-.13-.25-.11-.39.03-.26.25-.39.55-.33.93.04.28.03.19.06.43.02.12.05.35.05.42,0,.06-.02.12-.06.16-.13.14-.34,0-.5-.07-3.12-1.29-5.79-3.1-8.12-5.4-.92-.94-2.48-2.83-2.48-2.83h0c-.06-.07-.13-.11-.24-.06-.2.09-.2.35-.22.57-.04.44.2.76.45,1.06,3.52,4.11,7.82,7.06,13,8.54l.93.25s.02,0,.03,0c0,0,0,0,0,0l.8.21h.02s-.06.02-.09.03c-.88.17-2.63-.15-4.04-.47-.7-.15-1.38-.32-2.05-.53-.03,0-.05-.01-.05-.01h0c-2.6-.83-4.97-2.17-7.05-4.21-.2-.19-.38-.4-.56-.61-.16-.2-.5-.61-.62-.75-.01-.02-.03-.03-.04-.05h0s0,0,0,0c-.04-.04-.08-.06-.14-.05-.14.03-.19.18-.21.31-.11.51.05.93.39,1.31,2.13,2.39,4.8,3.91,7.81,4.91,1.71.57,3.46.91,5.25,1.13-1.07.35-2.16.45-3.27.42-2.14-.05-4.15-.57-6.04-1.49-.7-.36-1.34-.76-1.52-.86-.1-.07-.2-.11-.3-.03-.19.15-.06.4,0,.6.12.38.38.65.73.83,3.08,1.63,6.32,2.28,9.78,1.7.35-.06.68-.12,1.05-.25-.58.5-1.25.83-1.95,1.08-2.17.79-4.32.76-6.46.22-.69-.18-1.49-.48-1.49-.48h0c-.12-.05-.23-.07-.32.06-.16.22-.02.46.12.67.32.5.94.55,1.43.72.17.05-.29.36-.43.47-.71.54-1.4,1.04-2.11,1.56l-.46.34h0c-.08.05-.15.12-.11.23.04.13.18.15.31.18.51.1.94-.06,1.33-.36.86-.64,1.73-1.26,2.56-1.93.32-.25.61-.23,1.04-.12-1.09.82-2.11,1.59-3.13,2.36-1.03.78-2.07,1.56-3.1,2.33l-.68.51s-.02.01-.03.02h-.01s0,0,0,0c-.06.05-.1.1-.09.17.03.15.21.19.36.22.61.11.99-.12,1.41-.44,2.09-1.57,4.19-3.13,6.27-4.71.47-.36.95-.56,1.62-.48-.57.43-1.12.84-1.67,1.25l-1.47,1.09s-.02.02-.03.02h0c-.08.06-.13.13-.1.24.06.19.29.22.49.25.37.05.68-.08.96-.29.17-.12.33-.24.5-.37h0s2.25-1.79,2.25-1.79c0,0,.53-.38,1.15-.73.05-.03.11-.07.17-.1.02-.01.08-.04.17-.08.07-.03.13-.07.2-.1.1-.05.21-.11.33-.18.36-.15,1.4-.64,2.26-1.55.41-.32.98-.73,1.43-.92h0s0,0,0,0c.03-.02.07-.03.1-.04h0s0,0,0,0c.02,0,.04-.02.06-.02l.06-.02c.16-.05.43-.06.46.29,0,.09,0,.18,0,.27,0,.07-.02.15-.03.21-.01.06.01.12.06.15,0,0,.02.01.02.01.06.03.14.02.18-.03.02-.02.03-.04.05-.06.22-.23.44-.39.68-.49.35-.14.7-.14,1.07,0,.18.07.35.16.51.28.07.05.14.11.21.17.04.04.08.08.12.12h0s.03.04.03.04c0,0,.01.01.02.02.04.03.08.04.12.03.05-.01.09-.05.11-.1,0,0,0-.02.01-.03.11-.31.13-.6.07-.89Z"/>
|
||||
<path style="fill: #fddab4;" d="M20.22,11.62c-.03.16-.05.31-.08.47.03-.16.06-.31.08-.47h0Z"/>
|
||||
<path style="fill: #fddab4;" d="M22.99,13.35s.02,0,.03.01c-.01,0-.02,0-.03-.01"/>
|
||||
<polyline style="fill: #fddab4;" points="21.68 12.5 21.68 12.5 21.68 12.5"/>
|
||||
<polyline style="fill: #fddab4;" points="21.73 12.39 21.73 12.39 21.73 12.39"/>
|
||||
<polyline style="fill: #fddab4;" points="21.74 12.37 21.73 12.38 21.74 12.37"/>
|
||||
<polyline style="fill: #fddab4;" points="23.45 12.37 23.45 12.38 23.45 12.37"/>
|
||||
<path style="fill: #fddab4;" d="M20.72,12.26s-.04.08-.06.12c.02-.04.04-.08.06-.12"/>
|
||||
<polyline style="fill: #fddab4;" points="18.86 12.17 18.86 12.17 18.86 12.17"/>
|
||||
<polyline style="fill: #fddab4;" points="18.58 12.04 18.58 12.04 18.58 12.04"/>
|
||||
<polyline style="fill: #fddab4;" points="18.58 12.04 18.58 12.04 18.58 12.04"/>
|
||||
<polyline style="fill: #fddab4;" points="18.58 12.04 18.58 12.04 18.58 12.04"/>
|
||||
<polyline style="fill: #fddab4;" points="18.58 12.04 18.58 12.04 18.58 12.04"/>
|
||||
<polyline style="fill: #fddab4;" points="18.58 12.03 18.58 12.04 18.58 12.03"/>
|
||||
<polyline style="fill: #fddab4;" points="18.52 11.84 18.52 11.84 18.52 11.84"/>
|
||||
<path style="fill: #fddab4;" d="M19.22,11.72s-.01.06-.02.09c0-.03.01-.06.02-.09"/>
|
||||
<path style="fill: #fddab4;" d="M17.51,11.51s-.04.04-.07.05c.02,0,.05-.02.07-.05"/>
|
||||
<path style="fill: #e5b3a5;" d="M18.58,12.04s.04.04.07.06h0s-.05-.04-.07-.06M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.57,12.03s0,0,0,0c0,0,0,0,0,0M18.52,11.84s0,0,0,0c0,0,0,0,0,0M18.52,11.84h0s0,0,0,0c0,0,0,0,0,0M17.17,11.51s0,0,0,0c.06.03.13.05.19.05.03,0,.05,0,.07-.01-.02,0-.05.01-.07.01-.06,0-.13-.02-.19-.05M18.52,11.31s0,0,0,0c0,0,0,0,0,0"/>
|
||||
<path style="fill: #e5b3a5;" d="M23.02,13.37s.01,0,.02,0h0s-.01,0-.02,0M21.78,12.86s0,0,0,0c0,0,0,0,0,0M21.59,12.76s.02.08.06.11c.02.01.03.02.05.02.03,0,.05-.01.08-.03-.03.02-.05.03-.08.03-.02,0-.03,0-.05-.02-.04-.03-.06-.07-.06-.11M21.68,12.5s0,.01,0,.02c0,0,0-.01,0-.02M21.73,12.39s-.03.07-.04.11c.01-.03.03-.07.04-.11M21.73,12.38s0,0,0,.01c0,0,0,0,0-.01M23.45,12.38s0,0,0,0c0,0,0,0,0,0M23.45,12.37s0,0,0,0c0,0,0,0,0,0M21.74,12.37s0,0,0,0c0,0,0,0,0,0M20.1,12.32c0,.1.04.19.19.24.05.02.09.02.12.02.13,0,.19-.09.24-.2-.05.1-.11.2-.24.2-.04,0-.08,0-.12-.02-.15-.05-.19-.14-.19-.24M18.86,12.17h0,0M19.11,12.07c-.05.06-.11.1-.22.1-.01,0-.02,0-.03,0,.01,0,.02,0,.03,0,.1,0,.17-.04.22-.1M21.08,11.32s0,0,0,0c-.05.15-.09.3-.15.44-.06.17-.14.34-.21.5h0c.08-.16.15-.33.21-.5.05-.15.1-.3.15-.45M19.3,11.23c-.02.16-.05.33-.08.49.03-.16.06-.33.08-.49h0M23.28,10.45s0,0,0,0c-.22.73-.57,1.42-1.12,2.04-.07.07-.14.14-.21.21h0c.07-.07.14-.14.21-.21.55-.62.9-1.31,1.12-2.04"/>
|
||||
<path style="fill: url(#linear-gradient-2); opacity: .4;" d="M6.25,3.13s-.06,0-.09.02c-.14.06-.18.21-.2.37.78,1.02,3.84,4.77,8.67,7.35,0,0,4.72,2.49,9.48,2.76-.37-.05-.7-.13-1-.25-.02,0-.04-.01-.05-.02h0c-1.73-.37-3.2-.84-4.24-1.21.02,0,.04,0,.06,0-.02,0-.04,0-.06,0-.06-.01-.11-.03-.15-.05-.82-.3-1.35-.53-1.47-.59-.06-.03-.12-.06-.17-.08-3.12-1.29-5.79-3.1-8.12-5.4-.92-.94-2.48-2.83-2.48-2.83h0s-.09-.08-.15-.08"/>
|
||||
<path style="fill: url(#linear-gradient-3); opacity: .4;" d="M6.59,7.12s-.02,0-.03,0c-.14.03-.19.18-.21.31,0,.02,0,.05-.01.07,2.41,3.27,5.65,4.6,5.65,4.6,3.23,1.52,5.88,1.83,7.5,1.83.67,0,1.16-.05,1.44-.09-.13.01-.27.02-.42.02-.95,0-2.3-.26-3.43-.52-.7-.15-1.38-.32-2.05-.53-.03,0-.05-.01-.05-.01h0c-2.6-.83-4.97-2.17-7.05-4.21-.2-.19-.38-.4-.56-.61-.16-.2-.5-.61-.62-.75-.01-.02-.03-.03-.04-.05h0s0,0,0,0c-.03-.03-.06-.05-.1-.05"/>
|
||||
<path style="fill: url(#linear-gradient-4); opacity: .4;" d="M8.78,12.8s-.08.01-.12.04c-.16.13-.09.34-.02.52,1.21.73,3.98,2.16,7.27,2.16,1.24,0,2.55-.2,3.88-.72-.96.31-1.94.43-2.94.43-.11,0-.22,0-.33,0-2.14-.05-4.15-.57-6.04-1.49-.7-.36-1.34-.76-1.52-.86-.06-.04-.12-.07-.18-.07"/>
|
||||
<path style="fill: url(#linear-gradient-5); opacity: .4;" d="M20.02,15.9c-.53.4-1.12.68-1.74.91-1.16.42-2.32.61-3.47.61-1,0-1.99-.14-2.99-.39-.69-.18-1.49-.48-1.49-.48h0c-.05-.02-.1-.04-.15-.04-.06,0-.12.03-.17.1-.07.1-.08.21-.06.32.76.35,2.4.98,4.39.98,1.73,0,3.73-.47,5.67-2.01"/>
|
||||
<path style="fill: url(#linear-gradient-6); opacity: .4;" d="M14.87,18.37c-1.87,0-3.29-.38-3.47-.43.05.02.1.03.15.05.17.05-.29.36-.43.47-.52.4-1.04.78-1.56,1.16h1.59c.5-.37,1-.74,1.49-1.13.18-.14.35-.2.55-.2.15,0,.31.03.49.08-1.09.82-2.11,1.59-3.13,2.36-.6.45-1.19.9-1.79,1.34h1.6c1.44-1.08,2.88-2.15,4.31-3.24.33-.25.67-.42,1.07-.48-.3.02-.59.03-.88.03Z"/>
|
||||
<path style="fill: url(#Fade_to_Black_2); opacity: .4;" d="M14.54,19.66h1.55l1.14-.91s.53-.38,1.15-.73c.05-.03.11-.07.17-.1.02-.01.08-.04.17-.08.07-.03.13-.07.2-.1.1-.05.22-.11.35-.19-1.1.46-2.23.69-3.28.77.01,0,.03,0,.04,0,.09,0,.18,0,.27.02-.57.43-1.12.84-1.67,1.25l-.09.07Z"/>
|
||||
<path style="fill: url(#linear-gradient-7); opacity: .4;" d="M18.52,11.84c0-.14,0-.28,0-.42h0s0-.01,0-.01c0-.02,0-.04,0-.06h0s0-.03,0-.03c0-.12-.01-.23-.03-.35-.37-.16-.72-.35-1.04-.59,0,.03,0,.07,0,.1.04.28.03.19.06.43.02.12.05.35.05.42,0,.06-.02.12-.06.16-.04.04-.09.06-.14.06-.06,0-.13-.02-.19-.05.12.06.65.29,1.48.59-.09-.05-.12-.14-.12-.27Z"/>
|
||||
<path style="fill: url(#linear-gradient-8); opacity: .4;" d="M24.54,9.21c-.34.48-.77.9-1.26,1.24-.22.73-.57,1.42-1.12,2.04-.07.07-.14.14-.21.21-.07.07-.17.19-.25.19-.02,0-.03,0-.05-.02-.13-.09-.01-.27.03-.36.21-.48.37-.98.48-1.47-.35.13-.71.22-1.08.27-.05.15-.09.3-.15.44-.06.17-.14.34-.21.5-.07.14-.12.32-.3.32-.04,0-.08,0-.12-.02-.26-.09-.19-.29-.16-.47.05-.24.09-.49.12-.73-.33-.01-.65-.05-.96-.12-.03.19-.06.39-.1.58-.02.13-.08.35-.31.35-.03,0-.06,0-.09-.01,1.04.37,2.51.84,4.24,1.21h0s-.03-.01-.05-.02c-.28-.12.22-.61.45-.96.64-.98.95-2.06,1.1-3.18Z"/>
|
||||
<path style="fill: url(#Fade_to_Black_2-2); opacity: .5;" d="M24.28,14.56c-.22,0-.43.02-.65.06-.04,0-.08.01-.12.01-.06,0-.12,0-.17-.03,0,.01.02.02.03.03.09.13.14.27.16.42.02.03.04.06.06.1h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0t0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,.07,0,.1c0,.05,0,.11,0,.17,0,.07-.02.15-.03.21,0,0,0,.02,0,.03,0,.05.02.09.06.12h.02s.05.03.07.03c.04,0,.08-.02.11-.05.02-.02.03-.04.05-.06.22-.23.44-.39.68-.49.17-.07.35-.11.53-.11s.36.04.55.11c.18.07.35.16.51.28.07.05.14.11.21.17.04.04.08.08.12.12h0s.03.04.03.04l.02.02s.06.03.09.03c-.15-.32-.33-.61-.6-.84-.5-.43-1.13-.62-1.77-.62"/>
|
||||
<path style="fill: url(#linear-gradient-9); opacity: .4;" d="M24.22,12.45h-.03c-.07.01-.12.07-.13.14,0,.05,0,.11,0,.17,0,.02,0,.04-.01.05.27.4.84,1.1,1.72,1.36,0,0,1.36.71,1.01,1.74h0v-.03c.12-.31.14-.6.08-.89-.06-.28-.2-.53-.42-.76-.08-.09-.18-.17-.28-.25-.05-.04-.1-.07-.15-.1-.07-.04-.14-.08-.21-.12-.28-.14-.51-.29-.72-.44-.08-.06-.17-.13-.24-.19-.19-.16-.31-.3-.4-.45-.03-.05-.06-.1-.08-.15-.01-.02-.03-.04-.05-.05-.03-.02-.06-.03-.09-.03"/>
|
||||
<path style="fill: url(#linear-gradient-10); opacity: .5;" d="M22.5,15.32l.28-.16,1.16-1.55s-.63-.12-.94-.26c0,0-.32,1.45-1.49,2.66l.43-.32c.17-.12.33-.23.56-.37Z"/>
|
||||
<g>
|
||||
<path style="fill: #404041;" d="M38,7.1h2.95c2.13,0,3.11,1.1,3.11,3.35v1.12c0,2.25-.98,3.35-3.11,3.35h-1.71v6.14h-1.24V7.1ZM40.95,13.78c1.3,0,1.87-.58,1.87-2.15v-1.26c0-1.55-.58-2.15-1.87-2.15h-1.71v5.56h1.71Z"/>
|
||||
<path style="fill: #404041;" d="M48.61,7.1h1.24v6.12h3.81v-6.12h1.24v13.95h-1.24v-6.72h-3.81v6.72h-1.24V7.1Z"/>
|
||||
<path style="fill: #404041;" d="M59.73,17.84v-7.54c0-2.21,1.12-3.41,3.13-3.41s3.13,1.2,3.13,3.41v7.54c0,2.21-1.12,3.41-3.13,3.41s-3.13-1.2-3.13-3.41ZM64.75,17.92v-7.69c0-1.5-.68-2.21-1.89-2.21s-1.89.72-1.89,2.21v7.69c0,1.5.68,2.19,1.89,2.19s1.89-.7,1.89-2.19Z"/>
|
||||
<path style="fill: #404041;" d="M70.85,7.1h5.58v1.12h-4.35v5h3.57v1.12h-3.57v5.58h4.35v1.14h-5.58V7.1Z"/>
|
||||
<path style="fill: #404041;" d="M80.9,7.1h1.63l3.63,10.78V7.1h1.16v13.95h-1.32l-3.97-11.9v11.9h-1.14V7.1Z"/>
|
||||
<path style="fill: #404041;" d="M92.32,7.1h1.24v13.95h-1.24V7.1Z"/>
|
||||
<path style="fill: #404041;" d="M100.79,13.94l-2.73-6.84h1.32l2.19,5.54,2.21-5.54h1.2l-2.73,6.84,2.85,7.12h-1.32l-2.31-5.86-2.33,5.86h-1.2l2.85-7.12Z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
@@ -0,0 +1,97 @@
<svg id="Layer_2" data-name="Layer 2" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 74 16" width="74" height="16">
|
||||
<defs>
|
||||
<linearGradient id="linear-gradient" x1="6.05" y1=".98" x2="13.18" y2="13.34" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#11bab5"/>
|
||||
<stop offset=".5" stop-color="#00adee"/>
|
||||
<stop offset="1" stop-color="#0094c5"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="linear-gradient-2" x1="12.99" y1="10.87" x2="6.97" y2=".45" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#fcfdff" stop-opacity="0"/>
|
||||
<stop offset=".12" stop-color="#fcfdff" stop-opacity=".17"/>
|
||||
<stop offset=".3" stop-color="#fcfdff" stop-opacity=".42"/>
|
||||
<stop offset=".47" stop-color="#fcfdff" stop-opacity=".63"/>
|
||||
<stop offset=".64" stop-color="#fcfdff" stop-opacity=".79"/>
|
||||
<stop offset=".78" stop-color="#fcfdff" stop-opacity=".9"/>
|
||||
<stop offset=".91" stop-color="#fcfdff" stop-opacity=".97"/>
|
||||
<stop offset="1" stop-color="#fcfdff"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="linear-gradient-3" x1="11.21" y1="10.8" x2="6.88" y2="3.29" xlink:href="#linear-gradient-2"/>
|
||||
<linearGradient id="linear-gradient-4" x1="10.62" y1="11.35" x2="8.2" y2="7.16" xlink:href="#linear-gradient-2"/>
|
||||
<linearGradient id="linear-gradient-5" x1="10.72" y1="12.2" x2="9.21" y2="9.59" xlink:href="#linear-gradient-2"/>
|
||||
<linearGradient id="linear-gradient-6" x1="8.17" y1="11.96" x2="8.17" y2="14.71" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#231f20"/>
|
||||
<stop offset=".03" stop-color="#231f20" stop-opacity=".9"/>
|
||||
<stop offset=".22" stop-color="#231f20" stop-opacity=".4"/>
|
||||
<stop offset=".42" stop-color="#231f20" stop-opacity=".1"/>
|
||||
<stop offset=".65" stop-color="#231f20" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="Fade_to_Black_2" data-name="Fade to Black 2" x1="11.26" y1="11.7" x2="11.26" y2="13.1" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#231f20"/>
|
||||
<stop offset="1" stop-color="#231f20" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="linear-gradient-7" x1="11.94" y1="8.07" x2="11.94" y2="6.92" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#231f20"/>
|
||||
<stop offset=".03" stop-color="#231f20" stop-opacity=".95"/>
|
||||
<stop offset=".51" stop-color="#231f20" stop-opacity=".26"/>
|
||||
<stop offset=".81" stop-color="#231f20" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="linear-gradient-8" x1="14.45" y1="8.92" x2="14.45" y2="6.14" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#231f20"/>
|
||||
<stop offset=".18" stop-color="#231f20" stop-opacity=".48"/>
|
||||
<stop offset=".38" stop-color="#231f20" stop-opacity=".12"/>
|
||||
<stop offset=".58" stop-color="#231f20" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="Fade_to_Black_2-2" data-name="Fade to Black 2" x1="17.03" y1="11.1" x2="16.07" y2="9.44" xlink:href="#Fade_to_Black_2"/>
|
||||
<linearGradient id="linear-gradient-9" x1="17.8" y1="10.64" x2="16.36" y2="8.16" xlink:href="#linear-gradient-2"/>
|
||||
<linearGradient id="linear-gradient-10" x1="14.07" y1="10.22" x2="16.16" y2="9.02" gradientUnits="userSpaceOnUse">
|
||||
<stop offset="0" stop-color="#231f20"/>
|
||||
<stop offset=".31" stop-color="#231f20" stop-opacity=".5"/>
|
||||
<stop offset=".67" stop-color="#231f20" stop-opacity=".13"/>
|
||||
<stop offset="1" stop-color="#231f20" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<g id="Layer_1-2" data-name="Layer 1">
|
||||
<rect style="fill: none;" y=".02" width="74" height="16"/>
|
||||
<g id="Phoenix_horiz_-_gradient" data-name="Phoenix horiz - gradient">
|
||||
<path style="fill: url(#linear-gradient);" d="M17.91,10c-.04-.19-.14-.36-.28-.51-.06-.06-.12-.11-.19-.16-.03-.02-.07-.05-.1-.07-.04-.03-.09-.05-.14-.08-.19-.09-.34-.19-.48-.29-.06-.04-.11-.09-.16-.13-.12-.11-.21-.2-.27-.3-.02-.03-.04-.07-.05-.1,0-.01-.02-.03-.03-.04-.02-.02-.05-.02-.08-.02-.04,0-.08.04-.08.09,0,.03,0,.07,0,.11,0,.02,0,.04-.03.05,0,0,0,0-.02,0-.03,0-.06.02-.07.04-.02.02-.02.05-.02.08,0,.03.01.04.02.06h0s0,.03.01.04c.01.03.03.05.05.08.03.05.07.09.11.15.01.02.02.04,0,.06-.01.02-.03.03-.05.03-.25-.03-.46-.09-.67-.16-.02,0-.05-.02-.07-.03-.18-.08.15-.41.3-.64.66-1.02.8-2.19.81-3.38,0-1.15-.12-2.27-.35-3.39h0c-.15-.94-.27-1.29-.35-1.42-.02-.03-.04-.06-.06-.07-.02-.01-.03,0-.03,0-.04,0-.09.04-.15.1-.25.26-.23.57-.17.89.31,1.47.55,2.95.44,4.47-.07,1.04-.27,2.05-1,2.86-.04.05-.09.1-.14.14-.05.05-.14.16-.2.11-.08-.06,0-.18.02-.24.43-1,.55-2.05.48-3.12,0-.02,0-.05,0-.07h0s0-.04,0-.06c-.01-.14-.02-.28-.04-.43-.18-1.71-.4-1.82-.4-1.82h0s-.03-.02-.06-.03c-.12-.02-.18.08-.23.15-.23.35-.13.72-.05,1.08.26,1.28.27,2.55-.18,3.79-.04.11-.09.22-.14.33-.05.12-.09.26-.28.2-.18-.06-.13-.19-.1-.31.06-.31.11-.62.14-.93h0s0-.06,0-.09c0-.05,0-.11.01-.16,0-.05,0-.09,0-.13,0-.02,0-.04,0-.07.04-.99-.13-1.44-.19-1.57,0-.02-.01-.03-.02-.04h0s0,0,0,0c-.07-.1-.17-.09-.27,0-.17.17-.27.37-.22.62.13.74.05,1.46-.08,2.19-.02.1-.06.27-.27.23-.14-.03-.17-.09-.18-.21,0-.09,0-.19,0-.28,0,0,0,0,0,0h0s0-.03,0-.04h0s0-.01,0-.02c0-.13-.02-.25-.03-.38,0-.01,0-.02,0-.04-.08-.71-.18-.78-.18-.78-.07-.09-.17-.07-.26.02-.17.17-.26.37-.22.62.03.19.02.13.04.29.01.08.03.23.03.28,0,.04-.01.08-.04.11-.09.09-.23,0-.34-.05-2.08-.86-3.86-2.07-5.42-3.6-.61-.62-1.65-1.88-1.65-1.88h0s-.09-.07-.16-.04c-.14.06-.13.23-.14.38-.02.29.13.51.3.7,2.35,2.74,5.21,4.71,8.67,5.69l.62.16s.01,0,.02,0c0,0,0,0,0,0l.54.14h.01s-.04.01-.06.02c-.59.12-1.76-.1-2.69-.32-.46-.1-.92-.22-1.36-.36-.02,0-.03,0-.03,0h0c-1.73-.55-3.32-1.44-4.7-2.81-.13-.13-.25-.27-.37-.41-.11-.13-.34-.4-.41-.5,0-.01-.02-.02-.03-.03h0s0,0,0,0c-.02-.02-.05-.04-.09-.03-.1.02-.12.12-.14.21-.07.34.04.62.26.88,1.42,1.59,3.2,2.61,5.21,3.28,1.14.38,2.31.61,3.5.76-.71.23-1.44.3-2.18.28-1.43-.04-2.77-.38-4.03-.99-.46-.24-.9-.5-1.01-.58-.07-.04-.13-.07-.2-.02-.13.1-.04.27,0,.4.08.26.25.43.49.55,2.05,1.08,4.22,1.52,6.52,1.13.23-.04.45-.08.7-.16-.39.33-.83.55-1.3.72-1.44.53-2.88.51-4.31.15-.46-.12-.99-.32-.99-.32h0c-.08-.03-.15-.05-.21.04-.1.15-.01.3.08.45.21.33.63.36.95.48.11.04-.19.24-.29.31-.47.36-.94.69-1.41,1.04l-.31.23h0c-.05.04-.1.08-.08.15.03.09.12.1.2.12.34.07.62-.04.89-.24.57-.43,1.15-.84,1.71-1.28.21-.17.4-.15.7-.08-.73.55-1.4,1.06-2.08,1.57-.69.52-1.38,1.04-2.07,1.56l-.46.34s-.01,0-.02.01h0s0,0,0,0c-.04.03-.07.07-.06.11.02.1.14.13.24.15.41.07.66-.08.94-.29,1.39-1.05,2.79-2.09,4.18-3.14.31-.24.63-.37,1.08-.32-.38.29-.75.56-1.11.83l-.98.73s-.01.01-.02.02h0c-.05.04-.09.09-.07.16.04.13.19.15.33.17.24.04.45-.05.64-.19.11-.08.22-.16.33-.24h0s1.5-1.19,1.5-1.19c0,0,.35-.25.76-.49.04-.02.07-.05.11-.07.02,0,.05-.02.11-.05.04-.02.09-.05.13-.07.06-.03.14-.07.22-.12.24-.1.93-.42,1.51-1.03.27-.21.66-.48.95-.62h0s0,0,0,0c.02-.01.04-.02.07-.03h0s0,0,0,0c.01,0,.03-.01.04-.01h.04c.11-.05.28-.06.31.18,0,.06,0,.12,0,.18,0,.05-.01.1-.02.14,0,.04,0,.08.04.1,0,0,.01,0,.02,0,.04.02.09.01.12-.02.01-.01.02-.03.04-.04.15-.15.3-.26.45-.33.23-.1.47-.1.72,0,.12.05.23.11.34.19.05.03.1.07.14.12.03.03.06.05.08.08h0s.02.02.02.02c0,0,0,0,.01.01.02.02.05.02.08.02.03,0,.06-.03.07-.06,0,0,0-.01,0-.02.07-.21.09-.4.04-.59Z"/>
|
||||
<path style="fill: #fddab4;" d="M13.48,7.75c-.02.1-.03.21-.05.31.02-.1.04-.21.05-.31h0Z"/>
|
||||
<path style="fill: #fddab4;" d="M15.33,8.9s.02,0,.02,0c0,0-.02,0-.02,0"/>
|
||||
<polyline style="fill: #fddab4;" points="14.46 8.33 14.46 8.33 14.46 8.33"/>
|
||||
<polyline style="fill: #fddab4;" points="14.48 8.26 14.48 8.26 14.48 8.26"/>
|
||||
<polyline style="fill: #fddab4;" points="14.49 8.25 14.49 8.25 14.49 8.25"/>
|
||||
<polyline style="fill: #fddab4;" points="15.64 8.25 15.63 8.25 15.64 8.25"/>
|
||||
<path style="fill: #fddab4;" d="M13.81,8.17s-.02.06-.04.08c.01-.03.02-.06.04-.08"/>
|
||||
<polyline style="fill: #fddab4;" points="12.57 8.11 12.57 8.11 12.57 8.11"/>
|
||||
<polyline style="fill: #fddab4;" points="12.39 8.03 12.39 8.03 12.39 8.03"/>
|
||||
<polyline style="fill: #fddab4;" points="12.39 8.03 12.39 8.03 12.39 8.03"/>
|
||||
<polyline style="fill: #fddab4;" points="12.39 8.03 12.39 8.03 12.39 8.03"/>
|
||||
<polyline style="fill: #fddab4;" points="12.38 8.02 12.38 8.03 12.38 8.02"/>
|
||||
<polyline style="fill: #fddab4;" points="12.38 8.02 12.38 8.02 12.38 8.02"/>
|
||||
<polyline style="fill: #fddab4;" points="12.35 7.89 12.35 7.89 12.35 7.89"/>
|
||||
<path style="fill: #fddab4;" d="M12.81,7.81s0,.04-.01.06c0-.02,0-.04.01-.06"/>
|
||||
<path style="fill: #fddab4;" d="M11.67,7.67s-.03.02-.05.03c.02,0,.03-.02.05-.03"/>
|
||||
<path style="fill: #e5b3a5;" d="M12.39,8.03s.03.03.05.04h0s-.03-.03-.05-.04M12.39,8.03s0,0,0,0c0,0,0,0,0,0M12.39,8.03s0,0,0,0c0,0,0,0,0,0M12.38,8.03s0,0,0,0c0,0,0,0,0,0M12.38,8.02s0,0,0,0c0,0,0,0,0,0M12.38,8.02s0,0,0,0c0,0,0,0,0,0M12.35,7.89s0,0,0,0c0,0,0,0,0,0M12.35,7.89h0s0,0,0,0c0,0,0,0,0,0M11.45,7.68s0,0,0,0c.04.02.09.03.13.03.02,0,.03,0,.05,0-.02,0-.03,0-.05,0-.04,0-.09-.02-.13-.03M12.34,7.54s0,0,0,0c0,0,0,0,0,0"/>
|
||||
<path style="fill: #e5b3a5;" d="M15.35,8.91s0,0,.01,0h0s0,0-.01,0M14.52,8.58s0,0,0,0c0,0,0,0,0,0M14.39,8.51s.01.06.04.08c.01,0,.02.01.03.01.02,0,.04,0,.05-.02-.02.01-.04.02-.05.02-.01,0-.02,0-.03-.01-.03-.02-.04-.05-.04-.08M14.46,8.33s0,0,0,.01c0,0,0,0,0-.01M14.48,8.26s-.02.05-.03.07c0-.02.02-.05.03-.07M14.49,8.25s0,0,0,0c0,0,0,0,0,0M15.63,8.25s0,0,0,0c0,0,0,0,0,0M15.64,8.24s0,0,0,0c0,0,0,0,0,0M14.49,8.24s0,0,0,0c0,0,0,0,0,0M13.4,8.21c0,.07.03.13.13.16.03.01.06.01.08.01.09,0,.13-.06.16-.13-.03.07-.07.13-.16.13-.02,0-.05,0-.08-.01-.1-.03-.13-.09-.13-.16M12.57,8.11h0,0M12.74,8.04s-.08.07-.14.07c0,0-.01,0-.02,0,0,0,.01,0,.02,0,.07,0,.11-.03.14-.07M14.05,7.54s0,0,0,0c-.03.1-.06.2-.1.3-.04.11-.09.22-.14.33h0c.05-.11.1-.22.14-.33.04-.1.07-.2.1-.3M12.87,7.49c-.02.11-.03.22-.05.33.02-.11.04-.22.06-.33h0M15.52,6.97s0,0,0,0c-.15.49-.38.95-.75,1.36-.04.05-.09.1-.14.14h0s.09-.09.14-.14c.37-.41.6-.87.75-1.36"/>
|
||||
<path style="fill: url(#linear-gradient-2); opacity: .4;" d="M4.17,2.09s-.04,0-.06.01c-.1.04-.12.14-.13.25.52.68,2.56,3.18,5.78,4.9,0,0,3.15,1.66,6.32,1.84-.25-.03-.46-.09-.67-.16-.01,0-.02,0-.04-.01h0c-1.15-.25-2.13-.56-2.83-.81.01,0,.03,0,.04,0-.01,0-.03,0-.04,0-.04,0-.07-.02-.1-.04-.55-.2-.9-.35-.98-.39-.04-.02-.08-.04-.12-.05-2.08-.86-3.86-2.07-5.42-3.6-.61-.62-1.65-1.88-1.65-1.88h0s-.06-.05-.1-.05"/>
|
||||
<path style="fill: url(#linear-gradient-3); opacity: .4;" d="M4.39,4.75s-.01,0-.02,0c-.1.02-.12.12-.14.21,0,.02,0,.03,0,.05,1.61,2.18,3.77,3.07,3.77,3.07,2.15,1.01,3.92,1.22,5,1.22.45,0,.77-.04.96-.06-.09,0-.18.01-.28.01-.63,0-1.53-.17-2.29-.35-.46-.1-.92-.22-1.36-.36-.02,0-.03,0-.03,0h0c-1.73-.55-3.32-1.44-4.7-2.81-.13-.13-.25-.27-.37-.41-.11-.13-.34-.4-.41-.5,0-.01-.02-.02-.03-.03h0s0,0,0,0c-.02-.02-.04-.03-.07-.03"/>
|
||||
<path style="fill: url(#linear-gradient-4); opacity: .4" d="M5.86,8.53s-.05,0-.08.03c-.11.08-.06.22-.02.35.81.49,2.66,1.44,4.84,1.44.83,0,1.7-.14,2.59-.48-.64.21-1.3.28-1.96.28-.07,0-.15,0-.22,0-1.43-.04-2.77-.38-4.03-.99-.46-.24-.9-.5-1.01-.58-.04-.03-.08-.05-.12-.05"/>
|
||||
<path style="fill: url(#linear-gradient-5); opacity: .4;" d="M13.34,10.6c-.35.27-.75.46-1.16.61-.77.28-1.55.41-2.32.41-.67,0-1.33-.09-1.99-.26-.46-.12-.99-.32-.99-.32h0s-.07-.03-.1-.03c-.04,0-.08.02-.11.06-.05.07-.05.14-.04.21.51.24,1.6.66,2.93.66,1.15,0,2.49-.32,3.78-1.34"/>
|
||||
<path style="fill: url(#linear-gradient-6); opacity: .4;" d="M9.91,12.25c-1.25,0-2.19-.25-2.31-.29.04.01.07.02.1.03.11.04-.19.24-.29.31-.35.27-.69.52-1.04.77h1.06c.33-.25.67-.5.99-.75.12-.1.24-.13.37-.13.1,0,.2.02.33.05-.73.55-1.4,1.06-2.08,1.57-.4.3-.79.6-1.19.9h1.07c.96-.72,1.92-1.44,2.87-2.16.22-.17.44-.28.71-.32-.2.01-.4.02-.58.02Z"/>
|
||||
<path style="fill: url(#Fade_to_Black_2); opacity: .4;" d="M9.69,13.1h1.03l.76-.6s.35-.25.76-.49c.04-.02.07-.05.11-.07.02,0,.05-.02.11-.05.04-.02.09-.05.13-.07.07-.03.15-.08.23-.12-.73.31-1.49.46-2.18.52,0,0,.02,0,.03,0,.06,0,.12,0,.18.01-.38.29-.75.56-1.11.83l-.06.05Z"/>
|
||||
<path style="fill: url(#linear-gradient-7); opacity: .4;" d="M12.35,7.89c0-.09,0-.19,0-.28h0s0,0,0,0c0-.01,0-.03,0-.04h0s0-.02,0-.02c0-.08,0-.16-.02-.23-.25-.1-.48-.24-.69-.39,0,.02,0,.04,0,.07.03.19.02.13.04.29.01.08.03.23.03.28,0,.04-.01.08-.04.11-.03.03-.06.04-.09.04-.04,0-.09-.02-.13-.03.08.04.43.19.98.39-.06-.04-.08-.09-.08-.18Z"/>
|
||||
<path style="fill: url(#linear-gradient-8); opacity: .4;" d="M16.36,6.14c-.23.32-.51.6-.84.83-.15.49-.38.95-.75,1.36-.04.05-.09.1-.14.14-.05.04-.11.12-.17.12-.01,0-.02,0-.03-.01-.08-.06,0-.18.02-.24.14-.32.24-.65.32-.98-.23.09-.47.15-.72.18-.03.1-.06.2-.1.3-.04.11-.09.22-.14.33-.05.1-.08.21-.2.21-.02,0-.05,0-.08-.01-.18-.06-.13-.19-.1-.31.03-.16.06-.32.08-.49-.22,0-.43-.04-.64-.08-.02.13-.04.26-.07.39-.02.09-.05.24-.21.24-.02,0-.04,0-.06,0,.69.25,1.68.56,2.83.81h0s-.02,0-.03-.01c-.18-.08.15-.41.3-.64.43-.66.64-1.37.73-2.12Z"/>
|
||||
<path style="fill: url(#Fade_to_Black_2-2); opacity: .5;" d="M16.19,9.7c-.14,0-.29.01-.43.04-.03,0-.05,0-.08,0-.04,0-.08,0-.11-.02,0,0,.01.01.02.02.06.09.1.18.11.28.02.02.03.04.04.07h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0t0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,.05,0,.07c0,.04,0,.07,0,.11,0,.05-.01.1-.02.14,0,0,0,.01,0,.02,0,.03.02.06.04.08h.02s.03.02.05.02c.03,0,.06-.01.08-.03.01-.01.02-.03.04-.04.15-.15.3-.26.45-.33.12-.05.23-.07.35-.07s.24.02.36.07c.12.05.23.11.34.19.05.03.1.07.14.12.03.03.06.05.08.08h0s.02.02.02.02h.01s.04.03.06.03c-.1-.21-.22-.41-.4-.56-.33-.29-.75-.41-1.18-.41"/>
|
||||
<path style="fill: url(#linear-gradient-9); opacity: .4;" d="M16.15,8.3h-.02s-.08.05-.08.09c0,.03,0,.07,0,.11,0,.01,0,.02,0,.03.18.26.56.73,1.15.91,0,0,.91.47.68,1.16h0v-.02c.08-.21.09-.4.05-.59-.04-.19-.14-.36-.28-.51-.06-.06-.12-.11-.19-.16-.03-.02-.07-.05-.1-.07-.04-.03-.09-.05-.14-.08-.19-.09-.34-.19-.48-.29-.06-.04-.11-.09-.16-.13-.12-.11-.21-.2-.27-.3-.02-.03-.04-.07-.05-.1,0-.01-.02-.03-.03-.04-.02-.01-.04-.02-.06-.02"/>
|
||||
<path style="fill: url(#linear-gradient-10); opacity: .5;" d="M15,10.21l.18-.11.77-1.03s-.42-.08-.63-.17c0,0-.21.96-.99,1.77l.29-.21c.11-.08.22-.15.38-.25Z"/>
|
||||
<g>
|
||||
<path style="fill: #404041;" d="M25.33,4.73h1.97c1.42,0,2.07.73,2.07,2.23v.74c0,1.5-.65,2.23-2.07,2.23h-1.14v4.09h-.82V4.73ZM27.3,9.19c.86,0,1.25-.39,1.25-1.44v-.84c0-1.04-.39-1.44-1.25-1.44h-1.14v3.71h1.14Z"/>
|
||||
<path style="fill: #404041;" d="M32.4,4.73h.82v4.08h2.54v-4.08h.82v9.3h-.82v-4.48h-2.54v4.48h-.82V4.73Z"/>
|
||||
<path style="fill: #404041;" d="M39.82,11.9v-5.02c0-1.48.74-2.27,2.09-2.27s2.09.8,2.09,2.27v5.02c0,1.48-.74,2.27-2.09,2.27s-2.09-.8-2.09-2.27ZM43.17,11.95v-5.13c0-1-.45-1.48-1.26-1.48s-1.26.48-1.26,1.48v5.13c0,1,.45,1.46,1.26,1.46s1.26-.47,1.26-1.46Z"/>
|
||||
<path style="fill: #404041;" d="M47.23,4.73h3.72v.74h-2.9v3.34h2.38v.74h-2.38v3.72h2.9v.76h-3.72V4.73Z"/>
|
||||
<path style="fill: #404041;" d="M53.93,4.73h1.09l2.42,7.19v-7.19h.77v9.3h-.88l-2.64-7.93v7.93h-.76V4.73Z"/>
|
||||
<path style="fill: #404041;" d="M61.55,4.73h.82v9.3h-.82V4.73Z"/>
|
||||
<path style="fill: #404041;" d="M67.19,9.29l-1.82-4.56h.88l1.46,3.69,1.48-3.69h.8l-1.82,4.56,1.9,4.74h-.88l-1.54-3.91-1.55,3.91h-.8l1.9-4.74Z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
@@ -0,0 +1,122 @@
{
|
||||
"icon": {
|
||||
"type": "element",
|
||||
"isRootNode": true,
|
||||
"name": "svg",
|
||||
"attributes": {
|
||||
"id": "Layer_2",
|
||||
"data-name": "Layer 2",
|
||||
"xmlns": "http://www.w3.org/2000/svg",
|
||||
"viewBox": "0 0 74 16",
|
||||
"width": "74",
|
||||
"height": "16"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"id": "Layer_1-2",
|
||||
"data-name": "Layer 1"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "rect",
|
||||
"attributes": {
|
||||
"style": "fill: none;",
|
||||
"y": "0",
|
||||
"width": "74",
|
||||
"height": "16"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"id": "Arize_-_standard",
|
||||
"data-name": "Arize - standard"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M67.29,9.7h-7.52c0,.08,0,.15.01.21.35,1.03,1.03,1.73,2.13,1.92,1.13.19,2.14,0,2.86-1.01.06-.09.19-.18.29-.19.66-.02,1.33,0,1.99,0-.1,1.79-2.59,3.32-5.07,3.14-2.66-.2-4.61-2.53-4.39-5.25.25-3.08,2.44-4.88,5.58-4.56,2.7.27,4.45,2.69,4.12,5.76ZM59.81,7.77h5.3c-.28-1.25-1.36-2.01-2.78-1.98-1.25.03-2.32.87-2.52,1.98Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #ff008c;",
|
||||
"d": "M13.87,1.69c.4-.02.74.17.99.54,2.03,2.92,4.06,5.85,6.08,8.77.42.61.28,1.33-.29,1.73-.56.39-1.31.24-1.74-.38-1.4-2.01-2.79-4.02-4.19-6.04-.03-.04-.05-.08-.08-.11-.55-.78-1.1-.78-1.64,0-1.41,2.03-2.82,4.06-4.23,6.09-.23.34-.52.59-.93.63-.51.06-.92-.13-1.19-.57-.28-.46-.25-.92.05-1.35.68-.98,1.36-1.96,2.04-2.93,1.34-1.93,2.68-3.85,4.02-5.78.26-.37.59-.6,1.11-.59Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M35.23,13.62h-2.01v-.77c-.07.01-.1,0-.13.02-1.64,1.16-3.39,1.25-5.12.34-1.74-.92-2.47-2.51-2.45-4.45.02-2.17,1.22-3.97,3.1-4.57,1.57-.5,3.09-.45,4.46.62.02.02.06.02.14.05v-.79h2.02v9.54ZM27.57,8.88c0,.11.01.31.03.5.14,1.39,1.18,2.39,2.61,2.51,1.4.12,2.53-.63,2.92-1.97.09-.32.14-.65.15-.98.09-2.15-1.5-3.51-3.56-3.07-1.28.27-2.13,1.43-2.15,3.02Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M53.61,5.93h-4.96v-1.85h7.76c0,.54.01,1.07,0,1.61,0,.12-.12.24-.21.34-1.52,1.79-3.05,3.57-4.57,5.36-.07.09-.14.18-.26.32h5.02v1.91h-7.83c0-.54-.01-1.08,0-1.61,0-.11.11-.22.19-.31,1.55-1.83,3.1-3.65,4.64-5.47.06-.07.11-.14.21-.28Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M39.6,13.69h-2.02V4.15h2.02v1.03c.54-.32,1.04-.68,1.58-.94.57-.28,1.19-.27,1.85-.23v1.96c-.1,0-.2.02-.3.02-1.58.02-2.68.9-3.01,2.46-.08.38-.11.77-.11,1.16-.01,1.24,0,2.47,0,3.71v.38Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M44.74,4.06h1.99v9.56h-1.99V4.06Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #ff008c;",
|
||||
"d": "M13.82,13.02c.84,0,1.49.64,1.5,1.46,0,.83-.68,1.53-1.5,1.52-.82,0-1.47-.67-1.48-1.5,0-.83.64-1.48,1.47-1.48Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M47.13,1.41c0,.8-.61,1.43-1.39,1.43-.8,0-1.44-.63-1.44-1.43,0-.78.63-1.41,1.42-1.41.79,0,1.41.62,1.41,1.41Z"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"name": "ArizeIcon"
|
||||
}
@@ -0,0 +1,20 @@
// GENERATED BY script
|
||||
// DO NOT EDIT IT MANUALLY
|
||||
|
||||
import * as React from 'react'
|
||||
import data from './ArizeIcon.json'
|
||||
import IconBase from '@/app/components/base/icons/IconBase'
|
||||
import type { IconData } from '@/app/components/base/icons/IconBase'
|
||||
|
||||
const Icon = (
|
||||
{
|
||||
ref,
|
||||
...props
|
||||
}: React.SVGProps<SVGSVGElement> & {
|
||||
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
|
||||
},
|
||||
) => <IconBase {...props} ref={ref} data={data as IconData} />
|
||||
|
||||
Icon.displayName = 'ArizeIcon'
|
||||
|
||||
export default Icon
@@ -0,0 +1,122 @@
{
|
||||
"icon": {
|
||||
"type": "element",
|
||||
"isRootNode": true,
|
||||
"name": "svg",
|
||||
"attributes": {
|
||||
"id": "Layer_2",
|
||||
"data-name": "Layer 2",
|
||||
"xmlns": "http://www.w3.org/2000/svg",
|
||||
"viewBox": "0 0 111 24",
|
||||
"width": "111",
|
||||
"height": "24"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"id": "Layer_1-2",
|
||||
"data-name": "Layer 1"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "rect",
|
||||
"attributes": {
|
||||
"style": "fill: none;",
|
||||
"y": "0",
|
||||
"width": "111",
|
||||
"height": "24"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"id": "Arize_-_standard",
|
||||
"data-name": "Arize - standard"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M100.94,14.55h-11.29c0,.13-.01.23.02.31.53,1.55,1.54,2.59,3.19,2.88,1.7.29,3.22,0,4.3-1.52.09-.13.28-.27.43-.28.99-.02,1.99-.01,2.99-.01-.16,2.69-3.89,4.98-7.6,4.7-3.99-.3-6.91-3.79-6.58-7.88.37-4.62,3.67-7.31,8.37-6.85,4.05.4,6.68,4.04,6.19,8.64ZM89.71,11.66h7.96c-.43-1.88-2.04-3.02-4.17-2.97-1.87.04-3.48,1.3-3.78,2.97Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #ff008c;",
|
||||
"d": "M20.81,2.53c.59-.03,1.11.26,1.49.8,3.05,4.38,6.09,8.77,9.13,13.16.63.91.43,1.99-.43,2.59-.84.59-1.97.35-2.62-.57-2.1-3.01-4.19-6.04-6.28-9.05-.04-.06-.08-.11-.12-.17-.83-1.17-1.65-1.18-2.47,0-2.11,3.05-4.23,6.09-6.34,9.14-.35.5-.77.88-1.4.95-.77.08-1.39-.19-1.79-.86-.41-.69-.38-1.38.07-2.03,1.01-1.47,2.04-2.93,3.06-4.4,2.01-2.89,4.03-5.77,6.03-8.67.39-.56.88-.9,1.67-.89Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M52.84,20.43h-3.02v-1.16c-.1.02-.16.01-.19.03-2.46,1.73-5.09,1.88-7.68.51-2.61-1.38-3.71-3.77-3.68-6.67.03-3.26,1.82-5.96,4.64-6.86,2.35-.75,4.64-.67,6.69.93.04.03.09.03.2.07v-1.18h3.03v14.31ZM41.36,13.32c.01.17.02.46.05.75.22,2.09,1.76,3.58,3.91,3.76,2.1.18,3.8-.95,4.38-2.96.14-.47.2-.98.22-1.47.13-3.22-2.25-5.27-5.33-4.61-1.92.41-3.19,2.14-3.23,4.53Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M80.41,8.9h-7.44v-2.77h11.64c0,.81.02,1.61-.01,2.41,0,.17-.19.35-.32.51-2.28,2.68-4.57,5.36-6.85,8.04-.11.13-.21.26-.38.47h7.53v2.86h-11.74c0-.82-.02-1.62.01-2.42,0-.16.17-.33.28-.47,2.32-2.74,4.64-5.47,6.96-8.21.09-.1.16-.21.32-.42Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M59.39,20.54h-3.03V6.22h3.03v1.54c.8-.48,1.55-1.01,2.37-1.41.85-.41,1.79-.41,2.77-.35v2.94c-.16.01-.3.03-.45.03-2.37.03-4.01,1.36-4.51,3.69-.12.57-.16,1.16-.16,1.74-.02,1.85,0,3.71,0,5.56v.57Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M67.1,6.09h2.99v14.33h-2.99V6.09Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #ff008c;",
|
||||
"d": "M20.73,19.53c1.25,0,2.24.96,2.25,2.19.01,1.24-1.02,2.29-2.24,2.28-1.23-.01-2.21-1.01-2.22-2.24,0-1.25.96-2.22,2.21-2.22Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M70.7,2.11c0,1.19-.92,2.14-2.09,2.15-1.19.01-2.16-.95-2.16-2.14C66.46.95,67.4,0,68.58,0c1.18,0,2.12.93,2.12,2.11Z"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"name": "ArizeIconBig"
|
||||
}
@@ -0,0 +1,20 @@
// GENERATED BY script
|
||||
// DO NOT EDIT IT MANUALLY
|
||||
|
||||
import * as React from 'react'
|
||||
import data from './ArizeIconBig.json'
|
||||
import IconBase from '@/app/components/base/icons/IconBase'
|
||||
import type { IconData } from '@/app/components/base/icons/IconBase'
|
||||
|
||||
const Icon = (
|
||||
{
|
||||
ref,
|
||||
...props
|
||||
}: React.SVGProps<SVGSVGElement> & {
|
||||
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
|
||||
},
|
||||
) => <IconBase {...props} ref={ref} data={data as IconData} />
|
||||
|
||||
Icon.displayName = 'ArizeIconBig'
|
||||
|
||||
export default Icon
@@ -0,0 +1,853 @@
{
|
||||
"icon": {
|
||||
"type": "element",
|
||||
"isRootNode": true,
|
||||
"name": "svg",
|
||||
"attributes": {
|
||||
"id": "Layer_2",
|
||||
"data-name": "Layer 2",
|
||||
"xmlns": "http://www.w3.org/2000/svg",
|
||||
"xmlns:xlink": "http://www.w3.org/1999/xlink",
|
||||
"viewBox": "0 0 74 16",
|
||||
"width": "74",
|
||||
"height": "16"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "defs",
|
||||
"attributes": {},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient",
|
||||
"x1": "6.05",
|
||||
"y1": ".98",
|
||||
"x2": "13.18",
|
||||
"y2": "13.34",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#11bab5"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".5",
|
||||
"stop-color": "#00adee"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "1",
|
||||
"stop-color": "#0094c5"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-2",
|
||||
"x1": "12.99",
|
||||
"y1": "10.87",
|
||||
"x2": "6.97",
|
||||
"y2": ".45",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".12",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".17"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".3",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".42"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".47",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".63"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".64",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".79"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".78",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".9"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".91",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".97"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "1",
|
||||
"stop-color": "#fcfdff"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-3",
|
||||
"x1": "11.21",
|
||||
"y1": "10.8",
|
||||
"x2": "6.88",
|
||||
"y2": "3.29",
|
||||
"xlink:href": "#linear-gradient-2"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-4",
|
||||
"x1": "10.62",
|
||||
"y1": "11.35",
|
||||
"x2": "8.2",
|
||||
"y2": "7.16",
|
||||
"xlink:href": "#linear-gradient-2"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-5",
|
||||
"x1": "10.72",
|
||||
"y1": "12.2",
|
||||
"x2": "9.21",
|
||||
"y2": "9.59",
|
||||
"xlink:href": "#linear-gradient-2"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-6",
|
||||
"x1": "8.17",
|
||||
"y1": "11.96",
|
||||
"x2": "8.17",
|
||||
"y2": "14.71",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#231f20"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".03",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".9"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".22",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".4"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".42",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".1"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".65",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "Fade_to_Black_2",
|
||||
"data-name": "Fade to Black 2",
|
||||
"x1": "11.26",
|
||||
"y1": "11.7",
|
||||
"x2": "11.26",
|
||||
"y2": "13.1",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#231f20"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "1",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-7",
|
||||
"x1": "11.94",
|
||||
"y1": "8.07",
|
||||
"x2": "11.94",
|
||||
"y2": "6.92",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#231f20"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".03",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".95"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".51",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".26"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".81",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-8",
|
||||
"x1": "14.45",
|
||||
"y1": "8.92",
|
||||
"x2": "14.45",
|
||||
"y2": "6.14",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#231f20"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".18",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".48"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".38",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".12"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".58",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "Fade_to_Black_2-2",
|
||||
"data-name": "Fade to Black 2",
|
||||
"x1": "17.03",
|
||||
"y1": "11.1",
|
||||
"x2": "16.07",
|
||||
"y2": "9.44",
|
||||
"xlink:href": "#Fade_to_Black_2"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-9",
|
||||
"x1": "17.8",
|
||||
"y1": "10.64",
|
||||
"x2": "16.36",
|
||||
"y2": "8.16",
|
||||
"xlink:href": "#linear-gradient-2"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-10",
|
||||
"x1": "14.07",
|
||||
"y1": "10.22",
|
||||
"x2": "16.16",
|
||||
"y2": "9.02",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#231f20"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".31",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".5"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".67",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".13"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "1",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"id": "Layer_1-2",
|
||||
"data-name": "Layer 1"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "rect",
|
||||
"attributes": {
|
||||
"style": "fill: none;",
|
||||
"y": ".02",
|
||||
"width": "74",
|
||||
"height": "16"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"id": "Phoenix_horiz_-_gradient",
|
||||
"data-name": "Phoenix horiz - gradient"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient);",
|
||||
"d": "M17.91,10c-.04-.19-.14-.36-.28-.51-.06-.06-.12-.11-.19-.16-.03-.02-.07-.05-.1-.07-.04-.03-.09-.05-.14-.08-.19-.09-.34-.19-.48-.29-.06-.04-.11-.09-.16-.13-.12-.11-.21-.2-.27-.3-.02-.03-.04-.07-.05-.1,0-.01-.02-.03-.03-.04-.02-.02-.05-.02-.08-.02-.04,0-.08.04-.08.09,0,.03,0,.07,0,.11,0,.02,0,.04-.03.05,0,0,0,0-.02,0-.03,0-.06.02-.07.04-.02.02-.02.05-.02.08,0,.03.01.04.02.06h0s0,.03.01.04c.01.03.03.05.05.08.03.05.07.09.11.15.01.02.02.04,0,.06-.01.02-.03.03-.05.03-.25-.03-.46-.09-.67-.16-.02,0-.05-.02-.07-.03-.18-.08.15-.41.3-.64.66-1.02.8-2.19.81-3.38,0-1.15-.12-2.27-.35-3.39h0c-.15-.94-.27-1.29-.35-1.42-.02-.03-.04-.06-.06-.07-.02-.01-.03,0-.03,0-.04,0-.09.04-.15.1-.25.26-.23.57-.17.89.31,1.47.55,2.95.44,4.47-.07,1.04-.27,2.05-1,2.86-.04.05-.09.1-.14.14-.05.05-.14.16-.2.11-.08-.06,0-.18.02-.24.43-1,.55-2.05.48-3.12,0-.02,0-.05,0-.07h0s0-.04,0-.06c-.01-.14-.02-.28-.04-.43-.18-1.71-.4-1.82-.4-1.82h0s-.03-.02-.06-.03c-.12-.02-.18.08-.23.15-.23.35-.13.72-.05,1.08.26,1.28.27,2.55-.18,3.79-.04.11-.09.22-.14.33-.05.12-.09.26-.28.2-.18-.06-.13-.19-.1-.31.06-.31.11-.62.14-.93h0s0-.06,0-.09c0-.05,0-.11.01-.16,0-.05,0-.09,0-.13,0-.02,0-.04,0-.07.04-.99-.13-1.44-.19-1.57,0-.02-.01-.03-.02-.04h0s0,0,0,0c-.07-.1-.17-.09-.27,0-.17.17-.27.37-.22.62.13.74.05,1.46-.08,2.19-.02.1-.06.27-.27.23-.14-.03-.17-.09-.18-.21,0-.09,0-.19,0-.28,0,0,0,0,0,0h0s0-.03,0-.04h0s0-.01,0-.02c0-.13-.02-.25-.03-.38,0-.01,0-.02,0-.04-.08-.71-.18-.78-.18-.78-.07-.09-.17-.07-.26.02-.17.17-.26.37-.22.62.03.19.02.13.04.29.01.08.03.23.03.28,0,.04-.01.08-.04.11-.09.09-.23,0-.34-.05-2.08-.86-3.86-2.07-5.42-3.6-.61-.62-1.65-1.88-1.65-1.88h0s-.09-.07-.16-.04c-.14.06-.13.23-.14.38-.02.29.13.51.3.7,2.35,2.74,5.21,4.71,8.67,5.69l.62.16s.01,0,.02,0c0,0,0,0,0,0l.54.14h.01s-.04.01-.06.02c-.59.12-1.76-.1-2.69-.32-.46-.1-.92-.22-1.36-.36-.02,0-.03,0-.03,0h0c-1.73-.55-3.32-1.44-4.7-2.81-.13-.13-.25-.27-.37-.41-.11-.13-.34-.4-.41-.5,0-.01-.02-.02-.03-.03h0s0,0,0,0c-.02-.02-.05-.04-.09-.03-.1.02-.12.12-.14.21-.07.34.04.62.26.88,1.42,1.59,3.2,2.61,5.21,3.28,1.14.38,2.31.61,3.5.76-.71.23-1.44.3-2.18.28-1.43-.04-2.77-.38-4.03-.99-.46-.24-.9-.5-1.01-.58-.07-.04-.13-.07-.2-.02-.13.1-.04.27,0,.4.08.26.25.43.49.55,2.05,1.08,4.22,1.52,6.52,1.13.23-.04.45-.08.7-.16-.39.33-.83.55-1.3.72-1.44.53-2.88.51-4.31.15-.46-.12-.99-.32-.99-.32h0c-.08-.03-.15-.05-.21.04-.1.15-.01.3.08.45.21.33.63.36.95.48.11.04-.19.24-.29.31-.47.36-.94.69-1.41,1.04l-.31.23h0c-.05.04-.1.08-.08.15.03.09.12.1.2.12.34.07.62-.04.89-.24.57-.43,1.15-.84,1.71-1.28.21-.17.4-.15.7-.08-.73.55-1.4,1.06-2.08,1.57-.69.52-1.38,1.04-2.07,1.56l-.46.34s-.01,0-.02.01h0s0,0,0,0c-.04.03-.07.07-.06.11.02.1.14.13.24.15.41.07.66-.08.94-.29,1.39-1.05,2.79-2.09,4.18-3.14.31-.24.63-.37,1.08-.32-.38.29-.75.56-1.11.83l-.98.73s-.01.01-.02.02h0c-.05.04-.09.09-.07.16.04.13.19.15.33.17.24.04.45-.05.64-.19.11-.08.22-.16.33-.24h0s1.5-1.19,1.5-1.19c0,0,.35-.25.76-.49.04-.02.07-.05.11-.07.02,0,.05-.02.11-.05.04-.02.09-.05.13-.07.06-.03.14-.07.22-.12.24-.1.93-.42,1.51-1.03.27-.21.66-.48.95-.62h0s0,0,0,0c.02-.01.04-.02.07-.03h0s0,0,0,0c.01,0,.03-.01.04-.01h.04c.11-.05.28-.06.31.18,0,.06,0,.12,0,.18,0,.05-.01.1-.02.14,0,.04,0,.08.04.1,0,0,.01,0,.02,0,.04.02.09.01.12-.02.01-.01.02-.03.04-.04.15-.15.3-.26.45-.33.23-.1.47-.1.72,0,.12.05.23.11.34.19.05.03.1.07.14.12.03.03.06.05.08.08h0s.02.02.02.02c0,0,0,0,.01.01.02.02.05.02.08.02.03,0,.06-.03.07-.06,0,0,0-.01,0-.02.07-.21.09-.4.04-.59Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"d": "M13.48,7.75c-.02.1-.03.21-.05.31.02-.1.04-.21.05-.31h0Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"d": "M15.33,8.9s.02,0,.02,0c0,0-.02,0-.02,0"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "14.46 8.33 14.46 8.33 14.46 8.33"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "14.48 8.26 14.48 8.26 14.48 8.26"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "14.49 8.25 14.49 8.25 14.49 8.25"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "15.64 8.25 15.63 8.25 15.64 8.25"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"d": "M13.81,8.17s-.02.06-.04.08c.01-.03.02-.06.04-.08"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "12.57 8.11 12.57 8.11 12.57 8.11"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "12.39 8.03 12.39 8.03 12.39 8.03"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "12.39 8.03 12.39 8.03 12.39 8.03"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "12.39 8.03 12.39 8.03 12.39 8.03"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "12.38 8.02 12.38 8.03 12.38 8.02"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "12.38 8.02 12.38 8.02 12.38 8.02"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "12.35 7.89 12.35 7.89 12.35 7.89"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"d": "M12.81,7.81s0,.04-.01.06c0-.02,0-.04.01-.06"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"d": "M11.67,7.67s-.03.02-.05.03c.02,0,.03-.02.05-.03"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #e5b3a5;",
|
||||
"d": "M12.39,8.03s.03.03.05.04h0s-.03-.03-.05-.04M12.39,8.03s0,0,0,0c0,0,0,0,0,0M12.39,8.03s0,0,0,0c0,0,0,0,0,0M12.38,8.03s0,0,0,0c0,0,0,0,0,0M12.38,8.02s0,0,0,0c0,0,0,0,0,0M12.38,8.02s0,0,0,0c0,0,0,0,0,0M12.35,7.89s0,0,0,0c0,0,0,0,0,0M12.35,7.89h0s0,0,0,0c0,0,0,0,0,0M11.45,7.68s0,0,0,0c.04.02.09.03.13.03.02,0,.03,0,.05,0-.02,0-.03,0-.05,0-.04,0-.09-.02-.13-.03M12.34,7.54s0,0,0,0c0,0,0,0,0,0"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #e5b3a5;",
|
||||
"d": "M15.35,8.91s0,0,.01,0h0s0,0-.01,0M14.52,8.58s0,0,0,0c0,0,0,0,0,0M14.39,8.51s.01.06.04.08c.01,0,.02.01.03.01.02,0,.04,0,.05-.02-.02.01-.04.02-.05.02-.01,0-.02,0-.03-.01-.03-.02-.04-.05-.04-.08M14.46,8.33s0,0,0,.01c0,0,0,0,0-.01M14.48,8.26s-.02.05-.03.07c0-.02.02-.05.03-.07M14.49,8.25s0,0,0,0c0,0,0,0,0,0M15.63,8.25s0,0,0,0c0,0,0,0,0,0M15.64,8.24s0,0,0,0c0,0,0,0,0,0M14.49,8.24s0,0,0,0c0,0,0,0,0,0M13.4,8.21c0,.07.03.13.13.16.03.01.06.01.08.01.09,0,.13-.06.16-.13-.03.07-.07.13-.16.13-.02,0-.05,0-.08-.01-.1-.03-.13-.09-.13-.16M12.57,8.11h0,0M12.74,8.04s-.08.07-.14.07c0,0-.01,0-.02,0,0,0,.01,0,.02,0,.07,0,.11-.03.14-.07M14.05,7.54s0,0,0,0c-.03.1-.06.2-.1.3-.04.11-.09.22-.14.33h0c.05-.11.1-.22.14-.33.04-.1.07-.2.1-.3M12.87,7.49c-.02.11-.03.22-.05.33.02-.11.04-.22.06-.33h0M15.52,6.97s0,0,0,0c-.15.49-.38.95-.75,1.36-.04.05-.09.1-.14.14h0s.09-.09.14-.14c.37-.41.6-.87.75-1.36"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-2); opacity: .4;",
|
||||
"d": "M4.17,2.09s-.04,0-.06.01c-.1.04-.12.14-.13.25.52.68,2.56,3.18,5.78,4.9,0,0,3.15,1.66,6.32,1.84-.25-.03-.46-.09-.67-.16-.01,0-.02,0-.04-.01h0c-1.15-.25-2.13-.56-2.83-.81.01,0,.03,0,.04,0-.01,0-.03,0-.04,0-.04,0-.07-.02-.1-.04-.55-.2-.9-.35-.98-.39-.04-.02-.08-.04-.12-.05-2.08-.86-3.86-2.07-5.42-3.6-.61-.62-1.65-1.88-1.65-1.88h0s-.06-.05-.1-.05"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-3); opacity: .4;",
|
||||
"d": "M4.39,4.75s-.01,0-.02,0c-.1.02-.12.12-.14.21,0,.02,0,.03,0,.05,1.61,2.18,3.77,3.07,3.77,3.07,2.15,1.01,3.92,1.22,5,1.22.45,0,.77-.04.96-.06-.09,0-.18.01-.28.01-.63,0-1.53-.17-2.29-.35-.46-.1-.92-.22-1.36-.36-.02,0-.03,0-.03,0h0c-1.73-.55-3.32-1.44-4.7-2.81-.13-.13-.25-.27-.37-.41-.11-.13-.34-.4-.41-.5,0-.01-.02-.02-.03-.03h0s0,0,0,0c-.02-.02-.04-.03-.07-.03"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-4); opacity: .4",
|
||||
"d": "M5.86,8.53s-.05,0-.08.03c-.11.08-.06.22-.02.35.81.49,2.66,1.44,4.84,1.44.83,0,1.7-.14,2.59-.48-.64.21-1.3.28-1.96.28-.07,0-.15,0-.22,0-1.43-.04-2.77-.38-4.03-.99-.46-.24-.9-.5-1.01-.58-.04-.03-.08-.05-.12-.05"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-5); opacity: .4;",
|
||||
"d": "M13.34,10.6c-.35.27-.75.46-1.16.61-.77.28-1.55.41-2.32.41-.67,0-1.33-.09-1.99-.26-.46-.12-.99-.32-.99-.32h0s-.07-.03-.1-.03c-.04,0-.08.02-.11.06-.05.07-.05.14-.04.21.51.24,1.6.66,2.93.66,1.15,0,2.49-.32,3.78-1.34"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-6); opacity: .4;",
|
||||
"d": "M9.91,12.25c-1.25,0-2.19-.25-2.31-.29.04.01.07.02.1.03.11.04-.19.24-.29.31-.35.27-.69.52-1.04.77h1.06c.33-.25.67-.5.99-.75.12-.1.24-.13.37-.13.1,0,.2.02.33.05-.73.55-1.4,1.06-2.08,1.57-.4.3-.79.6-1.19.9h1.07c.96-.72,1.92-1.44,2.87-2.16.22-.17.44-.28.71-.32-.2.01-.4.02-.58.02Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#Fade_to_Black_2); opacity: .4;",
|
||||
"d": "M9.69,13.1h1.03l.76-.6s.35-.25.76-.49c.04-.02.07-.05.11-.07.02,0,.05-.02.11-.05.04-.02.09-.05.13-.07.07-.03.15-.08.23-.12-.73.31-1.49.46-2.18.52,0,0,.02,0,.03,0,.06,0,.12,0,.18.01-.38.29-.75.56-1.11.83l-.06.05Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-7); opacity: .4;",
|
||||
"d": "M12.35,7.89c0-.09,0-.19,0-.28h0s0,0,0,0c0-.01,0-.03,0-.04h0s0-.02,0-.02c0-.08,0-.16-.02-.23-.25-.1-.48-.24-.69-.39,0,.02,0,.04,0,.07.03.19.02.13.04.29.01.08.03.23.03.28,0,.04-.01.08-.04.11-.03.03-.06.04-.09.04-.04,0-.09-.02-.13-.03.08.04.43.19.98.39-.06-.04-.08-.09-.08-.18Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-8); opacity: .4;",
|
||||
"d": "M16.36,6.14c-.23.32-.51.6-.84.83-.15.49-.38.95-.75,1.36-.04.05-.09.1-.14.14-.05.04-.11.12-.17.12-.01,0-.02,0-.03-.01-.08-.06,0-.18.02-.24.14-.32.24-.65.32-.98-.23.09-.47.15-.72.18-.03.1-.06.2-.1.3-.04.11-.09.22-.14.33-.05.1-.08.21-.2.21-.02,0-.05,0-.08-.01-.18-.06-.13-.19-.1-.31.03-.16.06-.32.08-.49-.22,0-.43-.04-.64-.08-.02.13-.04.26-.07.39-.02.09-.05.24-.21.24-.02,0-.04,0-.06,0,.69.25,1.68.56,2.83.81h0s-.02,0-.03-.01c-.18-.08.15-.41.3-.64.43-.66.64-1.37.73-2.12Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#Fade_to_Black_2-2); opacity: .5;",
|
||||
"d": "M16.19,9.7c-.14,0-.29.01-.43.04-.03,0-.05,0-.08,0-.04,0-.08,0-.11-.02,0,0,.01.01.02.02.06.09.1.18.11.28.02.02.03.04.04.07h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0t0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,.05,0,.07c0,.04,0,.07,0,.11,0,.05-.01.1-.02.14,0,0,0,.01,0,.02,0,.03.02.06.04.08h.02s.03.02.05.02c.03,0,.06-.01.08-.03.01-.01.02-.03.04-.04.15-.15.3-.26.45-.33.12-.05.23-.07.35-.07s.24.02.36.07c.12.05.23.11.34.19.05.03.1.07.14.12.03.03.06.05.08.08h0s.02.02.02.02h.01s.04.03.06.03c-.1-.21-.22-.41-.4-.56-.33-.29-.75-.41-1.18-.41"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-9); opacity: .4;",
|
||||
"d": "M16.15,8.3h-.02s-.08.05-.08.09c0,.03,0,.07,0,.11,0,.01,0,.02,0,.03.18.26.56.73,1.15.91,0,0,.91.47.68,1.16h0v-.02c.08-.21.09-.4.05-.59-.04-.19-.14-.36-.28-.51-.06-.06-.12-.11-.19-.16-.03-.02-.07-.05-.1-.07-.04-.03-.09-.05-.14-.08-.19-.09-.34-.19-.48-.29-.06-.04-.11-.09-.16-.13-.12-.11-.21-.2-.27-.3-.02-.03-.04-.07-.05-.1,0-.01-.02-.03-.03-.04-.02-.01-.04-.02-.06-.02"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-10); opacity: .5;",
|
||||
"d": "M15,10.21l.18-.11.77-1.03s-.42-.08-.63-.17c0,0-.21.96-.99,1.77l.29-.21c.11-.08.22-.15.38-.25Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M25.33,4.73h1.97c1.42,0,2.07.73,2.07,2.23v.74c0,1.5-.65,2.23-2.07,2.23h-1.14v4.09h-.82V4.73ZM27.3,9.19c.86,0,1.25-.39,1.25-1.44v-.84c0-1.04-.39-1.44-1.25-1.44h-1.14v3.71h1.14Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M32.4,4.73h.82v4.08h2.54v-4.08h.82v9.3h-.82v-4.48h-2.54v4.48h-.82V4.73Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M39.82,11.9v-5.02c0-1.48.74-2.27,2.09-2.27s2.09.8,2.09,2.27v5.02c0,1.48-.74,2.27-2.09,2.27s-2.09-.8-2.09-2.27ZM43.17,11.95v-5.13c0-1-.45-1.48-1.26-1.48s-1.26.48-1.26,1.48v5.13c0,1,.45,1.46,1.26,1.46s1.26-.47,1.26-1.46Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M47.23,4.73h3.72v.74h-2.9v3.34h2.38v.74h-2.38v3.72h2.9v.76h-3.72V4.73Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M53.93,4.73h1.09l2.42,7.19v-7.19h.77v9.3h-.88l-2.64-7.93v7.93h-.76V4.73Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M61.55,4.73h.82v9.3h-.82V4.73Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M67.19,9.29l-1.82-4.56h.88l1.46,3.69,1.48-3.69h.8l-1.82,4.56,1.9,4.74h-.88l-1.54-3.91-1.55,3.91h-.8l1.9-4.74Z"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"name": "PhoenixIcon"
|
||||
}
@@ -0,0 +1,20 @@
// GENERATED BY script
|
||||
// DO NOT EDIT IT MANUALLY
|
||||
|
||||
import * as React from 'react'
|
||||
import data from './PhoenixIcon.json'
|
||||
import IconBase from '@/app/components/base/icons/IconBase'
|
||||
import type { IconData } from '@/app/components/base/icons/IconBase'
|
||||
|
||||
const Icon = (
|
||||
{
|
||||
ref,
|
||||
...props
|
||||
}: React.SVGProps<SVGSVGElement> & {
|
||||
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
|
||||
},
|
||||
) => <IconBase {...props} ref={ref} data={data as IconData} />
|
||||
|
||||
Icon.displayName = 'PhoenixIcon'
|
||||
|
||||
export default Icon
@@ -0,0 +1,853 @@
{
|
||||
"icon": {
|
||||
"type": "element",
|
||||
"isRootNode": true,
|
||||
"name": "svg",
|
||||
"attributes": {
|
||||
"id": "Layer_2",
|
||||
"data-name": "Layer 2",
|
||||
"xmlns": "http://www.w3.org/2000/svg",
|
||||
"xmlns:xlink": "http://www.w3.org/1999/xlink",
|
||||
"viewBox": "0 0 111 24",
|
||||
"width": "111",
|
||||
"height": "24"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "defs",
|
||||
"attributes": {},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient",
|
||||
"x1": "9.07",
|
||||
"y1": "1.47",
|
||||
"x2": "19.77",
|
||||
"y2": "20",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#11bab5"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".5",
|
||||
"stop-color": "#00adee"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "1",
|
||||
"stop-color": "#0094c5"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-2",
|
||||
"x1": "19.48",
|
||||
"y1": "16.3",
|
||||
"x2": "10.46",
|
||||
"y2": ".67",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".12",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".17"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".3",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".42"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".47",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".63"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".64",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".79"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".78",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".9"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".91",
|
||||
"stop-color": "#fcfdff",
|
||||
"stop-opacity": ".97"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "1",
|
||||
"stop-color": "#fcfdff"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-3",
|
||||
"x1": "16.82",
|
||||
"y1": "16.21",
|
||||
"x2": "10.32",
|
||||
"y2": "4.94",
|
||||
"xlink:href": "#linear-gradient-2"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-4",
|
||||
"x1": "15.92",
|
||||
"y1": "17.03",
|
||||
"x2": "12.29",
|
||||
"y2": "10.74",
|
||||
"xlink:href": "#linear-gradient-2"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-5",
|
||||
"x1": "16.08",
|
||||
"y1": "18.3",
|
||||
"x2": "13.82",
|
||||
"y2": "14.38",
|
||||
"xlink:href": "#linear-gradient-2"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-6",
|
||||
"x1": "12.26",
|
||||
"y1": "17.94",
|
||||
"x2": "12.26",
|
||||
"y2": "22.07",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#231f20"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".03",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".9"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".22",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".4"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".42",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".1"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".65",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "Fade_to_Black_2",
|
||||
"data-name": "Fade to Black 2",
|
||||
"x1": "16.89",
|
||||
"y1": "17.55",
|
||||
"x2": "16.89",
|
||||
"y2": "19.66",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#231f20"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "1",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-7",
|
||||
"x1": "17.91",
|
||||
"y1": "12.1",
|
||||
"x2": "17.91",
|
||||
"y2": "10.38",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#231f20"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".03",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".95"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".51",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".26"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".81",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-8",
|
||||
"x1": "21.67",
|
||||
"y1": "13.37",
|
||||
"x2": "21.67",
|
||||
"y2": "9.21",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#231f20"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".18",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".48"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".38",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".12"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".58",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "Fade_to_Black_2-2",
|
||||
"data-name": "Fade to Black 2",
|
||||
"x1": "25.54",
|
||||
"y1": "16.65",
|
||||
"x2": "24.1",
|
||||
"y2": "14.16",
|
||||
"xlink:href": "#Fade_to_Black_2"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-9",
|
||||
"x1": "26.7",
|
||||
"y1": "15.97",
|
||||
"x2": "24.55",
|
||||
"y2": "12.24",
|
||||
"xlink:href": "#linear-gradient-2"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "linear-gradient-10",
|
||||
"x1": "21.11",
|
||||
"y1": "15.34",
|
||||
"x2": "24.24",
|
||||
"y2": "13.53",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0",
|
||||
"stop-color": "#231f20"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".31",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".5"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": ".67",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": ".13"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "1",
|
||||
"stop-color": "#231f20",
|
||||
"stop-opacity": "0"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"id": "Layer_1-2",
|
||||
"data-name": "Layer 1"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "rect",
|
||||
"attributes": {
|
||||
"style": "fill: none;",
|
||||
"y": ".04",
|
||||
"width": "111",
|
||||
"height": "24"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"id": "Phoenix_horiz_-_gradient",
|
||||
"data-name": "Phoenix horiz - gradient"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient);",
|
||||
"d": "M26.87,15c-.06-.28-.2-.53-.42-.76-.08-.09-.18-.17-.28-.25-.05-.04-.1-.07-.15-.1-.07-.04-.14-.08-.21-.12-.28-.14-.51-.29-.72-.44-.08-.06-.17-.13-.24-.19-.19-.16-.31-.3-.4-.45-.03-.05-.06-.1-.08-.15-.01-.02-.03-.04-.05-.05-.03-.02-.08-.04-.12-.03-.07.01-.12.07-.13.13,0,.05,0,.11,0,.17,0,.03-.01.07-.04.08,0,0-.01,0-.02,0-.04,0-.08.03-.11.07-.03.04-.03.08-.02.13,0,.04.02.06.03.09v.02s.02.03.03.04c.02.04.04.08.07.12.05.07.1.14.17.22.02.02.02.06,0,.09-.02.03-.05.04-.08.04-.37-.05-.7-.13-1-.25-.04-.01-.07-.03-.11-.04-.28-.12.22-.61.45-.96,1-1.53,1.2-3.29,1.21-5.07,0-1.72-.18-3.41-.52-5.08h0c-.22-1.4-.41-1.93-.53-2.13-.03-.05-.05-.08-.08-.1-.03-.02-.05-.01-.05-.01-.06,0-.14.06-.23.15-.37.39-.35.86-.25,1.34.47,2.21.82,4.43.66,6.7-.11,1.56-.4,3.07-1.5,4.3-.07.07-.14.14-.21.21-.08.08-.21.24-.3.17-.13-.09-.01-.27.03-.36.64-1.5.82-3.07.73-4.69,0-.04,0-.07,0-.11h0s0-.06,0-.09c-.02-.21-.04-.43-.06-.64-.28-2.56-.61-2.73-.61-2.73h0s-.05-.03-.09-.04c-.17-.04-.27.12-.35.23-.35.52-.19,1.07-.08,1.62.39,1.92.4,3.82-.28,5.69-.06.17-.14.34-.21.5-.08.17-.14.39-.42.3-.26-.09-.19-.29-.16-.47.09-.47.16-.93.2-1.4h0s0-.09.01-.13c0-.08.01-.16.02-.24,0-.07,0-.13.01-.2,0-.03,0-.07,0-.1.05-1.48-.2-2.16-.29-2.36-.01-.02-.02-.05-.03-.07h0s0,0,0,0c-.1-.15-.26-.13-.4,0-.26.25-.4.56-.33.93.19,1.1.08,2.2-.13,3.28-.03.15-.09.41-.4.34-.21-.05-.26-.14-.27-.32,0-.14,0-.28,0-.42,0,0,0,0,0-.01h0s0-.04,0-.06h0s0-.02,0-.03c0-.19-.02-.38-.05-.57,0-.02,0-.04,0-.05-.12-1.07-.27-1.17-.27-1.17-.1-.13-.25-.11-.39.03-.26.25-.39.55-.33.93.04.28.03.19.06.43.02.12.05.35.05.42,0,.06-.02.12-.06.16-.13.14-.34,0-.5-.07-3.12-1.29-5.79-3.1-8.12-5.4-.92-.94-2.48-2.83-2.48-2.83h0c-.06-.07-.13-.11-.24-.06-.2.09-.2.35-.22.57-.04.44.2.76.45,1.06,3.52,4.11,7.82,7.06,13,8.54l.93.25s.02,0,.03,0c0,0,0,0,0,0l.8.21h.02s-.06.02-.09.03c-.88.17-2.63-.15-4.04-.47-.7-.15-1.38-.32-2.05-.53-.03,0-.05-.01-.05-.01h0c-2.6-.83-4.97-2.17-7.05-4.21-.2-.19-.38-.4-.56-.61-.16-.2-.5-.61-.62-.75-.01-.02-.03-.03-.04-.05h0s0,0,0,0c-.04-.04-.08-.06-.14-.05-.14.03-.19.18-.21.31-.11.51.05.93.39,1.31,2.13,2.39,4.8,3.91,7.81,4.91,1.71.57,3.46.91,5.25,1.13-1.07.35-2.16.45-3.27.42-2.14-.05-4.15-.57-6.04-1.49-.7-.36-1.34-.76-1.52-.86-.1-.07-.2-.11-.3-.03-.19.15-.06.4,0,.6.12.38.38.65.73.83,3.08,1.63,6.32,2.28,9.78,1.7.35-.06.68-.12,1.05-.25-.58.5-1.25.83-1.95,1.08-2.17.79-4.32.76-6.46.22-.69-.18-1.49-.48-1.49-.48h0c-.12-.05-.23-.07-.32.06-.16.22-.02.46.12.67.32.5.94.55,1.43.72.17.05-.29.36-.43.47-.71.54-1.4,1.04-2.11,1.56l-.46.34h0c-.08.05-.15.12-.11.23.04.13.18.15.31.18.51.1.94-.06,1.33-.36.86-.64,1.73-1.26,2.56-1.93.32-.25.61-.23,1.04-.12-1.09.82-2.11,1.59-3.13,2.36-1.03.78-2.07,1.56-3.1,2.33l-.68.51s-.02.01-.03.02h-.01s0,0,0,0c-.06.05-.1.1-.09.17.03.15.21.19.36.22.61.11.99-.12,1.41-.44,2.09-1.57,4.19-3.13,6.27-4.71.47-.36.95-.56,1.62-.48-.57.43-1.12.84-1.67,1.25l-1.47,1.09s-.02.02-.03.02h0c-.08.06-.13.13-.1.24.06.19.29.22.49.25.37.05.68-.08.96-.29.17-.12.33-.24.5-.37h0s2.25-1.79,2.25-1.79c0,0,.53-.38,1.15-.73.05-.03.11-.07.17-.1.02-.01.08-.04.17-.08.07-.03.13-.07.2-.1.1-.05.21-.11.33-.18.36-.15,1.4-.64,2.26-1.55.41-.32.98-.73,1.43-.92h0s0,0,0,0c.03-.02.07-.03.1-.04h0s0,0,0,0c.02,0,.04-.02.06-.02l.06-.02c.16-.05.43-.06.46.29,0,.09,0,.18,0,.27,0,.07-.02.15-.03.21-.01.06.01.12.06.15,0,0,.02.01.02.01.06.03.14.02.18-.03.02-.02.03-.04.05-.06.22-.23.44-.39.68-.49.35-.14.7-.14,1.07,0,.18.07.35.16.51.28.07.05.14.11.21.17.04.04.08.08.12.12h0s.03.04.03.04c0,0,.01.01.02.02.04.03.08.04.12.03.05-.01.09-.05.11-.1,0,0,0-.02.01-.03.11-.31.13-.6.07-.89Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"d": "M20.22,11.62c-.03.16-.05.31-.08.47.03-.16.06-.31.08-.47h0Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"d": "M22.99,13.35s.02,0,.03.01c-.01,0-.02,0-.03-.01"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "21.68 12.5 21.68 12.5 21.68 12.5"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "21.73 12.39 21.73 12.39 21.73 12.39"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "21.74 12.37 21.73 12.38 21.74 12.37"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "23.45 12.37 23.45 12.38 23.45 12.37"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"d": "M20.72,12.26s-.04.08-.06.12c.02-.04.04-.08.06-.12"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "18.86 12.17 18.86 12.17 18.86 12.17"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "18.58 12.04 18.58 12.04 18.58 12.04"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "18.58 12.04 18.58 12.04 18.58 12.04"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "18.58 12.04 18.58 12.04 18.58 12.04"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "18.58 12.04 18.58 12.04 18.58 12.04"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "18.58 12.03 18.58 12.04 18.58 12.03"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "polyline",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"points": "18.52 11.84 18.52 11.84 18.52 11.84"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"d": "M19.22,11.72s-.01.06-.02.09c0-.03.01-.06.02-.09"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #fddab4;",
|
||||
"d": "M17.51,11.51s-.04.04-.07.05c.02,0,.05-.02.07-.05"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #e5b3a5;",
|
||||
"d": "M18.58,12.04s.04.04.07.06h0s-.05-.04-.07-.06M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.58,12.04s0,0,0,0c0,0,0,0,0,0M18.57,12.03s0,0,0,0c0,0,0,0,0,0M18.52,11.84s0,0,0,0c0,0,0,0,0,0M18.52,11.84h0s0,0,0,0c0,0,0,0,0,0M17.17,11.51s0,0,0,0c.06.03.13.05.19.05.03,0,.05,0,.07-.01-.02,0-.05.01-.07.01-.06,0-.13-.02-.19-.05M18.52,11.31s0,0,0,0c0,0,0,0,0,0"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #e5b3a5;",
|
||||
"d": "M23.02,13.37s.01,0,.02,0h0s-.01,0-.02,0M21.78,12.86s0,0,0,0c0,0,0,0,0,0M21.59,12.76s.02.08.06.11c.02.01.03.02.05.02.03,0,.05-.01.08-.03-.03.02-.05.03-.08.03-.02,0-.03,0-.05-.02-.04-.03-.06-.07-.06-.11M21.68,12.5s0,.01,0,.02c0,0,0-.01,0-.02M21.73,12.39s-.03.07-.04.11c.01-.03.03-.07.04-.11M21.73,12.38s0,0,0,.01c0,0,0,0,0-.01M23.45,12.38s0,0,0,0c0,0,0,0,0,0M23.45,12.37s0,0,0,0c0,0,0,0,0,0M21.74,12.37s0,0,0,0c0,0,0,0,0,0M20.1,12.32c0,.1.04.19.19.24.05.02.09.02.12.02.13,0,.19-.09.24-.2-.05.1-.11.2-.24.2-.04,0-.08,0-.12-.02-.15-.05-.19-.14-.19-.24M18.86,12.17h0,0M19.11,12.07c-.05.06-.11.1-.22.1-.01,0-.02,0-.03,0,.01,0,.02,0,.03,0,.1,0,.17-.04.22-.1M21.08,11.32s0,0,0,0c-.05.15-.09.3-.15.44-.06.17-.14.34-.21.5h0c.08-.16.15-.33.21-.5.05-.15.1-.3.15-.45M19.3,11.23c-.02.16-.05.33-.08.49.03-.16.06-.33.08-.49h0M23.28,10.45s0,0,0,0c-.22.73-.57,1.42-1.12,2.04-.07.07-.14.14-.21.21h0c.07-.07.14-.14.21-.21.55-.62.9-1.31,1.12-2.04"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-2); opacity: .4;",
|
||||
"d": "M6.25,3.13s-.06,0-.09.02c-.14.06-.18.21-.2.37.78,1.02,3.84,4.77,8.67,7.35,0,0,4.72,2.49,9.48,2.76-.37-.05-.7-.13-1-.25-.02,0-.04-.01-.05-.02h0c-1.73-.37-3.2-.84-4.24-1.21.02,0,.04,0,.06,0-.02,0-.04,0-.06,0-.06-.01-.11-.03-.15-.05-.82-.3-1.35-.53-1.47-.59-.06-.03-.12-.06-.17-.08-3.12-1.29-5.79-3.1-8.12-5.4-.92-.94-2.48-2.83-2.48-2.83h0s-.09-.08-.15-.08"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-3); opacity: .4;",
|
||||
"d": "M6.59,7.12s-.02,0-.03,0c-.14.03-.19.18-.21.31,0,.02,0,.05-.01.07,2.41,3.27,5.65,4.6,5.65,4.6,3.23,1.52,5.88,1.83,7.5,1.83.67,0,1.16-.05,1.44-.09-.13.01-.27.02-.42.02-.95,0-2.3-.26-3.43-.52-.7-.15-1.38-.32-2.05-.53-.03,0-.05-.01-.05-.01h0c-2.6-.83-4.97-2.17-7.05-4.21-.2-.19-.38-.4-.56-.61-.16-.2-.5-.61-.62-.75-.01-.02-.03-.03-.04-.05h0s0,0,0,0c-.03-.03-.06-.05-.1-.05"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-4); opacity: .4;",
|
||||
"d": "M8.78,12.8s-.08.01-.12.04c-.16.13-.09.34-.02.52,1.21.73,3.98,2.16,7.27,2.16,1.24,0,2.55-.2,3.88-.72-.96.31-1.94.43-2.94.43-.11,0-.22,0-.33,0-2.14-.05-4.15-.57-6.04-1.49-.7-.36-1.34-.76-1.52-.86-.06-.04-.12-.07-.18-.07"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-5); opacity: .4;",
|
||||
"d": "M20.02,15.9c-.53.4-1.12.68-1.74.91-1.16.42-2.32.61-3.47.61-1,0-1.99-.14-2.99-.39-.69-.18-1.49-.48-1.49-.48h0c-.05-.02-.1-.04-.15-.04-.06,0-.12.03-.17.1-.07.1-.08.21-.06.32.76.35,2.4.98,4.39.98,1.73,0,3.73-.47,5.67-2.01"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-6); opacity: .4;",
|
||||
"d": "M14.87,18.37c-1.87,0-3.29-.38-3.47-.43.05.02.1.03.15.05.17.05-.29.36-.43.47-.52.4-1.04.78-1.56,1.16h1.59c.5-.37,1-.74,1.49-1.13.18-.14.35-.2.55-.2.15,0,.31.03.49.08-1.09.82-2.11,1.59-3.13,2.36-.6.45-1.19.9-1.79,1.34h1.6c1.44-1.08,2.88-2.15,4.31-3.24.33-.25.67-.42,1.07-.48-.3.02-.59.03-.88.03Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#Fade_to_Black_2); opacity: .4;",
|
||||
"d": "M14.54,19.66h1.55l1.14-.91s.53-.38,1.15-.73c.05-.03.11-.07.17-.1.02-.01.08-.04.17-.08.07-.03.13-.07.2-.1.1-.05.22-.11.35-.19-1.1.46-2.23.69-3.28.77.01,0,.03,0,.04,0,.09,0,.18,0,.27.02-.57.43-1.12.84-1.67,1.25l-.09.07Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-7); opacity: .4;",
|
||||
"d": "M18.52,11.84c0-.14,0-.28,0-.42h0s0-.01,0-.01c0-.02,0-.04,0-.06h0s0-.03,0-.03c0-.12-.01-.23-.03-.35-.37-.16-.72-.35-1.04-.59,0,.03,0,.07,0,.1.04.28.03.19.06.43.02.12.05.35.05.42,0,.06-.02.12-.06.16-.04.04-.09.06-.14.06-.06,0-.13-.02-.19-.05.12.06.65.29,1.48.59-.09-.05-.12-.14-.12-.27Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-8); opacity: .4;",
|
||||
"d": "M24.54,9.21c-.34.48-.77.9-1.26,1.24-.22.73-.57,1.42-1.12,2.04-.07.07-.14.14-.21.21-.07.07-.17.19-.25.19-.02,0-.03,0-.05-.02-.13-.09-.01-.27.03-.36.21-.48.37-.98.48-1.47-.35.13-.71.22-1.08.27-.05.15-.09.3-.15.44-.06.17-.14.34-.21.5-.07.14-.12.32-.3.32-.04,0-.08,0-.12-.02-.26-.09-.19-.29-.16-.47.05-.24.09-.49.12-.73-.33-.01-.65-.05-.96-.12-.03.19-.06.39-.1.58-.02.13-.08.35-.31.35-.03,0-.06,0-.09-.01,1.04.37,2.51.84,4.24,1.21h0s-.03-.01-.05-.02c-.28-.12.22-.61.45-.96.64-.98.95-2.06,1.1-3.18Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#Fade_to_Black_2-2); opacity: .5;",
|
||||
"d": "M24.28,14.56c-.22,0-.43.02-.65.06-.04,0-.08.01-.12.01-.06,0-.12,0-.17-.03,0,.01.02.02.03.03.09.13.14.27.16.42.02.03.04.06.06.1h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0t0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0c0,0,0,0,0,0,0,0,0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,0,0,0h0s0,.07,0,.1c0,.05,0,.11,0,.17,0,.07-.02.15-.03.21,0,0,0,.02,0,.03,0,.05.02.09.06.12h.02s.05.03.07.03c.04,0,.08-.02.11-.05.02-.02.03-.04.05-.06.22-.23.44-.39.68-.49.17-.07.35-.11.53-.11s.36.04.55.11c.18.07.35.16.51.28.07.05.14.11.21.17.04.04.08.08.12.12h0s.03.04.03.04l.02.02s.06.03.09.03c-.15-.32-.33-.61-.6-.84-.5-.43-1.13-.62-1.77-.62"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-9); opacity: .4;",
|
||||
"d": "M24.22,12.45h-.03c-.07.01-.12.07-.13.14,0,.05,0,.11,0,.17,0,.02,0,.04-.01.05.27.4.84,1.1,1.72,1.36,0,0,1.36.71,1.01,1.74h0v-.03c.12-.31.14-.6.08-.89-.06-.28-.2-.53-.42-.76-.08-.09-.18-.17-.28-.25-.05-.04-.1-.07-.15-.1-.07-.04-.14-.08-.21-.12-.28-.14-.51-.29-.72-.44-.08-.06-.17-.13-.24-.19-.19-.16-.31-.3-.4-.45-.03-.05-.06-.1-.08-.15-.01-.02-.03-.04-.05-.05-.03-.02-.06-.03-.09-.03"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: url(#linear-gradient-10); opacity: .5;",
|
||||
"d": "M22.5,15.32l.28-.16,1.16-1.55s-.63-.12-.94-.26c0,0-.32,1.45-1.49,2.66l.43-.32c.17-.12.33-.23.56-.37Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M38,7.1h2.95c2.13,0,3.11,1.1,3.11,3.35v1.12c0,2.25-.98,3.35-3.11,3.35h-1.71v6.14h-1.24V7.1ZM40.95,13.78c1.3,0,1.87-.58,1.87-2.15v-1.26c0-1.55-.58-2.15-1.87-2.15h-1.71v5.56h1.71Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M48.61,7.1h1.24v6.12h3.81v-6.12h1.24v13.95h-1.24v-6.72h-3.81v6.72h-1.24V7.1Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M59.73,17.84v-7.54c0-2.21,1.12-3.41,3.13-3.41s3.13,1.2,3.13,3.41v7.54c0,2.21-1.12,3.41-3.13,3.41s-3.13-1.2-3.13-3.41ZM64.75,17.92v-7.69c0-1.5-.68-2.21-1.89-2.21s-1.89.72-1.89,2.21v7.69c0,1.5.68,2.19,1.89,2.19s1.89-.7,1.89-2.19Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M70.85,7.1h5.58v1.12h-4.35v5h3.57v1.12h-3.57v5.58h4.35v1.14h-5.58V7.1Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M80.9,7.1h1.63l3.63,10.78V7.1h1.16v13.95h-1.32l-3.97-11.9v11.9h-1.14V7.1Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M92.32,7.1h1.24v13.95h-1.24V7.1Z"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"style": "fill: #404041;",
|
||||
"d": "M100.79,13.94l-2.73-6.84h1.32l2.19,5.54,2.21-5.54h1.2l-2.73,6.84,2.85,7.12h-1.32l-2.31-5.86-2.33,5.86h-1.2l2.85-7.12Z"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"name": "PhoenixIconBig"
|
||||
}
|
||||
|
|
@@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './PhoenixIconBig.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'PhoenixIconBig'

export default Icon
@@ -1,9 +1,13 @@
export { default as ArizeIconBig } from './ArizeIconBig'
export { default as ArizeIcon } from './ArizeIcon'
export { default as LangfuseIconBig } from './LangfuseIconBig'
export { default as LangfuseIcon } from './LangfuseIcon'
export { default as LangsmithIconBig } from './LangsmithIconBig'
export { default as LangsmithIcon } from './LangsmithIcon'
export { default as OpikIconBig } from './OpikIconBig'
export { default as OpikIcon } from './OpikIcon'
export { default as PhoenixIconBig } from './PhoenixIconBig'
export { default as PhoenixIcon } from './PhoenixIcon'
export { default as TracingIcon } from './TracingIcon'
export { default as WeaveIconBig } from './WeaveIconBig'
export { default as WeaveIcon } from './WeaveIcon'
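These index exports are how the rest of the web app picks up the tracing-provider icons. A small usage sketch, assuming a component imports from this index file (the import path below is hypothetical, since the index file's location is not shown in this diff):

```tsx
import * as React from 'react'
// Hypothetical path: adjust to wherever this icon index actually lives.
import { PhoenixIcon, PhoenixIconBig } from './tracing-icons'

// Small mark for compact rows, large lockup for the provider panel header.
const PhoenixProviderBadge = () => (
  <div className='flex items-center gap-2'>
    <PhoenixIcon className='h-4 w-4' />
    <PhoenixIconBig className='h-6 w-auto' />
  </div>
)

export default PhoenixProviderBadge
```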
@@ -11,6 +11,7 @@ export const preprocessLaTeX = (content: string) => {

  const codeBlockRegex = /```[\s\S]*?```/g
  const codeBlocks = content.match(codeBlockRegex) || []
  const escapeReplacement = (str: string) => str.replace(/\$/g, '_TMP_REPLACE_DOLLAR_')
  let processedContent = content.replace(codeBlockRegex, 'CODE_BLOCK_PLACEHOLDER')

  processedContent = flow([

@@ -21,9 +22,11 @@ export const preprocessLaTeX = (content: string) => {
  ])(processedContent)

  codeBlocks.forEach((block) => {
    processedContent = processedContent.replace('CODE_BLOCK_PLACEHOLDER', block)
    processedContent = processedContent.replace('CODE_BLOCK_PLACEHOLDER', escapeReplacement(block))
  })

  processedContent = processedContent.replace(/_TMP_REPLACE_DOLLAR_/g, '$')

  return processedContent
}
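The hunks above mask any `$` inside extracted code blocks with a temporary token when the blocks are re-inserted, and restore the real `$` characters at the very end, presumably so dollars in code are not treated as math delimiters by the rest of the LaTeX preprocessing. A self-contained sketch of that escape-and-restore round trip (simplified; it mirrors only the masking idea, not the full `preprocessLaTeX` pipeline):

```ts
const DOLLAR_TOKEN = '_TMP_REPLACE_DOLLAR_'
const fence = '`'.repeat(3) // avoids putting a literal fence inside this example

const escapeReplacement = (str: string) => str.replace(/\$/g, DOLLAR_TOKEN)

// Pretend this block was extracted from the markdown before any LaTeX handling.
const codeBlock = `${fence}bash\necho "$HOME"\n${fence}`

// The block re-enters the content with its dollars masked by the token...
let processed = 'CODE_BLOCK_PLACEHOLDER'.replace('CODE_BLOCK_PLACEHOLDER', escapeReplacement(codeBlock))

// ...and the real dollars are restored once everything else has run.
processed = processed.replace(new RegExp(DOLLAR_TOKEN, 'g'), '$$') // '$$' emits a literal '$'

console.log(processed === codeBlock) // true: the round trip preserves the original block
```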
@@ -3,6 +3,7 @@ import { Fragment, cloneElement, useRef } from 'react'
import cn from '@/utils/classnames'

export type HtmlContentProps = {
  open?: boolean
  onClose?: () => void
  onClick?: () => void
}

@@ -100,7 +101,8 @@ export default function CustomPopover({
          }
        >
          {cloneElement(htmlContent as React.ReactElement, {
            onClose: () => onMouseLeave(open),
            open,
            onClose: close,
            ...(manualClose
              ? {
                  onClick: close,
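For context on the popover hunk above: `cloneElement` injects `open`, `onClose`, and, when `manualClose` is set, `onClick` into whatever element is passed as `htmlContent`, so that element is expected to accept the `HtmlContentProps` shown in the first hunk. A minimal, hypothetical content component written against that contract (the import path and component name are illustrative):

```tsx
import type { FC } from 'react'
// Hypothetical path: import the props type from wherever CustomPopover is defined.
import type { HtmlContentProps } from '@/app/components/base/popover'

// A panel that reflects the injected `open` state and closes the popover via `onClose`.
const PopoverMenu: FC<HtmlContentProps> = ({ open, onClose, onClick }) => (
  <div onClick={onClick}>
    <p>{open ? 'Popover is open' : 'Popover is closed'}</p>
    <button type='button' onClick={onClose}>Close</button>
  </div>
)

export default PopoverMenu
```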