mirror of https://github.com/langgenius/dify.git
Merge branch 'refs/heads/main' into feat/grouping-branching
commit 39010fd153
@@ -5,6 +5,7 @@ on:
|
|||
branches: [main]
|
||||
paths:
|
||||
- 'web/i18n/en-US/*.json'
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: write
@@ -18,7 +19,8 @@ jobs:
|
|||
run:
|
||||
working-directory: web
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
# Keep using the old checkout action version because of https://github.com/peter-evans/create-pull-request/issues/4272
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
@@ -26,21 +28,28 @@ jobs:
|
|||
- name: Check for file changes in i18n/en-US
|
||||
id: check_files
|
||||
run: |
|
||||
git fetch origin "${{ github.event.before }}" || true
|
||||
git fetch origin "${{ github.sha }}" || true
|
||||
changed_files=$(git diff --name-only "${{ github.event.before }}" "${{ github.sha }}" -- 'i18n/en-US/*.json')
|
||||
echo "Changed files: $changed_files"
|
||||
if [ -n "$changed_files" ]; then
|
||||
# Skip check for manual trigger, translate all files
|
||||
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
|
||||
echo "FILES_CHANGED=true" >> $GITHUB_ENV
|
||||
file_args=""
|
||||
for file in $changed_files; do
|
||||
filename=$(basename "$file" .json)
|
||||
file_args="$file_args --file $filename"
|
||||
done
|
||||
echo "FILE_ARGS=$file_args" >> $GITHUB_ENV
|
||||
echo "File arguments: $file_args"
|
||||
echo "FILE_ARGS=" >> $GITHUB_ENV
|
||||
echo "Manual trigger: translating all files"
|
||||
else
|
||||
echo "FILES_CHANGED=false" >> $GITHUB_ENV
|
||||
git fetch origin "${{ github.event.before }}" || true
|
||||
git fetch origin "${{ github.sha }}" || true
|
||||
changed_files=$(git diff --name-only "${{ github.event.before }}" "${{ github.sha }}" -- 'i18n/en-US/*.json')
|
||||
echo "Changed files: $changed_files"
|
||||
if [ -n "$changed_files" ]; then
|
||||
echo "FILES_CHANGED=true" >> $GITHUB_ENV
|
||||
file_args=""
|
||||
for file in $changed_files; do
|
||||
filename=$(basename "$file" .json)
|
||||
file_args="$file_args --file $filename"
|
||||
done
|
||||
echo "FILE_ARGS=$file_args" >> $GITHUB_ENV
|
||||
echo "File arguments: $file_args"
|
||||
else
|
||||
echo "FILES_CHANGED=false" >> $GITHUB_ENV
|
||||
fi
|
||||
fi
|
||||
|
||||
- name: Install pnpm
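Note: the shell step above maps each changed web/i18n/en-US/*.json file to a "--file <basename>" argument for the translation script, while a manual workflow_dispatch run leaves FILE_ARGS empty to translate everything. A minimal Python sketch of that mapping, standalone and not part of the workflow itself:

from pathlib import Path

def build_file_args(changed_files: list[str]) -> list[str]:
    # One "--file <basename-without-.json>" pair per changed locale file,
    # mirroring the basename/for-loop logic in the shell step above.
    args: list[str] = []
    for path in changed_files:
        args.extend(["--file", Path(path).stem])
    return args

print(build_file_args(["web/i18n/en-US/app.json", "web/i18n/en-US/common.json"]))
# ['--file', 'app', '--file', 'common']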
@@ -502,6 +502,8 @@ LOG_FILE_BACKUP_COUNT=5
|
|||
LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
|
||||
# Log Timezone
|
||||
LOG_TZ=UTC
|
||||
# Log output format: text or json
|
||||
LOG_OUTPUT_FORMAT=text
|
||||
# Log format
|
||||
LOG_FORMAT=%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s
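Note: LOG_OUTPUT_FORMAT switches between the existing text format and the structured JSON formatter introduced later in this commit. A rough sketch of how the switch could be applied to a logging handler; the import path and wiring are illustrative assumptions, the real hookup lives in Dify's logging extension:

import logging
import os

def build_handler() -> logging.Handler:
    handler = logging.StreamHandler()
    if os.getenv("LOG_OUTPUT_FORMAT", "text") == "json":
        # StructuredJSONFormatter is added under core/logging in this commit.
        from core.logging import StructuredJSONFormatter
        handler.setFormatter(StructuredJSONFormatter())
    else:
        # Text mode keeps LOG_FORMAT above; %(req_id)s is supplied by TraceContextFilter.
        handler.setFormatter(logging.Formatter(
            "%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S",
        ))
    return handler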
@@ -2,9 +2,11 @@ import logging
|
|||
import time
|
||||
|
||||
from opentelemetry.trace import get_current_span
|
||||
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID
|
||||
|
||||
from configs import dify_config
|
||||
from contexts.wrapper import RecyclableContextVar
|
||||
from core.logging.context import init_request_context
|
||||
from dify_app import DifyApp
|
||||
|
||||
logger = logging.getLogger(__name__)
@@ -25,28 +27,35 @@ def create_flask_app_with_configs() -> DifyApp:
|
|||
# add before request hook
|
||||
@dify_app.before_request
|
||||
def before_request():
|
||||
# add a unique identifier to each request
|
||||
# Initialize logging context for this request
|
||||
init_request_context()
|
||||
RecyclableContextVar.increment_thread_recycles()
|
||||
|
||||
# add after request hook for injecting X-Trace-Id header from OpenTelemetry span context
|
||||
# add after request hook for injecting trace headers from OpenTelemetry span context
|
||||
# Only adds headers when OTEL is enabled and has valid context
|
||||
@dify_app.after_request
|
||||
def add_trace_id_header(response):
|
||||
def add_trace_headers(response):
|
||||
try:
|
||||
span = get_current_span()
|
||||
ctx = span.get_span_context() if span else None
|
||||
if ctx and ctx.is_valid:
|
||||
trace_id_hex = format(ctx.trace_id, "032x")
|
||||
# Avoid duplicates if some middleware added it
|
||||
if "X-Trace-Id" not in response.headers:
|
||||
response.headers["X-Trace-Id"] = trace_id_hex
|
||||
|
||||
if not ctx or not ctx.is_valid:
|
||||
return response
|
||||
|
||||
# Inject trace headers from OTEL context
|
||||
if ctx.trace_id != INVALID_TRACE_ID and "X-Trace-Id" not in response.headers:
|
||||
response.headers["X-Trace-Id"] = format(ctx.trace_id, "032x")
|
||||
if ctx.span_id != INVALID_SPAN_ID and "X-Span-Id" not in response.headers:
|
||||
response.headers["X-Span-Id"] = format(ctx.span_id, "016x")
|
||||
|
||||
except Exception:
|
||||
# Never break the response due to tracing header injection
|
||||
logger.warning("Failed to add trace ID to response header", exc_info=True)
|
||||
logger.warning("Failed to add trace headers to response", exc_info=True)
|
||||
return response
|
||||
|
||||
# Capture the decorator's return value to avoid pyright reportUnusedFunction
|
||||
_ = before_request
|
||||
_ = add_trace_id_header
|
||||
_ = add_trace_headers
|
||||
|
||||
return dify_app
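Note: with the reworked after_request hook, responses carry X-Trace-Id (and X-Span-Id when a span is active) only while the OpenTelemetry span context is valid. A hedged sketch of how a caller could read the headers back; the module path and the /health route are assumptions for illustration:

from app_factory import create_flask_app_with_configs  # assumed module path for the factory above

app = create_flask_app_with_configs()
with app.test_client() as client:
    resp = client.get("/health")  # any registered route; both headers may be absent without OTEL
    print(resp.headers.get("X-Trace-Id"))  # 32 hex chars when present
    print(resp.headers.get("X-Span-Id"))   # 16 hex chars when present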
@@ -587,6 +587,11 @@ class LoggingConfig(BaseSettings):
|
|||
default="INFO",
|
||||
)
|
||||
|
||||
LOG_OUTPUT_FORMAT: Literal["text", "json"] = Field(
|
||||
description="Log output format: 'text' for human-readable, 'json' for structured JSON logs.",
|
||||
default="text",
|
||||
)
|
||||
|
||||
LOG_FILE: str | None = Field(
|
||||
description="File path for log output.",
|
||||
default=None,
@@ -13,7 +13,6 @@ from controllers.console.app.wraps import get_app_model
|
|||
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from extensions.ext_database import db
|
||||
from fields.conversation_fields import MessageTextField
|
||||
from fields.raws import FilesContainedField
|
||||
from libs.datetime_utils import naive_utc_now, parse_time_range
|
||||
from libs.helper import TimestampField
|
||||
|
|
@ -177,6 +176,12 @@ annotation_hit_history_model = console_ns.model(
|
|||
},
|
||||
)
|
||||
|
||||
|
||||
class MessageTextField(fields.Raw):
|
||||
def format(self, value):
|
||||
return value[0]["text"] if value else ""
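Note: MessageTextField is a flask_restx Raw field whose format() returns the text of the first segment and falls back to an empty string. A quick standalone check of that behavior:

from flask_restx import fields

class MessageTextField(fields.Raw):
    def format(self, value):
        return value[0]["text"] if value else ""

field = MessageTextField()
print(field.format([{"text": "hello"}]))  # -> "hello"
print(field.format([]))                   # -> ""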
|
||||
|
||||
|
||||
# Simple message detail model
|
||||
simple_message_detail_model = console_ns.model(
|
||||
"SimpleMessageDetail",
|
@@ -1,8 +1,7 @@
|
|||
from typing import Any
|
||||
|
||||
from flask import request
|
||||
from flask_restx import marshal_with
|
||||
from pydantic import BaseModel, Field, model_validator
|
||||
from pydantic import BaseModel, Field, TypeAdapter, model_validator
|
||||
from sqlalchemy.orm import Session
|
||||
from werkzeug.exceptions import NotFound
|
||||
|
||||
|
|
@ -11,7 +10,11 @@ from controllers.console.explore.error import NotChatAppError
|
|||
from controllers.console.explore.wraps import InstalledAppResource
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from extensions.ext_database import db
|
||||
from fields.conversation_fields import conversation_infinite_scroll_pagination_fields, simple_conversation_fields
|
||||
from fields.conversation_fields import (
|
||||
ConversationInfiniteScrollPagination,
|
||||
ResultResponse,
|
||||
SimpleConversation,
|
||||
)
|
||||
from libs.helper import UUIDStrOrEmpty
|
||||
from libs.login import current_user
|
||||
from models import Account
|
||||
|
|
@ -49,7 +52,6 @@ register_schema_models(console_ns, ConversationListQuery, ConversationRenamePayl
|
|||
endpoint="installed_app_conversations",
|
||||
)
|
||||
class ConversationListApi(InstalledAppResource):
|
||||
@marshal_with(conversation_infinite_scroll_pagination_fields)
|
||||
@console_ns.expect(console_ns.models[ConversationListQuery.__name__])
|
||||
def get(self, installed_app):
|
||||
app_model = installed_app.app
|
||||
|
|
@ -73,7 +75,7 @@ class ConversationListApi(InstalledAppResource):
|
|||
if not isinstance(current_user, Account):
|
||||
raise ValueError("current_user must be an Account instance")
|
||||
with Session(db.engine) as session:
|
||||
return WebConversationService.pagination_by_last_id(
|
||||
pagination = WebConversationService.pagination_by_last_id(
|
||||
session=session,
|
||||
app_model=app_model,
|
||||
user=current_user,
|
||||
|
|
@ -82,6 +84,13 @@ class ConversationListApi(InstalledAppResource):
|
|||
invoke_from=InvokeFrom.EXPLORE,
|
||||
pinned=args.pinned,
|
||||
)
|
||||
adapter = TypeAdapter(SimpleConversation)
|
||||
conversations = [adapter.validate_python(item, from_attributes=True) for item in pagination.data]
|
||||
return ConversationInfiniteScrollPagination(
|
||||
limit=pagination.limit,
|
||||
has_more=pagination.has_more,
|
||||
data=conversations,
|
||||
).model_dump(mode="json")
|
||||
except LastConversationNotExistsError:
|
||||
raise NotFound("Last Conversation Not Exists.")
|
||||
|
||||
|
|
@ -105,7 +114,7 @@ class ConversationApi(InstalledAppResource):
|
|||
except ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
|
||||
return {"result": "success"}, 204
|
||||
return ResultResponse(result="success").model_dump(mode="json"), 204
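Note: plain {"result": "success"} dicts are being replaced by a shared ResultResponse model dumped to JSON-safe primitives. Its exact definition is not shown in this diff; a minimal assumed shape based only on its usage here:

from pydantic import BaseModel

class ResultResponse(BaseModel):  # assumed shape
    result: str

body, status = ResultResponse(result="success").model_dump(mode="json"), 204
print(body, status)  # {'result': 'success'} 204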
|
||||
|
||||
|
||||
@console_ns.route(
|
||||
|
|
@ -113,7 +122,6 @@ class ConversationApi(InstalledAppResource):
|
|||
endpoint="installed_app_conversation_rename",
|
||||
)
|
||||
class ConversationRenameApi(InstalledAppResource):
|
||||
@marshal_with(simple_conversation_fields)
|
||||
@console_ns.expect(console_ns.models[ConversationRenamePayload.__name__])
|
||||
def post(self, installed_app, c_id):
|
||||
app_model = installed_app.app
|
||||
|
|
@ -128,9 +136,14 @@ class ConversationRenameApi(InstalledAppResource):
|
|||
try:
|
||||
if not isinstance(current_user, Account):
|
||||
raise ValueError("current_user must be an Account instance")
|
||||
return ConversationService.rename(
|
||||
conversation = ConversationService.rename(
|
||||
app_model, conversation_id, current_user, payload.name, payload.auto_generate
|
||||
)
|
||||
return (
|
||||
TypeAdapter(SimpleConversation)
|
||||
.validate_python(conversation, from_attributes=True)
|
||||
.model_dump(mode="json")
|
||||
)
|
||||
except ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
|
||||
|
|
@ -155,7 +168,7 @@ class ConversationPinApi(InstalledAppResource):
|
|||
except ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
|
||||
return {"result": "success"}
|
||||
return ResultResponse(result="success").model_dump(mode="json")
|
||||
|
||||
|
||||
@console_ns.route(
|
||||
|
|
@ -174,4 +187,4 @@ class ConversationUnPinApi(InstalledAppResource):
|
|||
raise ValueError("current_user must be an Account instance")
|
||||
WebConversationService.unpin(app_model, conversation_id, current_user)
|
||||
|
||||
return {"result": "success"}
|
||||
return ResultResponse(result="success").model_dump(mode="json")
@@ -2,8 +2,7 @@ import logging
|
|||
from typing import Literal
|
||||
|
||||
from flask import request
|
||||
from flask_restx import marshal_with
|
||||
from pydantic import BaseModel, Field
|
||||
from pydantic import BaseModel, Field, TypeAdapter
|
||||
from werkzeug.exceptions import InternalServerError, NotFound
|
||||
|
||||
from controllers.common.schema import register_schema_models
|
||||
|
|
@ -23,7 +22,8 @@ from controllers.console.explore.wraps import InstalledAppResource
|
|||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
|
||||
from core.model_runtime.errors.invoke import InvokeError
|
||||
from fields.message_fields import message_infinite_scroll_pagination_fields
|
||||
from fields.conversation_fields import ResultResponse
|
||||
from fields.message_fields import MessageInfiniteScrollPagination, MessageListItem, SuggestedQuestionsResponse
|
||||
from libs import helper
|
||||
from libs.helper import UUIDStrOrEmpty
|
||||
from libs.login import current_account_with_tenant
|
||||
|
|
@ -66,7 +66,6 @@ register_schema_models(console_ns, MessageListQuery, MessageFeedbackPayload, Mor
|
|||
endpoint="installed_app_messages",
|
||||
)
|
||||
class MessageListApi(InstalledAppResource):
|
||||
@marshal_with(message_infinite_scroll_pagination_fields)
|
||||
@console_ns.expect(console_ns.models[MessageListQuery.__name__])
|
||||
def get(self, installed_app):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
|
|
@ -78,13 +77,20 @@ class MessageListApi(InstalledAppResource):
|
|||
args = MessageListQuery.model_validate(request.args.to_dict())
|
||||
|
||||
try:
|
||||
return MessageService.pagination_by_first_id(
|
||||
pagination = MessageService.pagination_by_first_id(
|
||||
app_model,
|
||||
current_user,
|
||||
str(args.conversation_id),
|
||||
str(args.first_id) if args.first_id else None,
|
||||
args.limit,
|
||||
)
|
||||
adapter = TypeAdapter(MessageListItem)
|
||||
items = [adapter.validate_python(message, from_attributes=True) for message in pagination.data]
|
||||
return MessageInfiniteScrollPagination(
|
||||
limit=pagination.limit,
|
||||
has_more=pagination.has_more,
|
||||
data=items,
|
||||
).model_dump(mode="json")
|
||||
except ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
except FirstMessageNotExistsError:
|
||||
|
|
@ -116,7 +122,7 @@ class MessageFeedbackApi(InstalledAppResource):
|
|||
except MessageNotExistsError:
|
||||
raise NotFound("Message Not Exists.")
|
||||
|
||||
return {"result": "success"}
|
||||
return ResultResponse(result="success").model_dump(mode="json")
|
||||
|
||||
|
||||
@console_ns.route(
|
||||
|
|
@ -201,4 +207,4 @@ class MessageSuggestedQuestionApi(InstalledAppResource):
|
|||
logger.exception("internal server error.")
|
||||
raise InternalServerError()
|
||||
|
||||
return {"data": questions}
|
||||
return SuggestedQuestionsResponse(data=questions).model_dump(mode="json")
@@ -1,14 +1,14 @@
|
|||
from flask import request
|
||||
from flask_restx import fields, marshal_with
|
||||
from pydantic import BaseModel, Field
|
||||
from pydantic import BaseModel, Field, TypeAdapter
|
||||
from werkzeug.exceptions import NotFound
|
||||
|
||||
from controllers.common.schema import register_schema_models
|
||||
from controllers.console import console_ns
|
||||
from controllers.console.explore.error import NotCompletionAppError
|
||||
from controllers.console.explore.wraps import InstalledAppResource
|
||||
from fields.conversation_fields import message_file_fields
|
||||
from libs.helper import TimestampField, UUIDStrOrEmpty
|
||||
from fields.conversation_fields import ResultResponse
|
||||
from fields.message_fields import SavedMessageInfiniteScrollPagination, SavedMessageItem
|
||||
from libs.helper import UUIDStrOrEmpty
|
||||
from libs.login import current_account_with_tenant
|
||||
from services.errors.message import MessageNotExistsError
|
||||
from services.saved_message_service import SavedMessageService
|
||||
|
|
@ -26,28 +26,8 @@ class SavedMessageCreatePayload(BaseModel):
|
|||
register_schema_models(console_ns, SavedMessageListQuery, SavedMessageCreatePayload)
|
||||
|
||||
|
||||
feedback_fields = {"rating": fields.String}
|
||||
|
||||
message_fields = {
|
||||
"id": fields.String,
|
||||
"inputs": fields.Raw,
|
||||
"query": fields.String,
|
||||
"answer": fields.String,
|
||||
"message_files": fields.List(fields.Nested(message_file_fields)),
|
||||
"feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
|
||||
|
||||
@console_ns.route("/installed-apps/<uuid:installed_app_id>/saved-messages", endpoint="installed_app_saved_messages")
|
||||
class SavedMessageListApi(InstalledAppResource):
|
||||
saved_message_infinite_scroll_pagination_fields = {
|
||||
"limit": fields.Integer,
|
||||
"has_more": fields.Boolean,
|
||||
"data": fields.List(fields.Nested(message_fields)),
|
||||
}
|
||||
|
||||
@marshal_with(saved_message_infinite_scroll_pagination_fields)
|
||||
@console_ns.expect(console_ns.models[SavedMessageListQuery.__name__])
|
||||
def get(self, installed_app):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
|
|
@ -57,12 +37,19 @@ class SavedMessageListApi(InstalledAppResource):
|
|||
|
||||
args = SavedMessageListQuery.model_validate(request.args.to_dict())
|
||||
|
||||
return SavedMessageService.pagination_by_last_id(
|
||||
pagination = SavedMessageService.pagination_by_last_id(
|
||||
app_model,
|
||||
current_user,
|
||||
str(args.last_id) if args.last_id else None,
|
||||
args.limit,
|
||||
)
|
||||
adapter = TypeAdapter(SavedMessageItem)
|
||||
items = [adapter.validate_python(message, from_attributes=True) for message in pagination.data]
|
||||
return SavedMessageInfiniteScrollPagination(
|
||||
limit=pagination.limit,
|
||||
has_more=pagination.has_more,
|
||||
data=items,
|
||||
).model_dump(mode="json")
|
||||
|
||||
@console_ns.expect(console_ns.models[SavedMessageCreatePayload.__name__])
|
||||
def post(self, installed_app):
|
||||
|
|
@ -78,7 +65,7 @@ class SavedMessageListApi(InstalledAppResource):
|
|||
except MessageNotExistsError:
|
||||
raise NotFound("Message Not Exists.")
|
||||
|
||||
return {"result": "success"}
|
||||
return ResultResponse(result="success").model_dump(mode="json")
|
||||
|
||||
|
||||
@console_ns.route(
|
||||
|
|
@ -96,4 +83,4 @@ class SavedMessageApi(InstalledAppResource):
|
|||
|
||||
SavedMessageService.delete(app_model, current_user, message_id)
|
||||
|
||||
return {"result": "success"}, 204
|
||||
return ResultResponse(result="success").model_dump(mode="json"), 204
@@ -3,8 +3,7 @@ from uuid import UUID
|
|||
|
||||
from flask import request
|
||||
from flask_restx import Resource
|
||||
from flask_restx._http import HTTPStatus
|
||||
from pydantic import BaseModel, Field, field_validator, model_validator
|
||||
from pydantic import BaseModel, Field, TypeAdapter, field_validator, model_validator
|
||||
from sqlalchemy.orm import Session
|
||||
from werkzeug.exceptions import BadRequest, NotFound
|
||||
|
||||
|
|
@ -16,9 +15,9 @@ from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate
|
|||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from extensions.ext_database import db
|
||||
from fields.conversation_fields import (
|
||||
build_conversation_delete_model,
|
||||
build_conversation_infinite_scroll_pagination_model,
|
||||
build_simple_conversation_model,
|
||||
ConversationDelete,
|
||||
ConversationInfiniteScrollPagination,
|
||||
SimpleConversation,
|
||||
)
|
||||
from fields.conversation_variable_fields import (
|
||||
build_conversation_variable_infinite_scroll_pagination_model,
|
||||
|
|
@ -105,7 +104,6 @@ class ConversationApi(Resource):
|
|||
}
|
||||
)
|
||||
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY))
|
||||
@service_api_ns.marshal_with(build_conversation_infinite_scroll_pagination_model(service_api_ns))
|
||||
def get(self, app_model: App, end_user: EndUser):
|
||||
"""List all conversations for the current user.
|
||||
|
||||
|
|
@ -120,7 +118,7 @@ class ConversationApi(Resource):
|
|||
|
||||
try:
|
||||
with Session(db.engine) as session:
|
||||
return ConversationService.pagination_by_last_id(
|
||||
pagination = ConversationService.pagination_by_last_id(
|
||||
session=session,
|
||||
app_model=app_model,
|
||||
user=end_user,
|
||||
|
|
@ -129,6 +127,13 @@ class ConversationApi(Resource):
|
|||
invoke_from=InvokeFrom.SERVICE_API,
|
||||
sort_by=query_args.sort_by,
|
||||
)
|
||||
adapter = TypeAdapter(SimpleConversation)
|
||||
conversations = [adapter.validate_python(item, from_attributes=True) for item in pagination.data]
|
||||
return ConversationInfiniteScrollPagination(
|
||||
limit=pagination.limit,
|
||||
has_more=pagination.has_more,
|
||||
data=conversations,
|
||||
).model_dump(mode="json")
|
||||
except services.errors.conversation.LastConversationNotExistsError:
|
||||
raise NotFound("Last Conversation Not Exists.")
|
||||
|
||||
|
|
@ -146,7 +151,6 @@ class ConversationDetailApi(Resource):
|
|||
}
|
||||
)
|
||||
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON))
|
||||
@service_api_ns.marshal_with(build_conversation_delete_model(service_api_ns), code=HTTPStatus.NO_CONTENT)
|
||||
def delete(self, app_model: App, end_user: EndUser, c_id):
|
||||
"""Delete a specific conversation."""
|
||||
app_mode = AppMode.value_of(app_model.mode)
|
||||
|
|
@ -159,7 +163,7 @@ class ConversationDetailApi(Resource):
|
|||
ConversationService.delete(app_model, conversation_id, end_user)
|
||||
except services.errors.conversation.ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
return {"result": "success"}, 204
|
||||
return ConversationDelete(result="success").model_dump(mode="json"), 204
|
||||
|
||||
|
||||
@service_api_ns.route("/conversations/<uuid:c_id>/name")
|
||||
|
|
@ -176,7 +180,6 @@ class ConversationRenameApi(Resource):
|
|||
}
|
||||
)
|
||||
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON))
|
||||
@service_api_ns.marshal_with(build_simple_conversation_model(service_api_ns))
|
||||
def post(self, app_model: App, end_user: EndUser, c_id):
|
||||
"""Rename a conversation or auto-generate a name."""
|
||||
app_mode = AppMode.value_of(app_model.mode)
|
||||
|
|
@ -188,7 +191,14 @@ class ConversationRenameApi(Resource):
|
|||
payload = ConversationRenamePayload.model_validate(service_api_ns.payload or {})
|
||||
|
||||
try:
|
||||
return ConversationService.rename(app_model, conversation_id, end_user, payload.name, payload.auto_generate)
|
||||
conversation = ConversationService.rename(
|
||||
app_model, conversation_id, end_user, payload.name, payload.auto_generate
|
||||
)
|
||||
return (
|
||||
TypeAdapter(SimpleConversation)
|
||||
.validate_python(conversation, from_attributes=True)
|
||||
.model_dump(mode="json")
|
||||
)
|
||||
except services.errors.conversation.ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
@@ -1,11 +1,10 @@
|
|||
import json
|
||||
import logging
|
||||
from typing import Literal
|
||||
from uuid import UUID
|
||||
|
||||
from flask import request
|
||||
from flask_restx import Namespace, Resource, fields
|
||||
from pydantic import BaseModel, Field
|
||||
from flask_restx import Resource
|
||||
from pydantic import BaseModel, Field, TypeAdapter
|
||||
from werkzeug.exceptions import BadRequest, InternalServerError, NotFound
|
||||
|
||||
import services
|
||||
|
|
@ -14,10 +13,8 @@ from controllers.service_api import service_api_ns
|
|||
from controllers.service_api.app.error import NotChatAppError
|
||||
from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from fields.conversation_fields import build_message_file_model
|
||||
from fields.message_fields import build_agent_thought_model, build_feedback_model
|
||||
from fields.raws import FilesContainedField
|
||||
from libs.helper import TimestampField
|
||||
from fields.conversation_fields import ResultResponse
|
||||
from fields.message_fields import MessageInfiniteScrollPagination, MessageListItem
|
||||
from models.model import App, AppMode, EndUser
|
||||
from services.errors.message import (
|
||||
FirstMessageNotExistsError,
|
||||
|
|
@ -48,49 +45,6 @@ class FeedbackListQuery(BaseModel):
|
|||
register_schema_models(service_api_ns, MessageListQuery, MessageFeedbackPayload, FeedbackListQuery)
|
||||
|
||||
|
||||
def build_message_model(api_or_ns: Namespace):
|
||||
"""Build the message model for the API or Namespace."""
|
||||
# First build the nested models
|
||||
feedback_model = build_feedback_model(api_or_ns)
|
||||
agent_thought_model = build_agent_thought_model(api_or_ns)
|
||||
message_file_model = build_message_file_model(api_or_ns)
|
||||
|
||||
# Then build the message fields with nested models
|
||||
message_fields = {
|
||||
"id": fields.String,
|
||||
"conversation_id": fields.String,
|
||||
"parent_message_id": fields.String,
|
||||
"inputs": FilesContainedField,
|
||||
"query": fields.String,
|
||||
"answer": fields.String(attribute="re_sign_file_url_answer"),
|
||||
"message_files": fields.List(fields.Nested(message_file_model)),
|
||||
"feedback": fields.Nested(feedback_model, attribute="user_feedback", allow_null=True),
|
||||
"retriever_resources": fields.Raw(
|
||||
attribute=lambda obj: json.loads(obj.message_metadata).get("retriever_resources", [])
|
||||
if obj.message_metadata
|
||||
else []
|
||||
),
|
||||
"created_at": TimestampField,
|
||||
"agent_thoughts": fields.List(fields.Nested(agent_thought_model)),
|
||||
"status": fields.String,
|
||||
"error": fields.String,
|
||||
}
|
||||
return api_or_ns.model("Message", message_fields)
|
||||
|
||||
|
||||
def build_message_infinite_scroll_pagination_model(api_or_ns: Namespace):
|
||||
"""Build the message infinite scroll pagination model for the API or Namespace."""
|
||||
# Build the nested message model first
|
||||
message_model = build_message_model(api_or_ns)
|
||||
|
||||
message_infinite_scroll_pagination_fields = {
|
||||
"limit": fields.Integer,
|
||||
"has_more": fields.Boolean,
|
||||
"data": fields.List(fields.Nested(message_model)),
|
||||
}
|
||||
return api_or_ns.model("MessageInfiniteScrollPagination", message_infinite_scroll_pagination_fields)
|
||||
|
||||
|
||||
@service_api_ns.route("/messages")
|
||||
class MessageListApi(Resource):
|
||||
@service_api_ns.expect(service_api_ns.models[MessageListQuery.__name__])
|
||||
|
|
@ -104,7 +58,6 @@ class MessageListApi(Resource):
|
|||
}
|
||||
)
|
||||
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY))
|
||||
@service_api_ns.marshal_with(build_message_infinite_scroll_pagination_model(service_api_ns))
|
||||
def get(self, app_model: App, end_user: EndUser):
|
||||
"""List messages in a conversation.
|
||||
|
||||
|
|
@ -119,9 +72,16 @@ class MessageListApi(Resource):
|
|||
first_id = str(query_args.first_id) if query_args.first_id else None
|
||||
|
||||
try:
|
||||
return MessageService.pagination_by_first_id(
|
||||
pagination = MessageService.pagination_by_first_id(
|
||||
app_model, end_user, conversation_id, first_id, query_args.limit
|
||||
)
|
||||
adapter = TypeAdapter(MessageListItem)
|
||||
items = [adapter.validate_python(message, from_attributes=True) for message in pagination.data]
|
||||
return MessageInfiniteScrollPagination(
|
||||
limit=pagination.limit,
|
||||
has_more=pagination.has_more,
|
||||
data=items,
|
||||
).model_dump(mode="json")
|
||||
except services.errors.conversation.ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
except FirstMessageNotExistsError:
|
||||
|
|
@ -162,7 +122,7 @@ class MessageFeedbackApi(Resource):
|
|||
except MessageNotExistsError:
|
||||
raise NotFound("Message Not Exists.")
|
||||
|
||||
return {"result": "success"}
|
||||
return ResultResponse(result="success").model_dump(mode="json")
|
||||
|
||||
|
@@ -1,5 +1,6 @@
|
|||
from flask_restx import fields, marshal_with, reqparse
|
||||
from flask_restx import reqparse
|
||||
from flask_restx.inputs import int_range
|
||||
from pydantic import TypeAdapter
|
||||
from sqlalchemy.orm import Session
|
||||
from werkzeug.exceptions import NotFound
|
||||
|
||||
|
|
@ -8,7 +9,11 @@ from controllers.web.error import NotChatAppError
|
|||
from controllers.web.wraps import WebApiResource
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from extensions.ext_database import db
|
||||
from fields.conversation_fields import conversation_infinite_scroll_pagination_fields, simple_conversation_fields
|
||||
from fields.conversation_fields import (
|
||||
ConversationInfiniteScrollPagination,
|
||||
ResultResponse,
|
||||
SimpleConversation,
|
||||
)
|
||||
from libs.helper import uuid_value
|
||||
from models.model import AppMode
|
||||
from services.conversation_service import ConversationService
|
||||
|
|
@ -54,7 +59,6 @@ class ConversationListApi(WebApiResource):
|
|||
500: "Internal Server Error",
|
||||
}
|
||||
)
|
||||
@marshal_with(conversation_infinite_scroll_pagination_fields)
|
||||
def get(self, app_model, end_user):
|
||||
app_mode = AppMode.value_of(app_model.mode)
|
||||
if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
|
||||
|
|
@ -82,7 +86,7 @@ class ConversationListApi(WebApiResource):
|
|||
|
||||
try:
|
||||
with Session(db.engine) as session:
|
||||
return WebConversationService.pagination_by_last_id(
|
||||
pagination = WebConversationService.pagination_by_last_id(
|
||||
session=session,
|
||||
app_model=app_model,
|
||||
user=end_user,
|
||||
|
|
@ -92,16 +96,19 @@ class ConversationListApi(WebApiResource):
|
|||
pinned=pinned,
|
||||
sort_by=args["sort_by"],
|
||||
)
|
||||
adapter = TypeAdapter(SimpleConversation)
|
||||
conversations = [adapter.validate_python(item, from_attributes=True) for item in pagination.data]
|
||||
return ConversationInfiniteScrollPagination(
|
||||
limit=pagination.limit,
|
||||
has_more=pagination.has_more,
|
||||
data=conversations,
|
||||
).model_dump(mode="json")
|
||||
except LastConversationNotExistsError:
|
||||
raise NotFound("Last Conversation Not Exists.")
|
||||
|
||||
|
||||
@web_ns.route("/conversations/<uuid:c_id>")
|
||||
class ConversationApi(WebApiResource):
|
||||
delete_response_fields = {
|
||||
"result": fields.String,
|
||||
}
|
||||
|
||||
@web_ns.doc("Delete Conversation")
|
||||
@web_ns.doc(description="Delete a specific conversation.")
|
||||
@web_ns.doc(params={"c_id": {"description": "Conversation UUID", "type": "string", "required": True}})
|
||||
|
|
@ -115,7 +122,6 @@ class ConversationApi(WebApiResource):
|
|||
500: "Internal Server Error",
|
||||
}
|
||||
)
|
||||
@marshal_with(delete_response_fields)
|
||||
def delete(self, app_model, end_user, c_id):
|
||||
app_mode = AppMode.value_of(app_model.mode)
|
||||
if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
|
||||
|
|
@ -126,7 +132,7 @@ class ConversationApi(WebApiResource):
|
|||
ConversationService.delete(app_model, conversation_id, end_user)
|
||||
except ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
return {"result": "success"}, 204
|
||||
return ResultResponse(result="success").model_dump(mode="json"), 204
|
||||
|
||||
|
||||
@web_ns.route("/conversations/<uuid:c_id>/name")
|
||||
|
|
@ -155,7 +161,6 @@ class ConversationRenameApi(WebApiResource):
|
|||
500: "Internal Server Error",
|
||||
}
|
||||
)
|
||||
@marshal_with(simple_conversation_fields)
|
||||
def post(self, app_model, end_user, c_id):
|
||||
app_mode = AppMode.value_of(app_model.mode)
|
||||
if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
|
||||
|
|
@ -171,17 +176,20 @@ class ConversationRenameApi(WebApiResource):
|
|||
args = parser.parse_args()
|
||||
|
||||
try:
|
||||
return ConversationService.rename(app_model, conversation_id, end_user, args["name"], args["auto_generate"])
|
||||
conversation = ConversationService.rename(
|
||||
app_model, conversation_id, end_user, args["name"], args["auto_generate"]
|
||||
)
|
||||
return (
|
||||
TypeAdapter(SimpleConversation)
|
||||
.validate_python(conversation, from_attributes=True)
|
||||
.model_dump(mode="json")
|
||||
)
|
||||
except ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
|
||||
|
||||
@web_ns.route("/conversations/<uuid:c_id>/pin")
|
||||
class ConversationPinApi(WebApiResource):
|
||||
pin_response_fields = {
|
||||
"result": fields.String,
|
||||
}
|
||||
|
||||
@web_ns.doc("Pin Conversation")
|
||||
@web_ns.doc(description="Pin a specific conversation to keep it at the top of the list.")
|
||||
@web_ns.doc(params={"c_id": {"description": "Conversation UUID", "type": "string", "required": True}})
|
||||
|
|
@ -195,7 +203,6 @@ class ConversationPinApi(WebApiResource):
|
|||
500: "Internal Server Error",
|
||||
}
|
||||
)
|
||||
@marshal_with(pin_response_fields)
|
||||
def patch(self, app_model, end_user, c_id):
|
||||
app_mode = AppMode.value_of(app_model.mode)
|
||||
if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
|
||||
|
|
@ -208,15 +215,11 @@ class ConversationPinApi(WebApiResource):
|
|||
except ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
|
||||
return {"result": "success"}
|
||||
return ResultResponse(result="success").model_dump(mode="json")
|
||||
|
||||
|
||||
@web_ns.route("/conversations/<uuid:c_id>/unpin")
|
||||
class ConversationUnPinApi(WebApiResource):
|
||||
unpin_response_fields = {
|
||||
"result": fields.String,
|
||||
}
|
||||
|
||||
@web_ns.doc("Unpin Conversation")
|
||||
@web_ns.doc(description="Unpin a specific conversation to remove it from the top of the list.")
|
||||
@web_ns.doc(params={"c_id": {"description": "Conversation UUID", "type": "string", "required": True}})
|
||||
|
|
@ -230,7 +233,6 @@ class ConversationUnPinApi(WebApiResource):
|
|||
500: "Internal Server Error",
|
||||
}
|
||||
)
|
||||
@marshal_with(unpin_response_fields)
|
||||
def patch(self, app_model, end_user, c_id):
|
||||
app_mode = AppMode.value_of(app_model.mode)
|
||||
if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
|
||||
|
|
@ -239,4 +241,4 @@ class ConversationUnPinApi(WebApiResource):
|
|||
conversation_id = str(c_id)
|
||||
WebConversationService.unpin(app_model, conversation_id, end_user)
|
||||
|
||||
return {"result": "success"}
|
||||
return ResultResponse(result="success").model_dump(mode="json")
@@ -2,8 +2,7 @@ import logging
|
|||
from typing import Literal
|
||||
|
||||
from flask import request
|
||||
from flask_restx import fields, marshal_with
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
from pydantic import BaseModel, Field, TypeAdapter, field_validator
|
||||
from werkzeug.exceptions import InternalServerError, NotFound
|
||||
|
||||
from controllers.common.schema import register_schema_models
|
||||
|
|
@ -22,11 +21,10 @@ from controllers.web.wraps import WebApiResource
|
|||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
|
||||
from core.model_runtime.errors.invoke import InvokeError
|
||||
from fields.conversation_fields import message_file_fields
|
||||
from fields.message_fields import agent_thought_fields, feedback_fields, retriever_resource_fields
|
||||
from fields.raws import FilesContainedField
|
||||
from fields.conversation_fields import ResultResponse
|
||||
from fields.message_fields import SuggestedQuestionsResponse, WebMessageInfiniteScrollPagination, WebMessageListItem
|
||||
from libs import helper
|
||||
from libs.helper import TimestampField, uuid_value
|
||||
from libs.helper import uuid_value
|
||||
from models.model import AppMode
|
||||
from services.app_generate_service import AppGenerateService
|
||||
from services.errors.app import MoreLikeThisDisabledError
|
||||
|
|
@ -70,29 +68,6 @@ register_schema_models(web_ns, MessageListQuery, MessageFeedbackPayload, Message
|
|||
|
||||
@web_ns.route("/messages")
|
||||
class MessageListApi(WebApiResource):
|
||||
message_fields = {
|
||||
"id": fields.String,
|
||||
"conversation_id": fields.String,
|
||||
"parent_message_id": fields.String,
|
||||
"inputs": FilesContainedField,
|
||||
"query": fields.String,
|
||||
"answer": fields.String(attribute="re_sign_file_url_answer"),
|
||||
"message_files": fields.List(fields.Nested(message_file_fields)),
|
||||
"feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True),
|
||||
"retriever_resources": fields.List(fields.Nested(retriever_resource_fields)),
|
||||
"created_at": TimestampField,
|
||||
"agent_thoughts": fields.List(fields.Nested(agent_thought_fields)),
|
||||
"metadata": fields.Raw(attribute="message_metadata_dict"),
|
||||
"status": fields.String,
|
||||
"error": fields.String,
|
||||
}
|
||||
|
||||
message_infinite_scroll_pagination_fields = {
|
||||
"limit": fields.Integer,
|
||||
"has_more": fields.Boolean,
|
||||
"data": fields.List(fields.Nested(message_fields)),
|
||||
}
|
||||
|
||||
@web_ns.doc("Get Message List")
|
||||
@web_ns.doc(description="Retrieve paginated list of messages from a conversation in a chat application.")
|
||||
@web_ns.doc(
|
||||
|
|
@ -121,7 +96,6 @@ class MessageListApi(WebApiResource):
|
|||
500: "Internal Server Error",
|
||||
}
|
||||
)
|
||||
@marshal_with(message_infinite_scroll_pagination_fields)
|
||||
def get(self, app_model, end_user):
|
||||
app_mode = AppMode.value_of(app_model.mode)
|
||||
if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
|
||||
|
|
@ -131,9 +105,16 @@ class MessageListApi(WebApiResource):
|
|||
query = MessageListQuery.model_validate(raw_args)
|
||||
|
||||
try:
|
||||
return MessageService.pagination_by_first_id(
|
||||
pagination = MessageService.pagination_by_first_id(
|
||||
app_model, end_user, query.conversation_id, query.first_id, query.limit
|
||||
)
|
||||
adapter = TypeAdapter(WebMessageListItem)
|
||||
items = [adapter.validate_python(message, from_attributes=True) for message in pagination.data]
|
||||
return WebMessageInfiniteScrollPagination(
|
||||
limit=pagination.limit,
|
||||
has_more=pagination.has_more,
|
||||
data=items,
|
||||
).model_dump(mode="json")
|
||||
except ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
except FirstMessageNotExistsError:
|
||||
|
|
@ -142,10 +123,6 @@ class MessageListApi(WebApiResource):
|
|||
|
||||
@web_ns.route("/messages/<uuid:message_id>/feedbacks")
|
||||
class MessageFeedbackApi(WebApiResource):
|
||||
feedback_response_fields = {
|
||||
"result": fields.String,
|
||||
}
|
||||
|
||||
@web_ns.doc("Create Message Feedback")
|
||||
@web_ns.doc(description="Submit feedback (like/dislike) for a specific message.")
|
||||
@web_ns.doc(params={"message_id": {"description": "Message UUID", "type": "string", "required": True}})
|
||||
|
|
@ -170,7 +147,6 @@ class MessageFeedbackApi(WebApiResource):
|
|||
500: "Internal Server Error",
|
||||
}
|
||||
)
|
||||
@marshal_with(feedback_response_fields)
|
||||
def post(self, app_model, end_user, message_id):
|
||||
message_id = str(message_id)
|
||||
|
||||
|
|
@ -187,7 +163,7 @@ class MessageFeedbackApi(WebApiResource):
|
|||
except MessageNotExistsError:
|
||||
raise NotFound("Message Not Exists.")
|
||||
|
||||
return {"result": "success"}
|
||||
return ResultResponse(result="success").model_dump(mode="json")
|
||||
|
||||
|
||||
@web_ns.route("/messages/<uuid:message_id>/more-like-this")
|
||||
|
|
@ -247,10 +223,6 @@ class MessageMoreLikeThisApi(WebApiResource):
|
|||
|
||||
@web_ns.route("/messages/<uuid:message_id>/suggested-questions")
|
||||
class MessageSuggestedQuestionApi(WebApiResource):
|
||||
suggested_questions_response_fields = {
|
||||
"data": fields.List(fields.String),
|
||||
}
|
||||
|
||||
@web_ns.doc("Get Suggested Questions")
|
||||
@web_ns.doc(description="Get suggested follow-up questions after a message (chat apps only).")
|
||||
@web_ns.doc(params={"message_id": {"description": "Message UUID", "type": "string", "required": True}})
|
||||
|
|
@ -264,7 +236,6 @@ class MessageSuggestedQuestionApi(WebApiResource):
|
|||
500: "Internal Server Error",
|
||||
}
|
||||
)
|
||||
@marshal_with(suggested_questions_response_fields)
|
||||
def get(self, app_model, end_user, message_id):
|
||||
app_mode = AppMode.value_of(app_model.mode)
|
||||
if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
|
||||
|
|
@ -277,7 +248,6 @@ class MessageSuggestedQuestionApi(WebApiResource):
|
|||
app_model=app_model, user=end_user, message_id=message_id, invoke_from=InvokeFrom.WEB_APP
|
||||
)
|
||||
# questions is a list of strings, not a list of Message objects
|
||||
# so we can directly return it
|
||||
except MessageNotExistsError:
|
||||
raise NotFound("Message not found")
|
||||
except ConversationNotExistsError:
|
||||
|
|
@ -296,4 +266,4 @@ class MessageSuggestedQuestionApi(WebApiResource):
|
|||
logger.exception("internal server error.")
|
||||
raise InternalServerError()
|
||||
|
||||
return {"data": questions}
|
||||
return SuggestedQuestionsResponse(data=questions).model_dump(mode="json")
@@ -1,40 +1,20 @@
|
|||
from flask_restx import fields, marshal_with, reqparse
|
||||
from flask_restx import reqparse
|
||||
from flask_restx.inputs import int_range
|
||||
from pydantic import TypeAdapter
|
||||
from werkzeug.exceptions import NotFound
|
||||
|
||||
from controllers.web import web_ns
|
||||
from controllers.web.error import NotCompletionAppError
|
||||
from controllers.web.wraps import WebApiResource
|
||||
from fields.conversation_fields import message_file_fields
|
||||
from libs.helper import TimestampField, uuid_value
|
||||
from fields.conversation_fields import ResultResponse
|
||||
from fields.message_fields import SavedMessageInfiniteScrollPagination, SavedMessageItem
|
||||
from libs.helper import uuid_value
|
||||
from services.errors.message import MessageNotExistsError
|
||||
from services.saved_message_service import SavedMessageService
|
||||
|
||||
feedback_fields = {"rating": fields.String}
|
||||
|
||||
message_fields = {
|
||||
"id": fields.String,
|
||||
"inputs": fields.Raw,
|
||||
"query": fields.String,
|
||||
"answer": fields.String,
|
||||
"message_files": fields.List(fields.Nested(message_file_fields)),
|
||||
"feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
|
||||
|
||||
@web_ns.route("/saved-messages")
|
||||
class SavedMessageListApi(WebApiResource):
|
||||
saved_message_infinite_scroll_pagination_fields = {
|
||||
"limit": fields.Integer,
|
||||
"has_more": fields.Boolean,
|
||||
"data": fields.List(fields.Nested(message_fields)),
|
||||
}
|
||||
|
||||
post_response_fields = {
|
||||
"result": fields.String,
|
||||
}
|
||||
|
||||
@web_ns.doc("Get Saved Messages")
|
||||
@web_ns.doc(description="Retrieve paginated list of saved messages for a completion application.")
|
||||
@web_ns.doc(
|
||||
|
|
@ -58,7 +38,6 @@ class SavedMessageListApi(WebApiResource):
|
|||
500: "Internal Server Error",
|
||||
}
|
||||
)
|
||||
@marshal_with(saved_message_infinite_scroll_pagination_fields)
|
||||
def get(self, app_model, end_user):
|
||||
if app_model.mode != "completion":
|
||||
raise NotCompletionAppError()
|
||||
|
|
@ -70,7 +49,14 @@ class SavedMessageListApi(WebApiResource):
|
|||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
return SavedMessageService.pagination_by_last_id(app_model, end_user, args["last_id"], args["limit"])
|
||||
pagination = SavedMessageService.pagination_by_last_id(app_model, end_user, args["last_id"], args["limit"])
|
||||
adapter = TypeAdapter(SavedMessageItem)
|
||||
items = [adapter.validate_python(message, from_attributes=True) for message in pagination.data]
|
||||
return SavedMessageInfiniteScrollPagination(
|
||||
limit=pagination.limit,
|
||||
has_more=pagination.has_more,
|
||||
data=items,
|
||||
).model_dump(mode="json")
|
||||
|
||||
@web_ns.doc("Save Message")
|
||||
@web_ns.doc(description="Save a specific message for later reference.")
|
||||
|
|
@ -89,7 +75,6 @@ class SavedMessageListApi(WebApiResource):
|
|||
500: "Internal Server Error",
|
||||
}
|
||||
)
|
||||
@marshal_with(post_response_fields)
|
||||
def post(self, app_model, end_user):
|
||||
if app_model.mode != "completion":
|
||||
raise NotCompletionAppError()
|
||||
|
|
@ -102,15 +87,11 @@ class SavedMessageListApi(WebApiResource):
|
|||
except MessageNotExistsError:
|
||||
raise NotFound("Message Not Exists.")
|
||||
|
||||
return {"result": "success"}
|
||||
return ResultResponse(result="success").model_dump(mode="json")
|
||||
|
||||
|
||||
@web_ns.route("/saved-messages/<uuid:message_id>")
|
||||
class SavedMessageApi(WebApiResource):
|
||||
delete_response_fields = {
|
||||
"result": fields.String,
|
||||
}
|
||||
|
||||
@web_ns.doc("Delete Saved Message")
|
||||
@web_ns.doc(description="Remove a message from saved messages.")
|
||||
@web_ns.doc(params={"message_id": {"description": "Message UUID to delete", "type": "string", "required": True}})
|
||||
|
|
@ -124,7 +105,6 @@ class SavedMessageApi(WebApiResource):
|
|||
500: "Internal Server Error",
|
||||
}
|
||||
)
|
||||
@marshal_with(delete_response_fields)
|
||||
def delete(self, app_model, end_user, message_id):
|
||||
message_id = str(message_id)
|
||||
|
||||
|
|
@ -133,4 +113,4 @@ class SavedMessageApi(WebApiResource):
|
|||
|
||||
SavedMessageService.delete(app_model, end_user, message_id)
|
||||
|
||||
return {"result": "success"}, 204
|
||||
return ResultResponse(result="success").model_dump(mode="json"), 204
@@ -30,7 +30,6 @@ class SimpleModelProviderEntity(BaseModel):
|
|||
label: I18nObject
|
||||
icon_small: I18nObject | None = None
|
||||
icon_small_dark: I18nObject | None = None
|
||||
icon_large: I18nObject | None = None
|
||||
supported_model_types: list[ModelType]
|
||||
|
||||
def __init__(self, provider_entity: ProviderEntity):
|
||||
|
|
@ -44,7 +43,6 @@ class SimpleModelProviderEntity(BaseModel):
|
|||
label=provider_entity.label,
|
||||
icon_small=provider_entity.icon_small,
|
||||
icon_small_dark=provider_entity.icon_small_dark,
|
||||
icon_large=provider_entity.icon_large,
|
||||
supported_model_types=provider_entity.supported_model_types,
|
||||
)
|
||||
|
||||
|
|
@ -94,7 +92,6 @@ class DefaultModelProviderEntity(BaseModel):
|
|||
provider: str
|
||||
label: I18nObject
|
||||
icon_small: I18nObject | None = None
|
||||
icon_large: I18nObject | None = None
|
||||
supported_model_types: Sequence[ModelType] = []
@@ -88,7 +88,41 @@ def _get_user_provided_host_header(headers: dict | None) -> str | None:
|
|||
return None
|
||||
|
||||
|
||||
def _inject_trace_headers(headers: dict | None) -> dict:
|
||||
"""
|
||||
Inject W3C traceparent header for distributed tracing.
|
||||
|
||||
When OTEL is enabled, HTTPXClientInstrumentor handles trace propagation automatically.
|
||||
When OTEL is disabled, we manually inject the traceparent header.
|
||||
"""
|
||||
if headers is None:
|
||||
headers = {}
|
||||
|
||||
# Skip if already present (case-insensitive check)
|
||||
for key in headers:
|
||||
if key.lower() == "traceparent":
|
||||
return headers
|
||||
|
||||
# Skip if OTEL is enabled - HTTPXClientInstrumentor handles this automatically
|
||||
if dify_config.ENABLE_OTEL:
|
||||
return headers
|
||||
|
||||
# Generate and inject traceparent for non-OTEL scenarios
|
||||
try:
|
||||
from core.helper.trace_id_helper import generate_traceparent_header
|
||||
|
||||
traceparent = generate_traceparent_header()
|
||||
if traceparent:
|
||||
headers["traceparent"] = traceparent
|
||||
except Exception:
|
||||
# Silently ignore errors to avoid breaking requests
|
||||
logger.debug("Failed to generate traceparent header", exc_info=True)
|
||||
|
||||
return headers
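Note: since HTTPXClientInstrumentor already propagates context when OTEL is on, _inject_trace_headers only adds a traceparent when OTEL is off and the caller has not supplied one. A standalone illustration of that contract (not an import of the module above; the traceparent value is just an example):

def inject_trace_headers_sketch(headers: dict | None, otel_enabled: bool, traceparent: str) -> dict:
    # Contract of the helper above: never overwrite, never inject when OTEL instruments the client.
    headers = dict(headers or {})
    if any(key.lower() == "traceparent" for key in headers):
        return headers  # caller already set one
    if otel_enabled:
        return headers  # HTTPXClientInstrumentor adds it at send time
    headers["traceparent"] = traceparent
    return headers

print(inject_trace_headers_sketch(
    {"Authorization": "Bearer ..."},
    otel_enabled=False,
    traceparent="00-5b8aa5a2d2c872e8321cf37308d69df2-051581bf3bb55c45-01",
))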
|
||||
|
||||
|
||||
def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
|
||||
# Convert requests-style allow_redirects to httpx-style follow_redirects
|
||||
if "allow_redirects" in kwargs:
|
||||
allow_redirects = kwargs.pop("allow_redirects")
|
||||
if "follow_redirects" not in kwargs:
|
||||
|
|
@ -106,18 +140,21 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
|
|||
verify_option = kwargs.pop("ssl_verify", dify_config.HTTP_REQUEST_NODE_SSL_VERIFY)
|
||||
client = _get_ssrf_client(verify_option)
|
||||
|
||||
# Inject traceparent header for distributed tracing (when OTEL is not enabled)
|
||||
headers = kwargs.get("headers") or {}
|
||||
headers = _inject_trace_headers(headers)
|
||||
kwargs["headers"] = headers
|
||||
|
||||
# Preserve user-provided Host header
|
||||
# When using a forward proxy, httpx may override the Host header based on the URL.
|
||||
# We extract and preserve any explicitly set Host header to support virtual hosting.
|
||||
headers = kwargs.get("headers", {})
|
||||
user_provided_host = _get_user_provided_host_header(headers)
|
||||
|
||||
retries = 0
|
||||
while retries <= max_retries:
|
||||
try:
|
||||
# Build the request manually to preserve the Host header
|
||||
# httpx may override the Host header when using a proxy, so we use
|
||||
# the request API to explicitly set headers before sending
|
||||
# Preserve the user-provided Host header
|
||||
# httpx may override the Host header when using a proxy
|
||||
headers = {k: v for k, v in headers.items() if k.lower() != "host"}
|
||||
if user_provided_host is not None:
|
||||
headers["host"] = user_provided_host
@@ -103,3 +103,60 @@ def parse_traceparent_header(traceparent: str) -> str | None:
|
|||
if len(parts) == 4 and len(parts[1]) == 32:
|
||||
return parts[1]
|
||||
return None
|
||||
|
||||
|
||||
def get_span_id_from_otel_context() -> str | None:
|
||||
"""
|
||||
Retrieve the current span ID from the active OpenTelemetry trace context.
|
||||
|
||||
Returns:
|
||||
A 16-character hex string representing the span ID, or None if not available.
|
||||
"""
|
||||
try:
|
||||
from opentelemetry.trace import get_current_span
|
||||
from opentelemetry.trace.span import INVALID_SPAN_ID
|
||||
|
||||
span = get_current_span()
|
||||
if not span:
|
||||
return None
|
||||
|
||||
span_context = span.get_span_context()
|
||||
if not span_context or span_context.span_id == INVALID_SPAN_ID:
|
||||
return None
|
||||
|
||||
return f"{span_context.span_id:016x}"
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def generate_traceparent_header() -> str | None:
|
||||
"""
|
||||
Generate a W3C traceparent header from the current context.
|
||||
|
||||
Uses OpenTelemetry context if available, otherwise uses the
|
||||
ContextVar-based trace_id from the logging context.
|
||||
|
||||
Format: {version}-{trace_id}-{span_id}-{flags}
|
||||
Example: 00-5b8aa5a2d2c872e8321cf37308d69df2-051581bf3bb55c45-01
|
||||
|
||||
Returns:
|
||||
A valid traceparent header string, or None if generation fails.
|
||||
"""
|
||||
import uuid
|
||||
|
||||
# Try OTEL context first
|
||||
trace_id = get_trace_id_from_otel_context()
|
||||
span_id = get_span_id_from_otel_context()
|
||||
|
||||
if trace_id and span_id:
|
||||
return f"00-{trace_id}-{span_id}-01"
|
||||
|
||||
# Fallback: use ContextVar-based trace_id or generate new one
|
||||
from core.logging.context import get_trace_id as get_logging_trace_id
|
||||
|
||||
trace_id = get_logging_trace_id() or uuid.uuid4().hex
|
||||
|
||||
# Generate a new span_id (16 hex chars)
|
||||
span_id = uuid.uuid4().hex[:16]
|
||||
|
||||
return f"00-{trace_id}-{span_id}-01"
@@ -0,0 +1,20 @@
|
|||
"""Structured logging components for Dify."""
|
||||
|
||||
from core.logging.context import (
|
||||
clear_request_context,
|
||||
get_request_id,
|
||||
get_trace_id,
|
||||
init_request_context,
|
||||
)
|
||||
from core.logging.filters import IdentityContextFilter, TraceContextFilter
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
__all__ = [
|
||||
"IdentityContextFilter",
|
||||
"StructuredJSONFormatter",
|
||||
"TraceContextFilter",
|
||||
"clear_request_context",
|
||||
"get_request_id",
|
||||
"get_trace_id",
|
||||
"init_request_context",
|
||||
]
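Note: the package exports everything needed to wire structured logging onto a plain handler. A hedged sketch of that wiring, assuming the core.logging package above is importable; in Dify itself the handler setup happens in the logging extension:

import logging

from core.logging import (
    IdentityContextFilter,
    StructuredJSONFormatter,
    TraceContextFilter,
    init_request_context,
)

handler = logging.StreamHandler()
handler.setFormatter(StructuredJSONFormatter())
handler.addFilter(TraceContextFilter())     # stamps trace_id / span_id / req_id
handler.addFilter(IdentityContextFilter())  # stamps tenant_id / user_id / user_type

logger = logging.getLogger("structured-demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

init_request_context()  # normally done in the before_request hook shown earlier
logger.info("structured hello", extra={"attributes": {"feature": "demo"}})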
@@ -0,0 +1,35 @@
|
|||
"""Request context for logging - framework agnostic.
|
||||
|
||||
This module provides request-scoped context variables for logging,
|
||||
using Python's contextvars for thread-safe and async-safe storage.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from contextvars import ContextVar
|
||||
|
||||
_request_id: ContextVar[str] = ContextVar("log_request_id", default="")
|
||||
_trace_id: ContextVar[str] = ContextVar("log_trace_id", default="")
|
||||
|
||||
|
||||
def get_request_id() -> str:
|
||||
"""Get current request ID (10 hex chars)."""
|
||||
return _request_id.get()
|
||||
|
||||
|
||||
def get_trace_id() -> str:
|
||||
"""Get fallback trace ID when OTEL is unavailable (32 hex chars)."""
|
||||
return _trace_id.get()
|
||||
|
||||
|
||||
def init_request_context() -> None:
|
||||
"""Initialize request context. Call at start of each request."""
|
||||
req_id = uuid.uuid4().hex[:10]
|
||||
trace_id = uuid.uuid5(uuid.NAMESPACE_DNS, req_id).hex
|
||||
_request_id.set(req_id)
|
||||
_trace_id.set(trace_id)
|
||||
|
||||
|
||||
def clear_request_context() -> None:
|
||||
"""Clear request context. Call at end of request (optional)."""
|
||||
_request_id.set("")
|
||||
_trace_id.set("")
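Note: init_request_context() stores a 10-character request id and a deterministic 32-hex trace id derived from it via uuid5, so the two stay correlated for the lifetime of the request. A standalone sketch of that derivation:

import uuid

req_id = uuid.uuid4().hex[:10]                         # what init_request_context() stores as request id
trace_id = uuid.uuid5(uuid.NAMESPACE_DNS, req_id).hex  # deterministic 32-hex fallback trace id

assert len(req_id) == 10 and len(trace_id) == 32
# Re-deriving from the same req_id always yields the same trace_id, so the two stay correlated.
assert uuid.uuid5(uuid.NAMESPACE_DNS, req_id).hex == trace_id
print(req_id, trace_id)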
@@ -0,0 +1,94 @@
|
|||
"""Logging filters for structured logging."""
|
||||
|
||||
import contextlib
|
||||
import logging
|
||||
|
||||
import flask
|
||||
|
||||
from core.logging.context import get_request_id, get_trace_id
|
||||
|
||||
|
||||
class TraceContextFilter(logging.Filter):
|
||||
"""
|
||||
Filter that adds trace_id and span_id to log records.
|
||||
Integrates with OpenTelemetry when available, falls back to ContextVar-based trace_id.
|
||||
"""
|
||||
|
||||
def filter(self, record: logging.LogRecord) -> bool:
|
||||
# Get trace context from OpenTelemetry
|
||||
trace_id, span_id = self._get_otel_context()
|
||||
|
||||
# Set trace_id (fallback to ContextVar if no OTEL context)
|
||||
if trace_id:
|
||||
record.trace_id = trace_id
|
||||
else:
|
||||
record.trace_id = get_trace_id()
|
||||
|
||||
record.span_id = span_id or ""
|
||||
|
||||
# For backward compatibility, also set req_id
|
||||
record.req_id = get_request_id()
|
||||
|
||||
return True
|
||||
|
||||
def _get_otel_context(self) -> tuple[str, str]:
|
||||
"""Extract trace_id and span_id from OpenTelemetry context."""
|
||||
with contextlib.suppress(Exception):
|
||||
from opentelemetry.trace import get_current_span
|
||||
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID
|
||||
|
||||
span = get_current_span()
|
||||
if span and span.get_span_context():
|
||||
ctx = span.get_span_context()
|
||||
if ctx.is_valid and ctx.trace_id != INVALID_TRACE_ID:
|
||||
trace_id = f"{ctx.trace_id:032x}"
|
||||
span_id = f"{ctx.span_id:016x}" if ctx.span_id != INVALID_SPAN_ID else ""
|
||||
return trace_id, span_id
|
||||
return "", ""
|
||||
|
||||
|
||||
class IdentityContextFilter(logging.Filter):
|
||||
"""
|
||||
Filter that adds user identity context to log records.
|
||||
Extracts tenant_id, user_id, and user_type from Flask-Login current_user.
|
||||
"""
|
||||
|
||||
def filter(self, record: logging.LogRecord) -> bool:
|
||||
identity = self._extract_identity()
|
||||
record.tenant_id = identity.get("tenant_id", "")
|
||||
record.user_id = identity.get("user_id", "")
|
||||
record.user_type = identity.get("user_type", "")
|
||||
return True
|
||||
|
||||
def _extract_identity(self) -> dict[str, str]:
|
||||
"""Extract identity from current_user if in request context."""
|
||||
try:
|
||||
if not flask.has_request_context():
|
||||
return {}
|
||||
from flask_login import current_user
|
||||
|
||||
# Check if user is authenticated using the proxy
|
||||
if not current_user.is_authenticated:
|
||||
return {}
|
||||
|
||||
# Access the underlying user object
|
||||
user = current_user
|
||||
|
||||
from models import Account
|
||||
from models.model import EndUser
|
||||
|
||||
identity: dict[str, str] = {}
|
||||
|
||||
if isinstance(user, Account):
|
||||
if user.current_tenant_id:
|
||||
identity["tenant_id"] = user.current_tenant_id
|
||||
identity["user_id"] = user.id
|
||||
identity["user_type"] = "account"
|
||||
elif isinstance(user, EndUser):
|
||||
identity["tenant_id"] = user.tenant_id
|
||||
identity["user_id"] = user.id
|
||||
identity["user_type"] = user.type or "end_user"
|
||||
|
||||
return identity
|
||||
except Exception:
|
||||
return {}
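Note: TraceContextFilter guarantees trace_id, span_id and req_id attributes on every record, which keeps the %(req_id)s text format working. A minimal sketch using a stand-in filter with fixed ids to show how those attributes reach a format string (real values come from OTEL or the request context):

import logging

class FixedTraceContextFilter(logging.Filter):
    # Stand-in for TraceContextFilter: stamps fixed ids instead of reading OTEL or context vars.
    def filter(self, record: logging.LogRecord) -> bool:
        record.trace_id = "5b8aa5a2d2c872e8321cf37308d69df2"
        record.span_id = "051581bf3bb55c45"
        record.req_id = "a1b2c3d4e5"
        return True

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(levelname)s [%(req_id)s] trace=%(trace_id)s %(message)s"))
handler.addFilter(FixedTraceContextFilter())

logger = logging.getLogger("filter-demo")
logger.addHandler(handler)
logger.warning("hello")
# WARNING [a1b2c3d4e5] trace=5b8aa5a2d2c872e8321cf37308d69df2 hello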
@@ -0,0 +1,107 @@
|
|||
"""Structured JSON log formatter for Dify."""
|
||||
|
||||
import logging
|
||||
import traceback
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
import orjson
|
||||
|
||||
from configs import dify_config
|
||||
|
||||
|
||||
class StructuredJSONFormatter(logging.Formatter):
|
||||
"""
|
||||
JSON log formatter following the specified schema:
|
||||
{
|
||||
"ts": "ISO 8601 UTC",
|
||||
"severity": "INFO|ERROR|WARN|DEBUG",
|
||||
"service": "service name",
|
||||
"caller": "file:line",
|
||||
"trace_id": "hex 32",
|
||||
"span_id": "hex 16",
|
||||
"identity": { "tenant_id", "user_id", "user_type" },
|
||||
"message": "log message",
|
||||
"attributes": { ... },
|
||||
"stack_trace": "..."
|
||||
}
|
||||
"""
|
||||
|
||||
SEVERITY_MAP: dict[int, str] = {
|
||||
logging.DEBUG: "DEBUG",
|
||||
logging.INFO: "INFO",
|
||||
logging.WARNING: "WARN",
|
||||
logging.ERROR: "ERROR",
|
||||
logging.CRITICAL: "ERROR",
|
||||
}
|
||||
|
||||
def __init__(self, service_name: str | None = None):
|
||||
super().__init__()
|
||||
self._service_name = service_name or dify_config.APPLICATION_NAME
|
||||
|
||||
def format(self, record: logging.LogRecord) -> str:
|
||||
log_dict = self._build_log_dict(record)
|
||||
try:
|
||||
return orjson.dumps(log_dict).decode("utf-8")
|
||||
except TypeError:
|
||||
# Fallback: convert non-serializable objects to string
|
||||
import json
|
||||
|
||||
return json.dumps(log_dict, default=str, ensure_ascii=False)
|
||||
|
||||
def _build_log_dict(self, record: logging.LogRecord) -> dict[str, Any]:
|
||||
# Core fields
|
||||
log_dict: dict[str, Any] = {
|
||||
"ts": datetime.now(UTC).isoformat(timespec="milliseconds").replace("+00:00", "Z"),
|
||||
"severity": self.SEVERITY_MAP.get(record.levelno, "INFO"),
|
||||
"service": self._service_name,
|
||||
"caller": f"{record.filename}:{record.lineno}",
|
||||
"message": record.getMessage(),
|
||||
}
|
||||
|
||||
# Trace context (from TraceContextFilter)
|
||||
trace_id = getattr(record, "trace_id", "")
|
||||
span_id = getattr(record, "span_id", "")
|
||||
|
||||
if trace_id:
|
||||
log_dict["trace_id"] = trace_id
|
||||
if span_id:
|
||||
log_dict["span_id"] = span_id
|
||||
|
||||
# Identity context (from IdentityContextFilter)
|
||||
identity = self._extract_identity(record)
|
||||
if identity:
|
||||
log_dict["identity"] = identity
|
||||
|
||||
# Dynamic attributes
|
||||
attributes = getattr(record, "attributes", None)
|
||||
if attributes:
|
||||
log_dict["attributes"] = attributes
|
||||
|
||||
# Stack trace for errors with exceptions
|
||||
if record.exc_info and record.levelno >= logging.ERROR:
|
||||
log_dict["stack_trace"] = self._format_exception(record.exc_info)
|
||||
|
||||
return log_dict
|
||||
|
||||
def _extract_identity(self, record: logging.LogRecord) -> dict[str, str] | None:
|
||||
tenant_id = getattr(record, "tenant_id", None)
|
||||
user_id = getattr(record, "user_id", None)
|
||||
user_type = getattr(record, "user_type", None)
|
||||
|
||||
if not any([tenant_id, user_id, user_type]):
|
||||
return None
|
||||
|
||||
identity: dict[str, str] = {}
|
||||
if tenant_id:
|
||||
identity["tenant_id"] = tenant_id
|
||||
if user_id:
|
||||
identity["user_id"] = user_id
|
||||
if user_type:
|
||||
identity["user_type"] = user_type
|
||||
return identity
|
||||
|
||||
def _format_exception(self, exc_info: tuple[Any, ...]) -> str:
|
||||
if exc_info and exc_info[0] is not None:
|
||||
return "".join(traceback.format_exception(*exc_info))
|
||||
return ""
|
||||
|
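For illustration, the formatter above can be exercised on a hand-built record; the "attributes" extra shown here is an assumption about how callers might pass ad-hoc fields, and trace_id is normally populated by TraceContextFilter rather than set by hand.

# Illustrative only: formats a single record with StructuredJSONFormatter and
# prints the resulting JSON line.
import logging

from core.logging.structured_formatter import StructuredJSONFormatter

formatter = StructuredJSONFormatter(service_name="demo-api")
record = logging.LogRecord(
    name="demo", level=logging.INFO, pathname="demo.py", lineno=10,
    msg="user signed in", args=(), exc_info=None,
)
record.trace_id = "0af7651916cd43dd8448eb211c80319c"  # normally set by TraceContextFilter
record.attributes = {"login_method": "email"}         # hypothetical extra attribute
print(formatter.format(record))
# -> {"ts": "...Z", "severity": "INFO", "service": "demo-api", "caller": "demo.py:10", ...}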
|
@ -100,7 +100,6 @@ class SimpleProviderEntity(BaseModel):
|
|||
label: I18nObject
|
||||
icon_small: I18nObject | None = None
|
||||
icon_small_dark: I18nObject | None = None
|
||||
icon_large: I18nObject | None = None
|
||||
supported_model_types: Sequence[ModelType]
|
||||
models: list[AIModelEntity] = []
|
||||
|
||||
|
|
@ -123,7 +122,6 @@ class ProviderEntity(BaseModel):
|
|||
label: I18nObject
|
||||
description: I18nObject | None = None
|
||||
icon_small: I18nObject | None = None
|
||||
icon_large: I18nObject | None = None
|
||||
icon_small_dark: I18nObject | None = None
|
||||
background: str | None = None
|
||||
help: ProviderHelpEntity | None = None
|
||||
|
|
@ -157,7 +155,6 @@ class ProviderEntity(BaseModel):
|
|||
provider=self.provider,
|
||||
label=self.label,
|
||||
icon_small=self.icon_small,
|
||||
icon_large=self.icon_large,
|
||||
supported_model_types=self.supported_model_types,
|
||||
models=self.models,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -285,7 +285,7 @@ class ModelProviderFactory:
|
|||
"""
|
||||
Get provider icon
|
||||
:param provider: provider name
|
||||
:param icon_type: icon type (icon_small or icon_large)
|
||||
:param icon_type: icon type (icon_small or icon_small_dark)
|
||||
:param lang: language (zh_Hans or en_US)
|
||||
:return: provider icon
|
||||
"""
|
||||
|
|
@ -309,13 +309,7 @@ class ModelProviderFactory:
|
|||
else:
|
||||
file_name = provider_schema.icon_small_dark.en_US
|
||||
else:
|
||||
if not provider_schema.icon_large:
|
||||
raise ValueError(f"Provider {provider} does not have large icon.")
|
||||
|
||||
if lang.lower() == "zh_hans":
|
||||
file_name = provider_schema.icon_large.zh_Hans
|
||||
else:
|
||||
file_name = provider_schema.icon_large.en_US
|
||||
raise ValueError(f"Unsupported icon type: {icon_type}.")
|
||||
|
||||
if not file_name:
|
||||
raise ValueError(f"Provider {provider} does not have icon.")
|
||||
|
|
|
|||
|
|
@ -103,6 +103,9 @@ class BasePluginClient:
|
|||
prepared_headers["X-Api-Key"] = dify_config.PLUGIN_DAEMON_KEY
|
||||
prepared_headers.setdefault("Accept-Encoding", "gzip, deflate, br")
|
||||
|
||||
# Inject traceparent header for distributed tracing
|
||||
self._inject_trace_headers(prepared_headers)
|
||||
|
||||
prepared_data: bytes | dict[str, Any] | str | None = (
|
||||
data if isinstance(data, (bytes, str, dict)) or data is None else None
|
||||
)
|
||||
|
|
@ -114,6 +117,31 @@ class BasePluginClient:
|
|||
|
||||
return str(url), prepared_headers, prepared_data, params, files
|
||||
|
||||
def _inject_trace_headers(self, headers: dict[str, str]) -> None:
|
||||
"""
|
||||
Inject W3C traceparent header for distributed tracing.
|
||||
|
||||
This ensures trace context is propagated to the plugin daemon even if
|
||||
HTTPXClientInstrumentor doesn't cover module-level httpx functions.
|
||||
"""
|
||||
if not dify_config.ENABLE_OTEL:
|
||||
return
|
||||
|
||||
import contextlib
|
||||
|
||||
# Skip if already present (case-insensitive check)
|
||||
for key in headers:
|
||||
if key.lower() == "traceparent":
|
||||
return
|
||||
|
||||
# Inject traceparent - works as a fallback when OTEL instrumentation doesn't cover this call
|
||||
with contextlib.suppress(Exception):
|
||||
from core.helper.trace_id_helper import generate_traceparent_header
|
||||
|
||||
traceparent = generate_traceparent_header()
|
||||
if traceparent:
|
||||
headers["traceparent"] = traceparent
|
||||
|
||||
def _stream_request(
|
||||
self,
|
||||
method: str,
|
||||
|
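A rough sketch of what a W3C traceparent header looks like when built from the current OpenTelemetry span; generate_traceparent_header() in the codebase may differ in detail, so treat this as an assumption-laden illustration of the header format only.

from opentelemetry.trace import get_current_span
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID


def build_traceparent() -> str | None:
    # version 00, 32-hex trace id, 16-hex span id, flags byte from trace_flags
    ctx = get_current_span().get_span_context()
    if ctx.trace_id == INVALID_TRACE_ID or ctx.span_id == INVALID_SPAN_ID:
        return None
    return f"00-{ctx.trace_id:032x}-{ctx.span_id:016x}-{int(ctx.trace_flags):02x}"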
|
|
|||
|
|
@ -331,7 +331,6 @@ class ProviderManager:
|
|||
provider=provider_schema.provider,
|
||||
label=provider_schema.label,
|
||||
icon_small=provider_schema.icon_small,
|
||||
icon_large=provider_schema.icon_large,
|
||||
supported_model_types=provider_schema.supported_model_types,
|
||||
),
|
||||
)
|
||||
|
|
|
|||
|
|
@ -46,7 +46,11 @@ def _get_celery_ssl_options() -> dict[str, Any] | None:
|
|||
def init_app(app: DifyApp) -> Celery:
|
||||
class FlaskTask(Task):
|
||||
def __call__(self, *args: object, **kwargs: object) -> object:
|
||||
from core.logging.context import init_request_context
|
||||
|
||||
with app.app_context():
|
||||
# Initialize logging context for this task (similar to before_request in Flask)
|
||||
init_request_context()
|
||||
return self.run(*args, **kwargs)
|
||||
|
||||
broker_transport_options = {}
|
||||
|
|
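A hedged sketch of why init_request_context matters inside FlaskTask: each task invocation gets fresh request/trace identifiers, so log lines from concurrent workers do not share IDs. The task name and arguments below are hypothetical.

# With the FlaskTask wrapper above, init_request_context() has already run by the
# time this body executes, so any log line emitted here carries a per-task request id.
import logging

from celery import shared_task

logger = logging.getLogger(__name__)


@shared_task
def reindex_document(document_id: str) -> None:
    # req_id/trace_id are injected by the logging filters configured at startup
    logger.info("reindexing document %s", document_id)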
|
|||
|
|
@ -1,18 +1,19 @@
|
|||
"""Logging extension for Dify Flask application."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import uuid
|
||||
from logging.handlers import RotatingFileHandler
|
||||
|
||||
import flask
|
||||
|
||||
from configs import dify_config
|
||||
from core.helper.trace_id_helper import get_trace_id_from_otel_context
|
||||
from dify_app import DifyApp
|
||||
|
||||
|
||||
def init_app(app: DifyApp):
|
||||
"""Initialize logging with support for text or JSON format."""
|
||||
log_handlers: list[logging.Handler] = []
|
||||
|
||||
# File handler
|
||||
log_file = dify_config.LOG_FILE
|
||||
if log_file:
|
||||
log_dir = os.path.dirname(log_file)
|
||||
|
|
@ -25,27 +26,53 @@ def init_app(app: DifyApp):
|
|||
)
|
||||
)
|
||||
|
||||
# Always add StreamHandler to log to console
|
||||
# Console handler
|
||||
sh = logging.StreamHandler(sys.stdout)
|
||||
log_handlers.append(sh)
|
||||
|
||||
# Apply RequestIdFilter to all handlers
|
||||
for handler in log_handlers:
|
||||
handler.addFilter(RequestIdFilter())
|
||||
# Apply filters to all handlers
|
||||
from core.logging.filters import IdentityContextFilter, TraceContextFilter
|
||||
|
||||
for handler in log_handlers:
|
||||
handler.addFilter(TraceContextFilter())
|
||||
handler.addFilter(IdentityContextFilter())
|
||||
|
||||
# Configure formatter based on format type
|
||||
formatter = _create_formatter()
|
||||
for handler in log_handlers:
|
||||
handler.setFormatter(formatter)
|
||||
|
||||
# Configure root logger
|
||||
logging.basicConfig(
|
||||
level=dify_config.LOG_LEVEL,
|
||||
format=dify_config.LOG_FORMAT,
|
||||
datefmt=dify_config.LOG_DATEFORMAT,
|
||||
handlers=log_handlers,
|
||||
force=True,
|
||||
)
|
||||
|
||||
# Apply RequestIdFormatter to all handlers
|
||||
apply_request_id_formatter()
|
||||
|
||||
# Disable propagation for noisy loggers to avoid duplicate logs
|
||||
logging.getLogger("sqlalchemy.engine").propagate = False
|
||||
|
||||
# Apply timezone if specified (only for text format)
|
||||
if dify_config.LOG_OUTPUT_FORMAT == "text":
|
||||
_apply_timezone(log_handlers)
|
||||
|
||||
|
||||
def _create_formatter() -> logging.Formatter:
|
||||
"""Create appropriate formatter based on configuration."""
|
||||
if dify_config.LOG_OUTPUT_FORMAT == "json":
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
return StructuredJSONFormatter()
|
||||
else:
|
||||
# Text format - use the existing pattern with a backward-compatible formatter
|
||||
return _TextFormatter(
|
||||
fmt=dify_config.LOG_FORMAT,
|
||||
datefmt=dify_config.LOG_DATEFORMAT,
|
||||
)
|
||||
|
||||
|
||||
def _apply_timezone(handlers: list[logging.Handler]):
|
||||
"""Apply timezone conversion to text formatters."""
|
||||
log_tz = dify_config.LOG_TZ
|
||||
if log_tz:
|
||||
from datetime import datetime
|
||||
|
|
@ -57,34 +84,51 @@ def init_app(app: DifyApp):
|
|||
def time_converter(seconds):
|
||||
return datetime.fromtimestamp(seconds, tz=timezone).timetuple()
|
||||
|
||||
for handler in logging.root.handlers:
|
||||
for handler in handlers:
|
||||
if handler.formatter:
|
||||
handler.formatter.converter = time_converter
|
||||
handler.formatter.converter = time_converter # type: ignore[attr-defined]
|
||||
|
||||
|
||||
def get_request_id():
|
||||
if getattr(flask.g, "request_id", None):
|
||||
return flask.g.request_id
|
||||
class _TextFormatter(logging.Formatter):
|
||||
"""Text formatter that ensures trace_id and req_id are always present."""
|
||||
|
||||
new_uuid = uuid.uuid4().hex[:10]
|
||||
flask.g.request_id = new_uuid
|
||||
|
||||
return new_uuid
|
||||
def format(self, record: logging.LogRecord) -> str:
|
||||
if not hasattr(record, "req_id"):
|
||||
record.req_id = ""
|
||||
if not hasattr(record, "trace_id"):
|
||||
record.trace_id = ""
|
||||
if not hasattr(record, "span_id"):
|
||||
record.span_id = ""
|
||||
return super().format(record)
|
||||
|
||||
|
||||
def get_request_id() -> str:
|
||||
"""Get request ID for current request context.
|
||||
|
||||
Deprecated: Use core.logging.context.get_request_id() directly.
|
||||
"""
|
||||
from core.logging.context import get_request_id as _get_request_id
|
||||
|
||||
return _get_request_id()
|
||||
|
||||
|
||||
# Backward compatibility aliases
|
||||
class RequestIdFilter(logging.Filter):
|
||||
# This is a logging filter that makes the request ID available for use in
|
||||
# the logging format. Note that we're checking if we're in a request
|
||||
# context, as we may want to log things before Flask is fully loaded.
|
||||
def filter(self, record):
|
||||
trace_id = get_trace_id_from_otel_context() or ""
|
||||
record.req_id = get_request_id() if flask.has_request_context() else ""
|
||||
record.trace_id = trace_id
|
||||
"""Deprecated: Use TraceContextFilter from core.logging.filters instead."""
|
||||
|
||||
def filter(self, record: logging.LogRecord) -> bool:
|
||||
from core.logging.context import get_request_id as _get_request_id
|
||||
from core.logging.context import get_trace_id as _get_trace_id
|
||||
|
||||
record.req_id = _get_request_id()
|
||||
record.trace_id = _get_trace_id()
|
||||
return True
|
||||
|
||||
|
||||
class RequestIdFormatter(logging.Formatter):
|
||||
def format(self, record):
|
||||
"""Deprecated: Use _TextFormatter instead."""
|
||||
|
||||
def format(self, record: logging.LogRecord) -> str:
|
||||
if not hasattr(record, "req_id"):
|
||||
record.req_id = ""
|
||||
if not hasattr(record, "trace_id"):
|
||||
|
|
@ -93,6 +137,7 @@ class RequestIdFormatter(logging.Formatter):
|
|||
|
||||
|
||||
def apply_request_id_formatter():
|
||||
"""Deprecated: Formatter is now applied in init_app."""
|
||||
for handler in logging.root.handlers:
|
||||
if handler.formatter:
|
||||
handler.formatter = RequestIdFormatter(dify_config.LOG_FORMAT, dify_config.LOG_DATEFORMAT)
|
||||
|
|
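A small standalone mirror of the text/JSON selection logic above, not the actual helper: "json" switches to the structured formatter, anything else keeps the classic text layout driven by LOG_FORMAT and LOG_DATEFORMAT.

import logging

from core.logging.structured_formatter import StructuredJSONFormatter


def make_formatter(output_format: str, fmt: str, datefmt: str) -> logging.Formatter:
    # Mirrors _create_formatter(): JSON for structured output, plain text otherwise.
    if output_format == "json":
        return StructuredJSONFormatter()
    return logging.Formatter(fmt=fmt, datefmt=datefmt)


handler = logging.StreamHandler()
handler.setFormatter(make_formatter("json", "%(asctime)s %(message)s", "%Y-%m-%d %H:%M:%S"))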
|
|||
|
|
@ -19,26 +19,43 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
|
||||
class ExceptionLoggingHandler(logging.Handler):
|
||||
"""
|
||||
Handler that records exceptions to the current OpenTelemetry span.
|
||||
|
||||
Unlike creating a new span, this records exceptions on the existing span
|
||||
to maintain trace context consistency throughout the request lifecycle.
|
||||
"""
|
||||
|
||||
def emit(self, record: logging.LogRecord):
|
||||
with contextlib.suppress(Exception):
|
||||
if record.exc_info:
|
||||
tracer = get_tracer_provider().get_tracer("dify.exception.logging")
|
||||
with tracer.start_as_current_span(
|
||||
"log.exception",
|
||||
attributes={
|
||||
"log.level": record.levelname,
|
||||
"log.message": record.getMessage(),
|
||||
"log.logger": record.name,
|
||||
"log.file.path": record.pathname,
|
||||
"log.file.line": record.lineno,
|
||||
},
|
||||
) as span:
|
||||
span.set_status(StatusCode.ERROR)
|
||||
if record.exc_info[1]:
|
||||
span.record_exception(record.exc_info[1])
|
||||
span.set_attribute("exception.message", str(record.exc_info[1]))
|
||||
if record.exc_info[0]:
|
||||
span.set_attribute("exception.type", record.exc_info[0].__name__)
|
||||
if not record.exc_info:
|
||||
return
|
||||
|
||||
from opentelemetry.trace import get_current_span
|
||||
|
||||
span = get_current_span()
|
||||
if not span or not span.is_recording():
|
||||
return
|
||||
|
||||
# Record exception on the current span instead of creating a new one
|
||||
span.set_status(StatusCode.ERROR, record.getMessage())
|
||||
|
||||
# Add log context as span events/attributes
|
||||
span.add_event(
|
||||
"log.exception",
|
||||
attributes={
|
||||
"log.level": record.levelname,
|
||||
"log.message": record.getMessage(),
|
||||
"log.logger": record.name,
|
||||
"log.file.path": record.pathname,
|
||||
"log.file.line": record.lineno,
|
||||
},
|
||||
)
|
||||
|
||||
if record.exc_info[1]:
|
||||
span.record_exception(record.exc_info[1])
|
||||
if record.exc_info[0]:
|
||||
span.set_attribute("exception.type", record.exc_info[0].__name__)
|
||||
|
||||
|
||||
def instrument_exception_logging() -> None:
|
||||
|
|
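An illustration of the behavioral change above: logger.exception() inside an active span now records the error on that span instead of opening a separate "log.exception" span. Tracer and span names here are placeholders.

import logging

from opentelemetry import trace

logger = logging.getLogger(__name__)
tracer = trace.get_tracer("demo")

with tracer.start_as_current_span("handle-request"):
    try:
        1 / 0
    except ZeroDivisionError:
        # With ExceptionLoggingHandler installed, the exception event and ERROR
        # status land on the "handle-request" span itself.
        logger.exception("division failed")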
|
|||
|
|
@ -1,236 +1,338 @@
|
|||
from flask_restx import Namespace, fields
|
||||
from __future__ import annotations
|
||||
|
||||
from fields.member_fields import simple_account_fields
|
||||
from libs.helper import TimestampField
|
||||
from datetime import datetime
|
||||
from typing import Any, TypeAlias
|
||||
|
||||
from .raws import FilesContainedField
|
||||
from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator
|
||||
|
||||
from core.file import File
|
||||
|
||||
JSONValue: TypeAlias = Any
|
||||
|
||||
|
||||
class MessageTextField(fields.Raw):
|
||||
def format(self, value):
|
||||
return value[0]["text"] if value else ""
|
||||
class ResponseModel(BaseModel):
|
||||
model_config = ConfigDict(
|
||||
from_attributes=True,
|
||||
extra="ignore",
|
||||
populate_by_name=True,
|
||||
serialize_by_alias=True,
|
||||
protected_namespaces=(),
|
||||
)
|
||||
|
||||
|
||||
feedback_fields = {
|
||||
"rating": fields.String,
|
||||
"content": fields.String,
|
||||
"from_source": fields.String,
|
||||
"from_end_user_id": fields.String,
|
||||
"from_account": fields.Nested(simple_account_fields, allow_null=True),
|
||||
}
|
||||
class MessageFile(ResponseModel):
|
||||
id: str
|
||||
filename: str
|
||||
type: str
|
||||
url: str | None = None
|
||||
mime_type: str | None = None
|
||||
size: int | None = None
|
||||
transfer_method: str
|
||||
belongs_to: str | None = None
|
||||
upload_file_id: str | None = None
|
||||
|
||||
annotation_fields = {
|
||||
"id": fields.String,
|
||||
"question": fields.String,
|
||||
"content": fields.String,
|
||||
"account": fields.Nested(simple_account_fields, allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
|
||||
annotation_hit_history_fields = {
|
||||
"annotation_id": fields.String(attribute="id"),
|
||||
"annotation_create_account": fields.Nested(simple_account_fields, allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
|
||||
message_file_fields = {
|
||||
"id": fields.String,
|
||||
"filename": fields.String,
|
||||
"type": fields.String,
|
||||
"url": fields.String,
|
||||
"mime_type": fields.String,
|
||||
"size": fields.Integer,
|
||||
"transfer_method": fields.String,
|
||||
"belongs_to": fields.String(default="user"),
|
||||
"upload_file_id": fields.String(default=None),
|
||||
}
|
||||
@field_validator("transfer_method", mode="before")
|
||||
@classmethod
|
||||
def _normalize_transfer_method(cls, value: object) -> str:
|
||||
if isinstance(value, str):
|
||||
return value
|
||||
return str(value)
|
||||
|
||||
|
||||
def build_message_file_model(api_or_ns: Namespace):
|
||||
"""Build the message file fields for the API or Namespace."""
|
||||
return api_or_ns.model("MessageFile", message_file_fields)
|
||||
class SimpleConversation(ResponseModel):
|
||||
id: str
|
||||
name: str
|
||||
inputs: dict[str, JSONValue]
|
||||
status: str
|
||||
introduction: str | None = None
|
||||
created_at: int | None = None
|
||||
updated_at: int | None = None
|
||||
|
||||
@field_validator("inputs", mode="before")
|
||||
@classmethod
|
||||
def _normalize_inputs(cls, value: JSONValue) -> JSONValue:
|
||||
return format_files_contained(value)
|
||||
|
||||
@field_validator("created_at", "updated_at", mode="before")
|
||||
@classmethod
|
||||
def _normalize_timestamp(cls, value: datetime | int | None) -> int | None:
|
||||
if isinstance(value, datetime):
|
||||
return to_timestamp(value)
|
||||
return value
|
||||
|
||||
|
||||
agent_thought_fields = {
|
||||
"id": fields.String,
|
||||
"chain_id": fields.String,
|
||||
"message_id": fields.String,
|
||||
"position": fields.Integer,
|
||||
"thought": fields.String,
|
||||
"tool": fields.String,
|
||||
"tool_labels": fields.Raw,
|
||||
"tool_input": fields.String,
|
||||
"created_at": TimestampField,
|
||||
"observation": fields.String,
|
||||
"files": fields.List(fields.String),
|
||||
}
|
||||
|
||||
message_detail_fields = {
|
||||
"id": fields.String,
|
||||
"conversation_id": fields.String,
|
||||
"inputs": FilesContainedField,
|
||||
"query": fields.String,
|
||||
"message": fields.Raw,
|
||||
"message_tokens": fields.Integer,
|
||||
"answer": fields.String(attribute="re_sign_file_url_answer"),
|
||||
"answer_tokens": fields.Integer,
|
||||
"provider_response_latency": fields.Float,
|
||||
"from_source": fields.String,
|
||||
"from_end_user_id": fields.String,
|
||||
"from_account_id": fields.String,
|
||||
"feedbacks": fields.List(fields.Nested(feedback_fields)),
|
||||
"workflow_run_id": fields.String,
|
||||
"annotation": fields.Nested(annotation_fields, allow_null=True),
|
||||
"annotation_hit_history": fields.Nested(annotation_hit_history_fields, allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
"agent_thoughts": fields.List(fields.Nested(agent_thought_fields)),
|
||||
"message_files": fields.List(fields.Nested(message_file_fields)),
|
||||
"metadata": fields.Raw(attribute="message_metadata_dict"),
|
||||
"status": fields.String,
|
||||
"error": fields.String,
|
||||
"parent_message_id": fields.String,
|
||||
}
|
||||
|
||||
feedback_stat_fields = {"like": fields.Integer, "dislike": fields.Integer}
|
||||
status_count_fields = {"success": fields.Integer, "failed": fields.Integer, "partial_success": fields.Integer}
|
||||
model_config_fields = {
|
||||
"opening_statement": fields.String,
|
||||
"suggested_questions": fields.Raw,
|
||||
"model": fields.Raw,
|
||||
"user_input_form": fields.Raw,
|
||||
"pre_prompt": fields.String,
|
||||
"agent_mode": fields.Raw,
|
||||
}
|
||||
|
||||
simple_model_config_fields = {
|
||||
"model": fields.Raw(attribute="model_dict"),
|
||||
"pre_prompt": fields.String,
|
||||
}
|
||||
|
||||
simple_message_detail_fields = {
|
||||
"inputs": FilesContainedField,
|
||||
"query": fields.String,
|
||||
"message": MessageTextField,
|
||||
"answer": fields.String,
|
||||
}
|
||||
|
||||
conversation_fields = {
|
||||
"id": fields.String,
|
||||
"status": fields.String,
|
||||
"from_source": fields.String,
|
||||
"from_end_user_id": fields.String,
|
||||
"from_end_user_session_id": fields.String(),
|
||||
"from_account_id": fields.String,
|
||||
"from_account_name": fields.String,
|
||||
"read_at": TimestampField,
|
||||
"created_at": TimestampField,
|
||||
"updated_at": TimestampField,
|
||||
"annotation": fields.Nested(annotation_fields, allow_null=True),
|
||||
"model_config": fields.Nested(simple_model_config_fields),
|
||||
"user_feedback_stats": fields.Nested(feedback_stat_fields),
|
||||
"admin_feedback_stats": fields.Nested(feedback_stat_fields),
|
||||
"message": fields.Nested(simple_message_detail_fields, attribute="first_message"),
|
||||
}
|
||||
|
||||
conversation_pagination_fields = {
|
||||
"page": fields.Integer,
|
||||
"limit": fields.Integer(attribute="per_page"),
|
||||
"total": fields.Integer,
|
||||
"has_more": fields.Boolean(attribute="has_next"),
|
||||
"data": fields.List(fields.Nested(conversation_fields), attribute="items"),
|
||||
}
|
||||
|
||||
conversation_message_detail_fields = {
|
||||
"id": fields.String,
|
||||
"status": fields.String,
|
||||
"from_source": fields.String,
|
||||
"from_end_user_id": fields.String,
|
||||
"from_account_id": fields.String,
|
||||
"created_at": TimestampField,
|
||||
"model_config": fields.Nested(model_config_fields),
|
||||
"message": fields.Nested(message_detail_fields, attribute="first_message"),
|
||||
}
|
||||
|
||||
conversation_with_summary_fields = {
|
||||
"id": fields.String,
|
||||
"status": fields.String,
|
||||
"from_source": fields.String,
|
||||
"from_end_user_id": fields.String,
|
||||
"from_end_user_session_id": fields.String,
|
||||
"from_account_id": fields.String,
|
||||
"from_account_name": fields.String,
|
||||
"name": fields.String,
|
||||
"summary": fields.String(attribute="summary_or_query"),
|
||||
"read_at": TimestampField,
|
||||
"created_at": TimestampField,
|
||||
"updated_at": TimestampField,
|
||||
"annotated": fields.Boolean,
|
||||
"model_config": fields.Nested(simple_model_config_fields),
|
||||
"message_count": fields.Integer,
|
||||
"user_feedback_stats": fields.Nested(feedback_stat_fields),
|
||||
"admin_feedback_stats": fields.Nested(feedback_stat_fields),
|
||||
"status_count": fields.Nested(status_count_fields),
|
||||
}
|
||||
|
||||
conversation_with_summary_pagination_fields = {
|
||||
"page": fields.Integer,
|
||||
"limit": fields.Integer(attribute="per_page"),
|
||||
"total": fields.Integer,
|
||||
"has_more": fields.Boolean(attribute="has_next"),
|
||||
"data": fields.List(fields.Nested(conversation_with_summary_fields), attribute="items"),
|
||||
}
|
||||
|
||||
conversation_detail_fields = {
|
||||
"id": fields.String,
|
||||
"status": fields.String,
|
||||
"from_source": fields.String,
|
||||
"from_end_user_id": fields.String,
|
||||
"from_account_id": fields.String,
|
||||
"created_at": TimestampField,
|
||||
"updated_at": TimestampField,
|
||||
"annotated": fields.Boolean,
|
||||
"introduction": fields.String,
|
||||
"model_config": fields.Nested(model_config_fields),
|
||||
"message_count": fields.Integer,
|
||||
"user_feedback_stats": fields.Nested(feedback_stat_fields),
|
||||
"admin_feedback_stats": fields.Nested(feedback_stat_fields),
|
||||
}
|
||||
|
||||
simple_conversation_fields = {
|
||||
"id": fields.String,
|
||||
"name": fields.String,
|
||||
"inputs": FilesContainedField,
|
||||
"status": fields.String,
|
||||
"introduction": fields.String,
|
||||
"created_at": TimestampField,
|
||||
"updated_at": TimestampField,
|
||||
}
|
||||
|
||||
conversation_delete_fields = {
|
||||
"result": fields.String,
|
||||
}
|
||||
|
||||
conversation_infinite_scroll_pagination_fields = {
|
||||
"limit": fields.Integer,
|
||||
"has_more": fields.Boolean,
|
||||
"data": fields.List(fields.Nested(simple_conversation_fields)),
|
||||
}
|
||||
class ConversationInfiniteScrollPagination(ResponseModel):
|
||||
limit: int
|
||||
has_more: bool
|
||||
data: list[SimpleConversation]
|
||||
|
||||
|
||||
def build_conversation_infinite_scroll_pagination_model(api_or_ns: Namespace):
|
||||
"""Build the conversation infinite scroll pagination model for the API or Namespace."""
|
||||
simple_conversation_model = build_simple_conversation_model(api_or_ns)
|
||||
|
||||
copied_fields = conversation_infinite_scroll_pagination_fields.copy()
|
||||
copied_fields["data"] = fields.List(fields.Nested(simple_conversation_model))
|
||||
return api_or_ns.model("ConversationInfiniteScrollPagination", copied_fields)
|
||||
class ConversationDelete(ResponseModel):
|
||||
result: str
|
||||
|
||||
|
||||
def build_conversation_delete_model(api_or_ns: Namespace):
|
||||
"""Build the conversation delete model for the API or Namespace."""
|
||||
return api_or_ns.model("ConversationDelete", conversation_delete_fields)
|
||||
class ResultResponse(ResponseModel):
|
||||
result: str
|
||||
|
||||
|
||||
def build_simple_conversation_model(api_or_ns: Namespace):
|
||||
"""Build the simple conversation model for the API or Namespace."""
|
||||
return api_or_ns.model("SimpleConversation", simple_conversation_fields)
|
||||
class SimpleAccount(ResponseModel):
|
||||
id: str
|
||||
name: str
|
||||
email: str
|
||||
|
||||
|
||||
class Feedback(ResponseModel):
|
||||
rating: str
|
||||
content: str | None = None
|
||||
from_source: str
|
||||
from_end_user_id: str | None = None
|
||||
from_account: SimpleAccount | None = None
|
||||
|
||||
|
||||
class Annotation(ResponseModel):
|
||||
id: str
|
||||
question: str | None = None
|
||||
content: str
|
||||
account: SimpleAccount | None = None
|
||||
created_at: int | None = None
|
||||
|
||||
@field_validator("created_at", mode="before")
|
||||
@classmethod
|
||||
def _normalize_created_at(cls, value: datetime | int | None) -> int | None:
|
||||
if isinstance(value, datetime):
|
||||
return to_timestamp(value)
|
||||
return value
|
||||
|
||||
|
||||
class AnnotationHitHistory(ResponseModel):
|
||||
annotation_id: str
|
||||
annotation_create_account: SimpleAccount | None = None
|
||||
created_at: int | None = None
|
||||
|
||||
@field_validator("created_at", mode="before")
|
||||
@classmethod
|
||||
def _normalize_created_at(cls, value: datetime | int | None) -> int | None:
|
||||
if isinstance(value, datetime):
|
||||
return to_timestamp(value)
|
||||
return value
|
||||
|
||||
|
||||
class AgentThought(ResponseModel):
|
||||
id: str
|
||||
chain_id: str | None = None
|
||||
message_chain_id: str | None = Field(default=None, exclude=True, validation_alias="message_chain_id")
|
||||
message_id: str
|
||||
position: int
|
||||
thought: str | None = None
|
||||
tool: str | None = None
|
||||
tool_labels: JSONValue
|
||||
tool_input: str | None = None
|
||||
created_at: int | None = None
|
||||
observation: str | None = None
|
||||
files: list[str]
|
||||
|
||||
@field_validator("created_at", mode="before")
|
||||
@classmethod
|
||||
def _normalize_created_at(cls, value: datetime | int | None) -> int | None:
|
||||
if isinstance(value, datetime):
|
||||
return to_timestamp(value)
|
||||
return value
|
||||
|
||||
@model_validator(mode="after")
|
||||
def _fallback_chain_id(self):
|
||||
if self.chain_id is None and self.message_chain_id:
|
||||
self.chain_id = self.message_chain_id
|
||||
return self
|
||||
|
||||
|
||||
class MessageDetail(ResponseModel):
|
||||
id: str
|
||||
conversation_id: str
|
||||
inputs: dict[str, JSONValue]
|
||||
query: str
|
||||
message: JSONValue
|
||||
message_tokens: int
|
||||
answer: str
|
||||
answer_tokens: int
|
||||
provider_response_latency: float
|
||||
from_source: str
|
||||
from_end_user_id: str | None = None
|
||||
from_account_id: str | None = None
|
||||
feedbacks: list[Feedback]
|
||||
workflow_run_id: str | None = None
|
||||
annotation: Annotation | None = None
|
||||
annotation_hit_history: AnnotationHitHistory | None = None
|
||||
created_at: int | None = None
|
||||
agent_thoughts: list[AgentThought]
|
||||
message_files: list[MessageFile]
|
||||
metadata: JSONValue
|
||||
status: str
|
||||
error: str | None = None
|
||||
parent_message_id: str | None = None
|
||||
|
||||
@field_validator("inputs", mode="before")
|
||||
@classmethod
|
||||
def _normalize_inputs(cls, value: JSONValue) -> JSONValue:
|
||||
return format_files_contained(value)
|
||||
|
||||
@field_validator("created_at", mode="before")
|
||||
@classmethod
|
||||
def _normalize_created_at(cls, value: datetime | int | None) -> int | None:
|
||||
if isinstance(value, datetime):
|
||||
return to_timestamp(value)
|
||||
return value
|
||||
|
||||
|
||||
class FeedbackStat(ResponseModel):
|
||||
like: int
|
||||
dislike: int
|
||||
|
||||
|
||||
class StatusCount(ResponseModel):
|
||||
success: int
|
||||
failed: int
|
||||
partial_success: int
|
||||
|
||||
|
||||
class ModelConfig(ResponseModel):
|
||||
opening_statement: str | None = None
|
||||
suggested_questions: JSONValue | None = None
|
||||
model: JSONValue | None = None
|
||||
user_input_form: JSONValue | None = None
|
||||
pre_prompt: str | None = None
|
||||
agent_mode: JSONValue | None = None
|
||||
|
||||
|
||||
class SimpleModelConfig(ResponseModel):
|
||||
model: JSONValue | None = None
|
||||
pre_prompt: str | None = None
|
||||
|
||||
|
||||
class SimpleMessageDetail(ResponseModel):
|
||||
inputs: dict[str, JSONValue]
|
||||
query: str
|
||||
message: str
|
||||
answer: str
|
||||
|
||||
@field_validator("inputs", mode="before")
|
||||
@classmethod
|
||||
def _normalize_inputs(cls, value: JSONValue) -> JSONValue:
|
||||
return format_files_contained(value)
|
||||
|
||||
|
||||
class Conversation(ResponseModel):
|
||||
id: str
|
||||
status: str
|
||||
from_source: str
|
||||
from_end_user_id: str | None = None
|
||||
from_end_user_session_id: str | None = None
|
||||
from_account_id: str | None = None
|
||||
from_account_name: str | None = None
|
||||
read_at: int | None = None
|
||||
created_at: int | None = None
|
||||
updated_at: int | None = None
|
||||
annotation: Annotation | None = None
|
||||
model_config_: SimpleModelConfig | None = Field(default=None, alias="model_config")
|
||||
user_feedback_stats: FeedbackStat | None = None
|
||||
admin_feedback_stats: FeedbackStat | None = None
|
||||
message: SimpleMessageDetail | None = None
|
||||
|
||||
|
||||
class ConversationPagination(ResponseModel):
|
||||
page: int
|
||||
limit: int
|
||||
total: int
|
||||
has_more: bool
|
||||
data: list[Conversation]
|
||||
|
||||
|
||||
class ConversationMessageDetail(ResponseModel):
|
||||
id: str
|
||||
status: str
|
||||
from_source: str
|
||||
from_end_user_id: str | None = None
|
||||
from_account_id: str | None = None
|
||||
created_at: int | None = None
|
||||
model_config_: ModelConfig | None = Field(default=None, alias="model_config")
|
||||
message: MessageDetail | None = None
|
||||
|
||||
|
||||
class ConversationWithSummary(ResponseModel):
|
||||
id: str
|
||||
status: str
|
||||
from_source: str
|
||||
from_end_user_id: str | None = None
|
||||
from_end_user_session_id: str | None = None
|
||||
from_account_id: str | None = None
|
||||
from_account_name: str | None = None
|
||||
name: str
|
||||
summary: str
|
||||
read_at: int | None = None
|
||||
created_at: int | None = None
|
||||
updated_at: int | None = None
|
||||
annotated: bool
|
||||
model_config_: SimpleModelConfig | None = Field(default=None, alias="model_config")
|
||||
message_count: int
|
||||
user_feedback_stats: FeedbackStat | None = None
|
||||
admin_feedback_stats: FeedbackStat | None = None
|
||||
status_count: StatusCount | None = None
|
||||
|
||||
|
||||
class ConversationWithSummaryPagination(ResponseModel):
|
||||
page: int
|
||||
limit: int
|
||||
total: int
|
||||
has_more: bool
|
||||
data: list[ConversationWithSummary]
|
||||
|
||||
|
||||
class ConversationDetail(ResponseModel):
|
||||
id: str
|
||||
status: str
|
||||
from_source: str
|
||||
from_end_user_id: str | None = None
|
||||
from_account_id: str | None = None
|
||||
created_at: int | None = None
|
||||
updated_at: int | None = None
|
||||
annotated: bool
|
||||
introduction: str | None = None
|
||||
model_config_: ModelConfig | None = Field(default=None, alias="model_config")
|
||||
message_count: int
|
||||
user_feedback_stats: FeedbackStat | None = None
|
||||
admin_feedback_stats: FeedbackStat | None = None
|
||||
|
||||
|
||||
def to_timestamp(value: datetime | None) -> int | None:
|
||||
if value is None:
|
||||
return None
|
||||
return int(value.timestamp())
|
||||
|
||||
|
||||
def format_files_contained(value: JSONValue) -> JSONValue:
|
||||
if isinstance(value, File):
|
||||
return value.model_dump()
|
||||
if isinstance(value, dict):
|
||||
return {k: format_files_contained(v) for k, v in value.items()}
|
||||
if isinstance(value, list):
|
||||
return [format_files_contained(v) for v in value]
|
||||
return value
|
||||
|
||||
|
||||
def message_text(value: JSONValue) -> str:
|
||||
if isinstance(value, list) and value:
|
||||
first = value[0]
|
||||
if isinstance(first, dict):
|
||||
text = first.get("text")
|
||||
if isinstance(text, str):
|
||||
return text
|
||||
return ""
|
||||
|
||||
|
||||
def extract_model_config(value: object | None) -> dict[str, JSONValue]:
|
||||
if value is None:
|
||||
return {}
|
||||
if isinstance(value, dict):
|
||||
return value
|
||||
if hasattr(value, "to_dict"):
|
||||
return value.to_dict()
|
||||
return {}
|
||||
|
|
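A minimal sketch of how the new Pydantic response models above might be used in place of flask_restx marshaling; the sample data is made up. Datetimes are normalized to Unix timestamps by the field validators, and model_dump() yields the JSON-ready payload.

from datetime import UTC, datetime

from fields.conversation_fields import SimpleConversation

conv = SimpleConversation.model_validate({
    "id": "c1",
    "name": "Demo",
    "inputs": {},
    "status": "normal",
    "introduction": None,
    "created_at": datetime(2024, 1, 1, tzinfo=UTC),  # normalized to an int timestamp
    "updated_at": 1704067200,
})
payload = conv.model_dump()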
|
|||
|
|
@ -1,77 +1,137 @@
|
|||
from flask_restx import Namespace, fields
|
||||
from __future__ import annotations
|
||||
|
||||
from fields.conversation_fields import message_file_fields
|
||||
from libs.helper import TimestampField
|
||||
from datetime import datetime
|
||||
from typing import TypeAlias
|
||||
|
||||
from .raws import FilesContainedField
|
||||
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
||||
|
||||
feedback_fields = {
|
||||
"rating": fields.String,
|
||||
}
|
||||
from core.file import File
|
||||
from fields.conversation_fields import AgentThought, JSONValue, MessageFile
|
||||
|
||||
JSONValueType: TypeAlias = JSONValue
|
||||
|
||||
|
||||
def build_feedback_model(api_or_ns: Namespace):
|
||||
"""Build the feedback model for the API or Namespace."""
|
||||
return api_or_ns.model("Feedback", feedback_fields)
|
||||
class ResponseModel(BaseModel):
|
||||
model_config = ConfigDict(from_attributes=True, extra="ignore")
|
||||
|
||||
|
||||
agent_thought_fields = {
|
||||
"id": fields.String,
|
||||
"chain_id": fields.String,
|
||||
"message_id": fields.String,
|
||||
"position": fields.Integer,
|
||||
"thought": fields.String,
|
||||
"tool": fields.String,
|
||||
"tool_labels": fields.Raw,
|
||||
"tool_input": fields.String,
|
||||
"created_at": TimestampField,
|
||||
"observation": fields.String,
|
||||
"files": fields.List(fields.String),
|
||||
}
|
||||
class SimpleFeedback(ResponseModel):
|
||||
rating: str | None = None
|
||||
|
||||
|
||||
def build_agent_thought_model(api_or_ns: Namespace):
|
||||
"""Build the agent thought model for the API or Namespace."""
|
||||
return api_or_ns.model("AgentThought", agent_thought_fields)
|
||||
class RetrieverResource(ResponseModel):
|
||||
id: str
|
||||
message_id: str
|
||||
position: int
|
||||
dataset_id: str | None = None
|
||||
dataset_name: str | None = None
|
||||
document_id: str | None = None
|
||||
document_name: str | None = None
|
||||
data_source_type: str | None = None
|
||||
segment_id: str | None = None
|
||||
score: float | None = None
|
||||
hit_count: int | None = None
|
||||
word_count: int | None = None
|
||||
segment_position: int | None = None
|
||||
index_node_hash: str | None = None
|
||||
content: str | None = None
|
||||
created_at: int | None = None
|
||||
|
||||
@field_validator("created_at", mode="before")
|
||||
@classmethod
|
||||
def _normalize_created_at(cls, value: datetime | int | None) -> int | None:
|
||||
if isinstance(value, datetime):
|
||||
return to_timestamp(value)
|
||||
return value
|
||||
|
||||
|
||||
retriever_resource_fields = {
|
||||
"id": fields.String,
|
||||
"message_id": fields.String,
|
||||
"position": fields.Integer,
|
||||
"dataset_id": fields.String,
|
||||
"dataset_name": fields.String,
|
||||
"document_id": fields.String,
|
||||
"document_name": fields.String,
|
||||
"data_source_type": fields.String,
|
||||
"segment_id": fields.String,
|
||||
"score": fields.Float,
|
||||
"hit_count": fields.Integer,
|
||||
"word_count": fields.Integer,
|
||||
"segment_position": fields.Integer,
|
||||
"index_node_hash": fields.String,
|
||||
"content": fields.String,
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
class MessageListItem(ResponseModel):
|
||||
id: str
|
||||
conversation_id: str
|
||||
parent_message_id: str | None = None
|
||||
inputs: dict[str, JSONValueType]
|
||||
query: str
|
||||
answer: str = Field(validation_alias="re_sign_file_url_answer")
|
||||
feedback: SimpleFeedback | None = Field(default=None, validation_alias="user_feedback")
|
||||
retriever_resources: list[RetrieverResource]
|
||||
created_at: int | None = None
|
||||
agent_thoughts: list[AgentThought]
|
||||
message_files: list[MessageFile]
|
||||
status: str
|
||||
error: str | None = None
|
||||
|
||||
message_fields = {
|
||||
"id": fields.String,
|
||||
"conversation_id": fields.String,
|
||||
"parent_message_id": fields.String,
|
||||
"inputs": FilesContainedField,
|
||||
"query": fields.String,
|
||||
"answer": fields.String(attribute="re_sign_file_url_answer"),
|
||||
"feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True),
|
||||
"retriever_resources": fields.List(fields.Nested(retriever_resource_fields)),
|
||||
"created_at": TimestampField,
|
||||
"agent_thoughts": fields.List(fields.Nested(agent_thought_fields)),
|
||||
"message_files": fields.List(fields.Nested(message_file_fields)),
|
||||
"status": fields.String,
|
||||
"error": fields.String,
|
||||
}
|
||||
@field_validator("inputs", mode="before")
|
||||
@classmethod
|
||||
def _normalize_inputs(cls, value: JSONValueType) -> JSONValueType:
|
||||
return format_files_contained(value)
|
||||
|
||||
message_infinite_scroll_pagination_fields = {
|
||||
"limit": fields.Integer,
|
||||
"has_more": fields.Boolean,
|
||||
"data": fields.List(fields.Nested(message_fields)),
|
||||
}
|
||||
@field_validator("created_at", mode="before")
|
||||
@classmethod
|
||||
def _normalize_created_at(cls, value: datetime | int | None) -> int | None:
|
||||
if isinstance(value, datetime):
|
||||
return to_timestamp(value)
|
||||
return value
|
||||
|
||||
|
||||
class WebMessageListItem(MessageListItem):
|
||||
metadata: JSONValueType | None = Field(default=None, validation_alias="message_metadata_dict")
|
||||
|
||||
|
||||
class MessageInfiniteScrollPagination(ResponseModel):
|
||||
limit: int
|
||||
has_more: bool
|
||||
data: list[MessageListItem]
|
||||
|
||||
|
||||
class WebMessageInfiniteScrollPagination(ResponseModel):
|
||||
limit: int
|
||||
has_more: bool
|
||||
data: list[WebMessageListItem]
|
||||
|
||||
|
||||
class SavedMessageItem(ResponseModel):
|
||||
id: str
|
||||
inputs: dict[str, JSONValueType]
|
||||
query: str
|
||||
answer: str
|
||||
message_files: list[MessageFile]
|
||||
feedback: SimpleFeedback | None = Field(default=None, validation_alias="user_feedback")
|
||||
created_at: int | None = None
|
||||
|
||||
@field_validator("inputs", mode="before")
|
||||
@classmethod
|
||||
def _normalize_inputs(cls, value: JSONValueType) -> JSONValueType:
|
||||
return format_files_contained(value)
|
||||
|
||||
@field_validator("created_at", mode="before")
|
||||
@classmethod
|
||||
def _normalize_created_at(cls, value: datetime | int | None) -> int | None:
|
||||
if isinstance(value, datetime):
|
||||
return to_timestamp(value)
|
||||
return value
|
||||
|
||||
|
||||
class SavedMessageInfiniteScrollPagination(ResponseModel):
|
||||
limit: int
|
||||
has_more: bool
|
||||
data: list[SavedMessageItem]
|
||||
|
||||
|
||||
class SuggestedQuestionsResponse(ResponseModel):
|
||||
data: list[str]
|
||||
|
||||
|
||||
def to_timestamp(value: datetime | None) -> int | None:
|
||||
if value is None:
|
||||
return None
|
||||
return int(value.timestamp())
|
||||
|
||||
|
||||
def format_files_contained(value: JSONValueType) -> JSONValueType:
|
||||
if isinstance(value, File):
|
||||
return value.model_dump()
|
||||
if isinstance(value, dict):
|
||||
return {k: format_files_contained(v) for k, v in value.items()}
|
||||
if isinstance(value, list):
|
||||
return [format_files_contained(v) for v in value]
|
||||
return value
|
||||
|
|
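A sketch of how MessageListItem maps source attributes via validation_alias (module path assumed): the model reads re_sign_file_url_answer and user_feedback from the source data but exposes them as answer and feedback. The sample dict is purely illustrative.

from fields.message_fields import MessageListItem

row = {
    "id": "m1",
    "conversation_id": "c1",
    "parent_message_id": None,
    "inputs": {},
    "query": "hi",
    "re_sign_file_url_answer": "hello",  # read via validation_alias, exposed as "answer"
    "user_feedback": None,               # exposed as "feedback"
    "retriever_resources": [],
    "created_at": 1704067200,
    "agent_thoughts": [],
    "message_files": [],
    "status": "normal",
    "error": None,
}
item = MessageListItem.model_validate(row)
assert item.answer == "hello"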
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
import re
|
||||
import sys
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
|
|
@ -109,11 +108,8 @@ def register_external_error_handlers(api: Api):
|
|||
data.setdefault("code", "unknown")
|
||||
data.setdefault("status", status_code)
|
||||
|
||||
# Log stack
|
||||
exc_info: Any = sys.exc_info()
|
||||
if exc_info[1] is None:
|
||||
exc_info = (None, None, None)
|
||||
current_app.log_exception(exc_info)
|
||||
# Note: Exception logging is handled automatically by the Flask/Flask-RESTX framework
|
||||
# Explicit log_exception call removed to avoid duplicate log entries
|
||||
|
||||
return data, status_code
|
||||
|
||||
|
|
|
|||
|
|
@ -11,9 +11,6 @@ from alembic import op
|
|||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '00bacef91f18'
|
||||
down_revision = '8ec536f3c800'
|
||||
|
|
@ -23,31 +20,17 @@ depends_on = None
|
|||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description', sa.Text(), nullable=False))
|
||||
batch_op.drop_column('description_str')
|
||||
else:
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description', models.types.LongText(), nullable=False))
|
||||
batch_op.drop_column('description_str')
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description', models.types.LongText(), nullable=False))
|
||||
batch_op.drop_column('description_str')
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description_str', sa.TEXT(), autoincrement=False, nullable=False))
|
||||
batch_op.drop_column('description')
|
||||
else:
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description_str', models.types.LongText(), autoincrement=False, nullable=False))
|
||||
batch_op.drop_column('description')
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description_str', models.types.LongText(), autoincrement=False, nullable=False))
|
||||
batch_op.drop_column('description')
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
|
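A hedged sketch of why the per-dialect branches can be dropped in these migrations: a column type such as models.types.LongText can itself choose the right backend type, for example via with_variant. This illustrates the pattern only and is not Dify's actual implementation.

import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# TEXT everywhere, LONGTEXT when the bind dialect is MySQL
LongTextIllustration = sa.Text().with_variant(mysql.LONGTEXT(), "mysql")

# A single migration code path then works for both dialects, e.g.:
# batch_op.add_column(sa.Column('description', LongTextIllustration, nullable=False))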
|
|||
|
|
@ -7,14 +7,10 @@ Create Date: 2024-01-10 04:40:57.257824
|
|||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '114eed84c228'
|
||||
down_revision = 'c71211c8f604'
|
||||
|
|
@ -32,13 +28,7 @@ def upgrade():
|
|||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tool_id', postgresql.UUID(), autoincrement=False, nullable=False))
|
||||
else:
|
||||
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tool_id', models.types.StringUUID(), autoincrement=False, nullable=False))
|
||||
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tool_id', models.types.StringUUID(), autoincrement=False, nullable=False))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
|
|
|||
|
|
@ -11,9 +11,6 @@ from alembic import op
|
|||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '161cadc1af8d'
|
||||
down_revision = '7e6a8693e07a'
|
||||
|
|
@ -23,16 +20,9 @@ depends_on = None
|
|||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
|
||||
# Step 1: Add column without NOT NULL constraint
|
||||
op.add_column('dataset_permissions', sa.Column('tenant_id', sa.UUID(), nullable=False))
|
||||
else:
|
||||
with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
|
||||
# Step 1: Add column without NOT NULL constraint
|
||||
op.add_column('dataset_permissions', sa.Column('tenant_id', models.types.StringUUID(), nullable=False))
|
||||
with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
|
||||
# Step 1: Add column without NOT NULL constraint
|
||||
op.add_column('dataset_permissions', sa.Column('tenant_id', models.types.StringUUID(), nullable=False))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
|
|
|||
|
|
@ -9,11 +9,6 @@ from alembic import op
|
|||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '6af6a521a53e'
|
||||
down_revision = 'd57ba9ebb251'
|
||||
|
|
@ -23,58 +18,30 @@ depends_on = None
|
|||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=sa.TEXT(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=True)
|
||||
else:
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=sa.TEXT(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=False)
|
||||
else:
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
|
|
|||
|
|
@ -8,7 +8,6 @@ Create Date: 2024-11-01 04:34:23.816198
|
|||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'd3f6769a94a3'
|
||||
|
|
|
|||
|
|
@ -28,85 +28,45 @@ def upgrade():
|
|||
op.execute("UPDATE sites SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
|
||||
op.execute("UPDATE tool_api_providers SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=sa.TEXT(),
|
||||
nullable=False)
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=sa.TEXT(),
|
||||
nullable=False)
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=sa.TEXT(),
|
||||
nullable=False)
|
||||
else:
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.TEXT(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.TEXT(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.TEXT(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
else:
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
|
|
|||
|
|
@@ -49,57 +49,33 @@ def upgrade():
    op.execute("UPDATE workflows SET updated_at = created_at WHERE updated_at IS NULL")
    op.execute("UPDATE workflows SET graph = '' WHERE graph IS NULL")
    op.execute("UPDATE workflows SET features = '' WHERE features IS NULL")
    if _is_pg(conn):
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('graph', existing_type=sa.TEXT(), nullable=False)
            batch_op.alter_column('features', existing_type=sa.TEXT(), nullable=False)
            batch_op.alter_column('updated_at', existing_type=postgresql.TIMESTAMP(), nullable=False)
    else:
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('graph', existing_type=models.types.LongText(), nullable=False)
            batch_op.alter_column('features', existing_type=models.types.LongText(), nullable=False)
            batch_op.alter_column('updated_at', existing_type=sa.TIMESTAMP(), nullable=False)

    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.alter_column('graph', existing_type=models.types.LongText(), nullable=False)
        batch_op.alter_column('features', existing_type=models.types.LongText(), nullable=False)
        batch_op.alter_column('updated_at', existing_type=sa.TIMESTAMP(), nullable=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('updated_at', existing_type=postgresql.TIMESTAMP(), nullable=True)
            batch_op.alter_column('features', existing_type=sa.TEXT(), nullable=True)
            batch_op.alter_column('graph', existing_type=sa.TEXT(), nullable=True)
    else:
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('updated_at', existing_type=sa.TIMESTAMP(), nullable=True)
            batch_op.alter_column('features', existing_type=models.types.LongText(), nullable=True)
            batch_op.alter_column('graph', existing_type=models.types.LongText(), nullable=True)
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.alter_column('updated_at', existing_type=sa.TIMESTAMP(), nullable=True)
        batch_op.alter_column('features', existing_type=models.types.LongText(), nullable=True)
        batch_op.alter_column('graph', existing_type=models.types.LongText(), nullable=True)

    if _is_pg(conn):
        with op.batch_alter_table('messages', schema=None) as batch_op:
@@ -86,57 +86,30 @@ def upgrade():

def migrate_existing_provider_models_data():
    """migrate provider_models table data to provider_model_credentials"""
    conn = op.get_bind()
    # Define table structure for data manipulation
    if _is_pg(conn):
        provider_models_table = table('provider_models',
            column('id', models.types.StringUUID()), column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()), column('model_name', sa.String()), column('model_type', sa.String()),
            column('encrypted_config', sa.Text()),
            column('created_at', sa.DateTime()), column('updated_at', sa.DateTime()),
            column('credential_id', models.types.StringUUID()),
        )
    else:
        provider_models_table = table('provider_models',
            column('id', models.types.StringUUID()), column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()), column('model_name', sa.String()), column('model_type', sa.String()),
            column('encrypted_config', models.types.LongText()),
            column('created_at', sa.DateTime()), column('updated_at', sa.DateTime()),
            column('credential_id', models.types.StringUUID()),
        )
    # Define table structure for data manipulation
    provider_models_table = table('provider_models',
        column('id', models.types.StringUUID()), column('tenant_id', models.types.StringUUID()),
        column('provider_name', sa.String()), column('model_name', sa.String()), column('model_type', sa.String()),
        column('encrypted_config', models.types.LongText()),
        column('created_at', sa.DateTime()), column('updated_at', sa.DateTime()),
        column('credential_id', models.types.StringUUID()),
    )

    if _is_pg(conn):
        provider_model_credentials_table = table('provider_model_credentials',
            column('id', models.types.StringUUID()), column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()), column('model_name', sa.String()), column('model_type', sa.String()),
            column('credential_name', sa.String()),
            column('encrypted_config', sa.Text()),
            column('created_at', sa.DateTime()), column('updated_at', sa.DateTime())
        )
    else:
        provider_model_credentials_table = table('provider_model_credentials',
            column('id', models.types.StringUUID()), column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()), column('model_name', sa.String()), column('model_type', sa.String()),
            column('credential_name', sa.String()),
            column('encrypted_config', models.types.LongText()),
            column('created_at', sa.DateTime()), column('updated_at', sa.DateTime())
        )
    provider_model_credentials_table = table('provider_model_credentials',
        column('id', models.types.StringUUID()), column('tenant_id', models.types.StringUUID()),
        column('provider_name', sa.String()), column('model_name', sa.String()), column('model_type', sa.String()),
        column('credential_name', sa.String()),
        column('encrypted_config', models.types.LongText()),
        column('created_at', sa.DateTime()), column('updated_at', sa.DateTime())
    )

    # Get database connection
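The row-copy logic itself falls outside the truncated hunk above. The following is a minimal sketch, assuming only the `provider_models_table` and `provider_model_credentials_table` constructs defined there; the default credential name is an illustrative placeholder.

# Illustrative sketch only; the migration's real copy step is not shown in this hunk.
rows = conn.execute(sa.select(provider_models_table)).mappings().all()
for row in rows:
    if not row["encrypted_config"]:
        continue  # nothing to migrate for this model
    conn.execute(
        provider_model_credentials_table.insert().values(
            id=row["credential_id"],
            tenant_id=row["tenant_id"],
            provider_name=row["provider_name"],
            model_name=row["model_name"],
            model_type=row["model_type"],
            credential_name="API KEY 1",  # assumed placeholder name
            encrypted_config=row["encrypted_config"],
            created_at=row["created_at"],
            updated_at=row["updated_at"],
        )
    )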
@@ -183,14 +156,8 @@ def migrate_existing_provider_models_data():

def downgrade():
    # Re-add encrypted_config column to provider_models table
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('provider_models', schema=None) as batch_op:
            batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('provider_models', schema=None) as batch_op:
            batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True))
    with op.batch_alter_table('provider_models', schema=None) as batch_op:
        batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True))

    if not context.is_offline_mode():
        # Migrate data back from provider_model_credentials to provider_models
@@ -8,7 +8,6 @@ Create Date: 2025-08-20 17:47:17.015695
from alembic import op
import models as models
import sqlalchemy as sa
from libs.uuid_utils import uuidv7


def _is_pg(conn):

@@ -9,8 +9,6 @@ from alembic import op
import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"
import sqlalchemy as sa
@@ -23,12 +21,7 @@ depends_on = None

def upgrade():
    # Add encrypted_headers column to tool_mcp_providers table
    conn = op.get_bind()

    if _is_pg(conn):
        op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', sa.Text(), nullable=True))
    else:
        op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', models.types.LongText(), nullable=True))
    op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', models.types.LongText(), nullable=True))


def downgrade():
@@ -44,6 +44,7 @@ def upgrade():
        sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
        sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
    )

    if _is_pg(conn):
        op.create_table('datasource_oauth_tenant_params',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),

@@ -70,6 +71,7 @@ def upgrade():
        sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
        sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
    )

    if _is_pg(conn):
        op.create_table('datasource_providers',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),

@@ -104,6 +106,7 @@ def upgrade():
        sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
        sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name')
    )

    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.create_index('datasource_provider_auth_type_provider_idx', ['tenant_id', 'plugin_id', 'provider'], unique=False)

@@ -133,6 +136,7 @@ def upgrade():
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
    )

    with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
        batch_op.create_index('document_pipeline_execution_logs_document_id_idx', ['document_id'], unique=False)

@@ -174,6 +178,7 @@ def upgrade():
        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
    )

    if _is_pg(conn):
        op.create_table('pipeline_customized_templates',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
@@ -193,7 +198,6 @@ def upgrade():
            sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
        )
    else:
        # MySQL: Use compatible syntax
        op.create_table('pipeline_customized_templates',
            sa.Column('id', models.types.StringUUID(), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),

@@ -211,6 +215,7 @@ def upgrade():
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
        )

    with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
        batch_op.create_index('pipeline_customized_template_tenant_idx', ['tenant_id'], unique=False)

@@ -236,6 +241,7 @@ def upgrade():
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
    )

    if _is_pg(conn):
        op.create_table('pipelines',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),

@@ -266,6 +272,7 @@ def upgrade():
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
    )

    if _is_pg(conn):
        op.create_table('workflow_draft_variable_files',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
@@ -292,6 +299,7 @@ def upgrade():
        sa.Column('value_type', sa.String(20), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey'))
    )

    if _is_pg(conn):
        op.create_table('workflow_node_execution_offload',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),

@@ -316,6 +324,7 @@ def upgrade():
        sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
        sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
    )

    if _is_pg(conn):
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))

@@ -342,6 +351,7 @@ def upgrade():
            comment='Indicates whether the current value is the default for a conversation variable. Always `FALSE` for other types of variables.',)
        )
        batch_op.create_index('workflow_draft_variable_file_id_idx', ['file_id'], unique=False)

    if _is_pg(conn):
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False))
@@ -9,8 +9,6 @@ from alembic import op
import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"
import sqlalchemy as sa

@@ -33,15 +31,9 @@ def upgrade():

def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False))
            batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True))
    else:
        with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), autoincrement=False, nullable=False))
            batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), autoincrement=False, nullable=True))

    with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
        batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), autoincrement=False, nullable=False))
        batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), autoincrement=False, nullable=True))

    # ### end Alembic commands ###
@@ -9,7 +9,6 @@ Create Date: 2025-10-22 16:11:31.805407
from alembic import op
import models as models
import sqlalchemy as sa
from libs.uuid_utils import uuidv7


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

@@ -105,6 +105,7 @@ def upgrade():
        sa.PrimaryKeyConstraint('id', name='trigger_oauth_tenant_client_pkey'),
        sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client')
    )

    if _is_pg(conn):
        op.create_table('trigger_subscriptions',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),

@@ -143,6 +144,7 @@ def upgrade():
        sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'),
        sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider')
    )

    with op.batch_alter_table('trigger_subscriptions', schema=None) as batch_op:
        batch_op.create_index('idx_trigger_providers_endpoint', ['endpoint_id'], unique=True)
        batch_op.create_index('idx_trigger_providers_tenant_endpoint', ['tenant_id', 'endpoint_id'], unique=False)
@@ -176,6 +178,7 @@ def upgrade():
        sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'),
        sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node_subscription')
    )

    with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op:
        batch_op.create_index('workflow_plugin_trigger_tenant_subscription_idx', ['tenant_id', 'subscription_id', 'event_name'], unique=False)

@@ -207,6 +210,7 @@ def upgrade():
        sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'),
        sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node')
    )

    with op.batch_alter_table('workflow_schedule_plans', schema=None) as batch_op:
        batch_op.create_index('workflow_schedule_plan_next_idx', ['next_run_at'], unique=False)

@@ -264,6 +268,7 @@ def upgrade():
        sa.Column('finished_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey')
    )

    with op.batch_alter_table('workflow_trigger_logs', schema=None) as batch_op:
        batch_op.create_index('workflow_trigger_log_created_at_idx', ['created_at'], unique=False)
        batch_op.create_index('workflow_trigger_log_status_idx', ['status'], unique=False)

@@ -299,6 +304,7 @@ def upgrade():
        sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'),
        sa.UniqueConstraint('webhook_id', name='uniq_webhook_id')
    )

    with op.batch_alter_table('workflow_webhook_triggers', schema=None) as batch_op:
        batch_op.create_index('workflow_webhook_trigger_tenant_idx', ['tenant_id'], unique=False)
@@ -11,9 +11,6 @@ from alembic import op
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '23db93619b9d'
down_revision = '8ae9bc661daa'

@@ -23,14 +20,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.add_column(sa.Column('message_files', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.add_column(sa.Column('message_files', models.types.LongText(), nullable=True))
    with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
        batch_op.add_column(sa.Column('message_files', models.types.LongText(), nullable=True))

    # ### end Alembic commands ###
@@ -62,14 +62,8 @@ def upgrade():

def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('annotation_reply', sa.TEXT(), autoincrement=False, nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), autoincrement=False, nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), autoincrement=False, nullable=True))

    with op.batch_alter_table('app_annotation_settings', schema=None) as batch_op:
        batch_op.drop_index('app_annotation_settings_app_idx')

@@ -11,9 +11,6 @@ from alembic import op
import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '2a3aebbbf4bb'
down_revision = 'c031d46af369'
@@ -23,14 +20,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('apps', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('apps', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tracing', models.types.LongText(), nullable=True))
    with op.batch_alter_table('apps', schema=None) as batch_op:
        batch_op.add_column(sa.Column('tracing', models.types.LongText(), nullable=True))

    # ### end Alembic commands ###

@@ -7,14 +7,10 @@ Create Date: 2023-09-22 15:41:01.243183
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '2e9819ca5b28'
down_revision = 'ab23c11305d4'
@@ -24,35 +20,19 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('api_tokens', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tenant_id', postgresql.UUID(), nullable=True))
            batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
            batch_op.drop_column('dataset_id')
    else:
        with op.batch_alter_table('api_tokens', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tenant_id', models.types.StringUUID(), nullable=True))
            batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
            batch_op.drop_column('dataset_id')
    with op.batch_alter_table('api_tokens', schema=None) as batch_op:
        batch_op.add_column(sa.Column('tenant_id', models.types.StringUUID(), nullable=True))
        batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
        batch_op.drop_column('dataset_id')

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('api_tokens', schema=None) as batch_op:
            batch_op.add_column(sa.Column('dataset_id', postgresql.UUID(), autoincrement=False, nullable=True))
            batch_op.drop_index('api_token_tenant_idx')
            batch_op.drop_column('tenant_id')
    else:
        with op.batch_alter_table('api_tokens', schema=None) as batch_op:
            batch_op.add_column(sa.Column('dataset_id', models.types.StringUUID(), autoincrement=False, nullable=True))
            batch_op.drop_index('api_token_tenant_idx')
            batch_op.drop_column('tenant_id')
    with op.batch_alter_table('api_tokens', schema=None) as batch_op:
        batch_op.add_column(sa.Column('dataset_id', models.types.StringUUID(), autoincrement=False, nullable=True))
        batch_op.drop_index('api_token_tenant_idx')
        batch_op.drop_column('tenant_id')

    # ### end Alembic commands ###
@@ -7,14 +7,10 @@ Create Date: 2024-03-07 08:30:29.133614
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '42e85ed5564d'
down_revision = 'f9107f83abab'

@@ -24,59 +20,31 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('app_model_config_id', existing_type=postgresql.UUID(), nullable=True)
            batch_op.alter_column('model_provider', existing_type=sa.VARCHAR(length=255), nullable=True)
            batch_op.alter_column('model_id', existing_type=sa.VARCHAR(length=255), nullable=True)
    else:
        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('app_model_config_id', existing_type=models.types.StringUUID(), nullable=True)
            batch_op.alter_column('model_provider', existing_type=sa.VARCHAR(length=255), nullable=True)
            batch_op.alter_column('model_id', existing_type=sa.VARCHAR(length=255), nullable=True)
    with op.batch_alter_table('conversations', schema=None) as batch_op:
        batch_op.alter_column('app_model_config_id', existing_type=models.types.StringUUID(), nullable=True)
        batch_op.alter_column('model_provider', existing_type=sa.VARCHAR(length=255), nullable=True)
        batch_op.alter_column('model_id', existing_type=sa.VARCHAR(length=255), nullable=True)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('model_id', existing_type=sa.VARCHAR(length=255), nullable=False)
            batch_op.alter_column('model_provider', existing_type=sa.VARCHAR(length=255), nullable=False)
            batch_op.alter_column('app_model_config_id', existing_type=postgresql.UUID(), nullable=False)
    else:
        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('model_id', existing_type=sa.VARCHAR(length=255), nullable=False)
            batch_op.alter_column('model_provider', existing_type=sa.VARCHAR(length=255), nullable=False)
            batch_op.alter_column('app_model_config_id', existing_type=models.types.StringUUID(), nullable=False)
    with op.batch_alter_table('conversations', schema=None) as batch_op:
        batch_op.alter_column('model_id', existing_type=sa.VARCHAR(length=255), nullable=False)
        batch_op.alter_column('model_provider', existing_type=sa.VARCHAR(length=255), nullable=False)
        batch_op.alter_column('app_model_config_id', existing_type=models.types.StringUUID(), nullable=False)

    # ### end Alembic commands ###
@@ -6,14 +6,10 @@ Create Date: 2024-01-12 03:42:27.362415

"""
from alembic import op
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '4829e54d2fee'
down_revision = '114eed84c228'

@@ -23,39 +19,21 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.alter_column('message_chain_id', existing_type=postgresql.UUID(), nullable=True)
    else:
        # MySQL: Use compatible syntax
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.alter_column('message_chain_id', existing_type=models.types.StringUUID(), nullable=True)

    with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
        batch_op.alter_column('message_chain_id', existing_type=models.types.StringUUID(), nullable=True)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.alter_column('message_chain_id', existing_type=postgresql.UUID(), nullable=False)
    else:
        # MySQL: Use compatible syntax
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.alter_column('message_chain_id', existing_type=models.types.StringUUID(), nullable=False)

    with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
        batch_op.alter_column('message_chain_id', existing_type=models.types.StringUUID(), nullable=False)

    # ### end Alembic commands ###
@@ -6,14 +6,10 @@ Create Date: 2024-03-14 04:54:56.679506

"""
from alembic import op
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '563cf8bf777b'
down_revision = 'b5429b71023c'

@@ -23,35 +19,19 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tool_files', schema=None) as batch_op:
            batch_op.alter_column('conversation_id', existing_type=postgresql.UUID(), nullable=True)
    else:
        with op.batch_alter_table('tool_files', schema=None) as batch_op:
            batch_op.alter_column('conversation_id', existing_type=models.types.StringUUID(), nullable=True)
    with op.batch_alter_table('tool_files', schema=None) as batch_op:
        batch_op.alter_column('conversation_id', existing_type=models.types.StringUUID(), nullable=True)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tool_files', schema=None) as batch_op:
            batch_op.alter_column('conversation_id', existing_type=postgresql.UUID(), nullable=False)
    else:
        with op.batch_alter_table('tool_files', schema=None) as batch_op:
            batch_op.alter_column('conversation_id', existing_type=models.types.StringUUID(), nullable=False)
    with op.batch_alter_table('tool_files', schema=None) as batch_op:
        batch_op.alter_column('conversation_id', existing_type=models.types.StringUUID(), nullable=False)

    # ### end Alembic commands ###
@@ -48,12 +48,9 @@ def upgrade():
    with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
        batch_op.create_index('provider_model_name_idx', ['provider_name', 'model_name'], unique=False)

    if _is_pg(conn):
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.add_column(sa.Column('collection_binding_id', postgresql.UUID(), nullable=True))
    else:
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.add_column(sa.Column('collection_binding_id', models.types.StringUUID(), nullable=True))

    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.add_column(sa.Column('collection_binding_id', models.types.StringUUID(), nullable=True))

    # ### end Alembic commands ###

@@ -11,9 +11,6 @@ from alembic import op
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '714aafe25d39'
down_revision = 'f2a6fc85e260'
@@ -23,16 +20,9 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
            batch_op.add_column(sa.Column('annotation_question', sa.Text(), nullable=False))
            batch_op.add_column(sa.Column('annotation_content', sa.Text(), nullable=False))
    else:
        with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
            batch_op.add_column(sa.Column('annotation_question', models.types.LongText(), nullable=False))
            batch_op.add_column(sa.Column('annotation_content', models.types.LongText(), nullable=False))
    with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
        batch_op.add_column(sa.Column('annotation_question', models.types.LongText(), nullable=False))
        batch_op.add_column(sa.Column('annotation_content', models.types.LongText(), nullable=False))

    # ### end Alembic commands ###

@@ -11,9 +11,6 @@ from alembic import op
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '77e83833755c'
down_revision = '6dcb43972bdc'
@@ -23,14 +20,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('retriever_resource', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('retriever_resource', models.types.LongText(), nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('retriever_resource', models.types.LongText(), nullable=True))

    # ### end Alembic commands ###

@@ -27,7 +27,6 @@ def upgrade():
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        op.create_table('tool_providers',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', postgresql.UUID(), nullable=False),

@@ -40,7 +39,6 @@ def upgrade():
            sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
        )
    else:
        # MySQL: Use compatible syntax
        op.create_table('tool_providers',
            sa.Column('id', models.types.StringUUID(), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
@@ -52,12 +50,9 @@ def upgrade():
            sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
            sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
        )
    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('sensitive_word_avoidance', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('sensitive_word_avoidance', models.types.LongText(), nullable=True))

    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('sensitive_word_avoidance', models.types.LongText(), nullable=True))

    # ### end Alembic commands ###

@@ -11,9 +11,6 @@ from alembic import op
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '88072f0caa04'
down_revision = '246ba09cbbdb'
@@ -23,14 +20,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tenants', schema=None) as batch_op:
            batch_op.add_column(sa.Column('custom_config', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('tenants', schema=None) as batch_op:
            batch_op.add_column(sa.Column('custom_config', models.types.LongText(), nullable=True))
    with op.batch_alter_table('tenants', schema=None) as batch_op:
        batch_op.add_column(sa.Column('custom_config', models.types.LongText(), nullable=True))

    # ### end Alembic commands ###

@@ -11,9 +11,6 @@ from alembic import op
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '89c7899ca936'
down_revision = '187385f442fc'
@@ -23,39 +20,21 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('description', existing_type=sa.VARCHAR(length=255), type_=sa.Text(), existing_nullable=True)
    else:
        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('description', existing_type=sa.VARCHAR(length=255), type_=models.types.LongText(), existing_nullable=True)
    with op.batch_alter_table('sites', schema=None) as batch_op:
        batch_op.alter_column('description', existing_type=sa.VARCHAR(length=255), type_=models.types.LongText(), existing_nullable=True)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('description', existing_type=sa.Text(), type_=sa.VARCHAR(length=255), existing_nullable=True)
    else:
        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('description', existing_type=models.types.LongText(), type_=sa.VARCHAR(length=255), existing_nullable=True)
    with op.batch_alter_table('sites', schema=None) as batch_op:
        batch_op.alter_column('description', existing_type=models.types.LongText(), type_=sa.VARCHAR(length=255), existing_nullable=True)

    # ### end Alembic commands ###
@@ -11,9 +11,6 @@ from alembic import op
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '8ec536f3c800'
down_revision = 'ad472b61a054'

@@ -23,14 +20,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('credentials_str', sa.Text(), nullable=False))
    else:
        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('credentials_str', models.types.LongText(), nullable=False))
    with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('credentials_str', models.types.LongText(), nullable=False))

    # ### end Alembic commands ###
@@ -57,12 +57,9 @@ def upgrade():
        batch_op.create_index('message_file_created_by_idx', ['created_by'], unique=False)
        batch_op.create_index('message_file_message_idx', ['message_id'], unique=False)

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('file_upload', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('file_upload', models.types.LongText(), nullable=True))

    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('file_upload', models.types.LongText(), nullable=True))

    if _is_pg(conn):
        with op.batch_alter_table('upload_files', schema=None) as batch_op:

@@ -24,7 +24,6 @@ def upgrade():
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        with op.batch_alter_table('pinned_conversations', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'::character varying"), nullable=False))
            batch_op.drop_index('pinned_conversation_conversation_idx')

@@ -35,7 +34,6 @@ def upgrade():
            batch_op.drop_index('saved_message_message_idx')
            batch_op.create_index('saved_message_message_idx', ['app_id', 'message_id', 'created_by_role', 'created_by'], unique=False)
    else:
        # MySQL: Use compatible syntax
        with op.batch_alter_table('pinned_conversations', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'"), nullable=False))
            batch_op.drop_index('pinned_conversation_conversation_idx')
@@ -11,9 +11,6 @@ from alembic import op
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'a5b56fb053ef'
down_revision = 'd3d503a3471c'

@@ -23,14 +20,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('speech_to_text', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('speech_to_text', models.types.LongText(), nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('speech_to_text', models.types.LongText(), nullable=True))

    # ### end Alembic commands ###
@@ -11,9 +11,6 @@ from alembic import op
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'a9836e3baeee'
down_revision = '968fff4c0ab9'

@@ -23,14 +20,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('external_data_tools', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('external_data_tools', models.types.LongText(), nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('external_data_tools', models.types.LongText(), nullable=True))

    # ### end Alembic commands ###
@@ -11,9 +11,6 @@ from alembic import op
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'b24be59fbb04'
down_revision = 'de95f5c77138'

@@ -23,14 +20,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('text_to_speech', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('text_to_speech', models.types.LongText(), nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('text_to_speech', models.types.LongText(), nullable=True))

    # ### end Alembic commands ###
@@ -11,9 +11,6 @@ from alembic import op
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'b3a09c049e8e'
down_revision = '2e9819ca5b28'

@@ -23,20 +20,11 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple'))
            batch_op.add_column(sa.Column('chat_prompt_config', sa.Text(), nullable=True))
            batch_op.add_column(sa.Column('completion_prompt_config', sa.Text(), nullable=True))
            batch_op.add_column(sa.Column('dataset_configs', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple'))
            batch_op.add_column(sa.Column('chat_prompt_config', models.types.LongText(), nullable=True))
            batch_op.add_column(sa.Column('completion_prompt_config', models.types.LongText(), nullable=True))
            batch_op.add_column(sa.Column('dataset_configs', models.types.LongText(), nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple'))
        batch_op.add_column(sa.Column('chat_prompt_config', models.types.LongText(), nullable=True))
        batch_op.add_column(sa.Column('completion_prompt_config', models.types.LongText(), nullable=True))
        batch_op.add_column(sa.Column('dataset_configs', models.types.LongText(), nullable=True))

    # ### end Alembic commands ###
@@ -7,7 +7,6 @@ Create Date: 2024-06-17 10:01:00.255189
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

import models.types

@@ -54,12 +54,9 @@ def upgrade():
        batch_op.create_index('app_annotation_hit_histories_annotation_idx', ['annotation_id'], unique=False)
        batch_op.create_index('app_annotation_hit_histories_app_idx', ['app_id'], unique=False)

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('annotation_reply', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), nullable=True))

    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), nullable=True))

    if _is_pg(conn):
        with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
@@ -68,54 +65,31 @@ def upgrade():
        with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
            batch_op.add_column(sa.Column('type', sa.String(length=40), server_default=sa.text("'dataset'"), nullable=False))

    if _is_pg(conn):
        with op.batch_alter_table('message_annotations', schema=None) as batch_op:
            batch_op.add_column(sa.Column('question', sa.Text(), nullable=True))
            batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False))
            batch_op.alter_column('conversation_id', existing_type=postgresql.UUID(), nullable=True)
            batch_op.alter_column('message_id', existing_type=postgresql.UUID(), nullable=True)
    else:
        with op.batch_alter_table('message_annotations', schema=None) as batch_op:
            batch_op.add_column(sa.Column('question', models.types.LongText(), nullable=True))
            batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False))
            batch_op.alter_column('conversation_id', existing_type=models.types.StringUUID(), nullable=True)
            batch_op.alter_column('message_id', existing_type=models.types.StringUUID(), nullable=True)
    with op.batch_alter_table('message_annotations', schema=None) as batch_op:
        batch_op.add_column(sa.Column('question', models.types.LongText(), nullable=True))
        batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False))
        batch_op.alter_column('conversation_id', existing_type=models.types.StringUUID(), nullable=True)
        batch_op.alter_column('message_id', existing_type=models.types.StringUUID(), nullable=True)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('message_annotations', schema=None) as batch_op:
            batch_op.alter_column('message_id', existing_type=postgresql.UUID(), nullable=False)
            batch_op.alter_column('conversation_id', existing_type=postgresql.UUID(), nullable=False)
            batch_op.drop_column('hit_count')
            batch_op.drop_column('question')
    else:
        with op.batch_alter_table('message_annotations', schema=None) as batch_op:
            batch_op.alter_column('message_id', existing_type=models.types.StringUUID(), nullable=False)
            batch_op.alter_column('conversation_id', existing_type=models.types.StringUUID(), nullable=False)
            batch_op.drop_column('hit_count')
            batch_op.drop_column('question')
    with op.batch_alter_table('message_annotations', schema=None) as batch_op:
        batch_op.alter_column('message_id', existing_type=models.types.StringUUID(), nullable=False)
        batch_op.alter_column('conversation_id', existing_type=models.types.StringUUID(), nullable=False)
        batch_op.drop_column('hit_count')
        batch_op.drop_column('question')

    with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
        batch_op.drop_column('type')
@@ -12,9 +12,6 @@ from sqlalchemy.dialects import postgresql
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'f2a6fc85e260'
down_revision = '46976cc39132'

@@ -24,16 +21,9 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
            batch_op.add_column(sa.Column('message_id', postgresql.UUID(), nullable=False))
            batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False)
    else:
        with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
            batch_op.add_column(sa.Column('message_id', models.types.StringUUID(), nullable=False))
            batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False)
    with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
        batch_op.add_column(sa.Column('message_id', models.types.StringUUID(), nullable=False))
        batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False)

    # ### end Alembic commands ###
@@ -70,7 +70,6 @@ class ProviderResponse(BaseModel):
    description: I18nObject | None = None
    icon_small: I18nObject | None = None
    icon_small_dark: I18nObject | None = None
    icon_large: I18nObject | None = None
    background: str | None = None
    help: ProviderHelpEntity | None = None
    supported_model_types: Sequence[ModelType]

@@ -98,11 +97,6 @@ class ProviderResponse(BaseModel):
                en_US=f"{url_prefix}/icon_small_dark/en_US",
                zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans",
            )

        if self.icon_large is not None:
            self.icon_large = I18nObject(
                en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"
            )
        return self
@@ -116,7 +110,6 @@ class ProviderWithModelsResponse(BaseModel):
    label: I18nObject
    icon_small: I18nObject | None = None
    icon_small_dark: I18nObject | None = None
    icon_large: I18nObject | None = None
    status: CustomConfigurationStatus
    models: list[ProviderModelWithStatusEntity]

@@ -134,11 +127,6 @@ class ProviderWithModelsResponse(BaseModel):
            self.icon_small_dark = I18nObject(
                en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans"
            )

        if self.icon_large is not None:
            self.icon_large = I18nObject(
                en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"
            )
        return self
@@ -163,11 +151,6 @@ class SimpleProviderEntityResponse(SimpleProviderEntity):
            self.icon_small_dark = I18nObject(
                en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans"
            )

        if self.icon_large is not None:
            self.icon_large = I18nObject(
                en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"
            )
        return self

@@ -99,7 +99,6 @@ class ModelProviderService:
                description=provider_configuration.provider.description,
                icon_small=provider_configuration.provider.icon_small,
                icon_small_dark=provider_configuration.provider.icon_small_dark,
                icon_large=provider_configuration.provider.icon_large,
                background=provider_configuration.provider.background,
                help=provider_configuration.provider.help,
                supported_model_types=provider_configuration.provider.supported_model_types,
@@ -423,7 +422,6 @@ class ModelProviderService:
                label=first_model.provider.label,
                icon_small=first_model.provider.icon_small,
                icon_small_dark=first_model.provider.icon_small_dark,
                icon_large=first_model.provider.icon_large,
                status=CustomConfigurationStatus.ACTIVE,
                models=[
                    ProviderModelWithStatusEntity(

@@ -488,7 +486,6 @@ class ModelProviderService:
                    provider=result.provider.provider,
                    label=result.provider.label,
                    icon_small=result.provider.icon_small,
                    icon_large=result.provider.icon_large,
                    supported_model_types=result.provider.supported_model_types,
                ),
            )

@@ -522,7 +519,7 @@ class ModelProviderService:

        :param tenant_id: workspace id
        :param provider: provider name
        :param icon_type: icon type (icon_small or icon_large)
        :param icon_type: icon type (icon_small or icon_small_dark)
        :param lang: language (zh_Hans or en_US)
        :return:
        """
@@ -48,10 +48,6 @@ class MockModelClass(PluginModelClient):
en_US="https://example.com/icon_small.png",
zh_Hans="https://example.com/icon_small.png",
),
icon_large=I18nObject(
en_US="https://example.com/icon_large.png",
zh_Hans="https://example.com/icon_large.png",
),
supported_model_types=[ModelType.LLM],
configurate_methods=[ConfigurateMethod.PREDEFINED_MODEL],
models=[
@@ -228,7 +228,6 @@ class TestModelProviderService:
mock_provider_entity.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"}
mock_provider_entity.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity.icon_small_dark = None
mock_provider_entity.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity.background = "#FF6B6B"
mock_provider_entity.help = None
mock_provider_entity.supported_model_types = [ModelType.LLM, ModelType.TEXT_EMBEDDING]

@@ -302,7 +301,6 @@ class TestModelProviderService:
mock_provider_entity_llm.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"}
mock_provider_entity_llm.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity_llm.icon_small_dark = None
mock_provider_entity_llm.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity_llm.background = "#FF6B6B"
mock_provider_entity_llm.help = None
mock_provider_entity_llm.supported_model_types = [ModelType.LLM]

@@ -316,7 +314,6 @@ class TestModelProviderService:
mock_provider_entity_embedding.description = {"en_US": "Cohere provider", "zh_Hans": "Cohere 提供商"}
mock_provider_entity_embedding.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity_embedding.icon_small_dark = None
mock_provider_entity_embedding.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity_embedding.background = "#4ECDC4"
mock_provider_entity_embedding.help = None
mock_provider_entity_embedding.supported_model_types = [ModelType.TEXT_EMBEDDING]

@@ -419,7 +416,6 @@ class TestModelProviderService:
provider="openai",
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"),
icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"),
supported_model_types=[ModelType.LLM],
configurate_methods=[],
models=[],

@@ -431,7 +427,6 @@ class TestModelProviderService:
provider="openai",
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"),
icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"),
supported_model_types=[ModelType.LLM],
configurate_methods=[],
models=[],

@@ -655,7 +650,6 @@ class TestModelProviderService:
provider="openai",
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"),
icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"),
supported_model_types=[ModelType.LLM],
),
)

@@ -1027,7 +1021,6 @@ class TestModelProviderService:
label={"en_US": "OpenAI", "zh_Hans": "OpenAI"},
icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"},
icon_small_dark=None,
icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"},
),
model="gpt-3.5-turbo",
model_type=ModelType.LLM,

@@ -1045,7 +1038,6 @@ class TestModelProviderService:
label={"en_US": "OpenAI", "zh_Hans": "OpenAI"},
icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"},
icon_small_dark=None,
icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"},
),
model="gpt-4",
model_type=ModelType.LLM,
@ -0,0 +1,174 @@
|
|||
"""Unit tests for controllers.web.message message list mapping."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import builtins
|
||||
from datetime import datetime
|
||||
from types import ModuleType, SimpleNamespace
|
||||
from unittest.mock import patch
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from flask.views import MethodView
|
||||
|
||||
# Ensure flask_restx.api finds MethodView during import.
|
||||
if not hasattr(builtins, "MethodView"):
|
||||
builtins.MethodView = MethodView # type: ignore[attr-defined]
|
||||
|
||||
|
||||
def _load_controller_module():
|
||||
"""Import controllers.web.message using a stub package."""
|
||||
|
||||
import importlib
|
||||
import importlib.util
|
||||
import sys
|
||||
|
||||
parent_module_name = "controllers.web"
|
||||
module_name = f"{parent_module_name}.message"
|
||||
|
||||
if parent_module_name not in sys.modules:
|
||||
from flask_restx import Namespace
|
||||
|
||||
stub = ModuleType(parent_module_name)
|
||||
stub.__file__ = "controllers/web/__init__.py"
|
||||
stub.__path__ = ["controllers/web"]
|
||||
stub.__package__ = "controllers"
|
||||
stub.__spec__ = importlib.util.spec_from_loader(parent_module_name, loader=None, is_package=True)
|
||||
stub.web_ns = Namespace("web", description="Web API", path="/")
|
||||
sys.modules[parent_module_name] = stub
|
||||
|
||||
wraps_module_name = f"{parent_module_name}.wraps"
|
||||
if wraps_module_name not in sys.modules:
|
||||
wraps_stub = ModuleType(wraps_module_name)
|
||||
|
||||
class WebApiResource:
|
||||
pass
|
||||
|
||||
wraps_stub.WebApiResource = WebApiResource
|
||||
sys.modules[wraps_module_name] = wraps_stub
|
||||
|
||||
return importlib.import_module(module_name)
|
||||
|
||||
|
||||
message_module = _load_controller_module()
|
||||
MessageListApi = message_module.MessageListApi
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app() -> Flask:
|
||||
app = Flask(__name__)
|
||||
app.config["TESTING"] = True
|
||||
return app
|
||||
|
||||
|
||||
def test_message_list_mapping(app: Flask) -> None:
|
||||
conversation_id = str(uuid4())
|
||||
message_id = str(uuid4())
|
||||
|
||||
created_at = datetime(2024, 1, 1, 12, 0, 0)
|
||||
resource_created_at = datetime(2024, 1, 1, 13, 0, 0)
|
||||
thought_created_at = datetime(2024, 1, 1, 14, 0, 0)
|
||||
|
||||
retriever_resource_obj = SimpleNamespace(
|
||||
id="res-obj",
|
||||
message_id=message_id,
|
||||
position=2,
|
||||
dataset_id="ds-1",
|
||||
dataset_name="dataset",
|
||||
document_id="doc-1",
|
||||
document_name="document",
|
||||
data_source_type="file",
|
||||
segment_id="seg-1",
|
||||
score=0.9,
|
||||
hit_count=1,
|
||||
word_count=10,
|
||||
segment_position=0,
|
||||
index_node_hash="hash",
|
||||
content="content",
|
||||
created_at=resource_created_at,
|
||||
)
|
||||
|
||||
agent_thought = SimpleNamespace(
|
||||
id="thought-1",
|
||||
chain_id=None,
|
||||
message_chain_id="chain-1",
|
||||
message_id=message_id,
|
||||
position=1,
|
||||
thought="thinking",
|
||||
tool="tool",
|
||||
tool_labels={"label": "value"},
|
||||
tool_input="{}",
|
||||
created_at=thought_created_at,
|
||||
observation="observed",
|
||||
files=["file-a"],
|
||||
)
|
||||
|
||||
message_file_obj = SimpleNamespace(
|
||||
id="file-obj",
|
||||
filename="b.txt",
|
||||
type="file",
|
||||
url=None,
|
||||
mime_type=None,
|
||||
size=None,
|
||||
transfer_method="local",
|
||||
belongs_to=None,
|
||||
upload_file_id=None,
|
||||
)
|
||||
|
||||
message = SimpleNamespace(
|
||||
id=message_id,
|
||||
conversation_id=conversation_id,
|
||||
parent_message_id=None,
|
||||
inputs={"foo": "bar"},
|
||||
query="hello",
|
||||
re_sign_file_url_answer="answer",
|
||||
user_feedback=SimpleNamespace(rating="like"),
|
||||
retriever_resources=[
|
||||
{"id": "res-dict", "message_id": message_id, "position": 1},
|
||||
retriever_resource_obj,
|
||||
],
|
||||
created_at=created_at,
|
||||
agent_thoughts=[agent_thought],
|
||||
message_files=[
|
||||
{"id": "file-dict", "filename": "a.txt", "type": "file", "transfer_method": "local"},
|
||||
message_file_obj,
|
||||
],
|
||||
status="success",
|
||||
error=None,
|
||||
message_metadata_dict={"meta": "value"},
|
||||
)
|
||||
|
||||
pagination = SimpleNamespace(limit=20, has_more=False, data=[message])
|
||||
app_model = SimpleNamespace(mode="chat")
|
||||
end_user = SimpleNamespace()
|
||||
|
||||
with (
|
||||
patch.object(message_module.MessageService, "pagination_by_first_id", return_value=pagination) as mock_page,
|
||||
app.test_request_context(f"/messages?conversation_id={conversation_id}&limit=20"),
|
||||
):
|
||||
response = MessageListApi().get(app_model, end_user)
|
||||
|
||||
mock_page.assert_called_once_with(app_model, end_user, conversation_id, None, 20)
|
||||
assert response["limit"] == 20
|
||||
assert response["has_more"] is False
|
||||
assert len(response["data"]) == 1
|
||||
|
||||
item = response["data"][0]
|
||||
assert item["id"] == message_id
|
||||
assert item["conversation_id"] == conversation_id
|
||||
assert item["inputs"] == {"foo": "bar"}
|
||||
assert item["answer"] == "answer"
|
||||
assert item["feedback"]["rating"] == "like"
|
||||
assert item["metadata"] == {"meta": "value"}
|
||||
assert item["created_at"] == int(created_at.timestamp())
|
||||
|
||||
assert item["retriever_resources"][0]["id"] == "res-dict"
|
||||
assert item["retriever_resources"][1]["id"] == "res-obj"
|
||||
assert item["retriever_resources"][1]["created_at"] == int(resource_created_at.timestamp())
|
||||
|
||||
assert item["agent_thoughts"][0]["chain_id"] == "chain-1"
|
||||
assert item["agent_thoughts"][0]["created_at"] == int(thought_created_at.timestamp())
|
||||
|
||||
assert item["message_files"][0]["id"] == "file-dict"
|
||||
assert item["message_files"][1]["id"] == "file-obj"
|
||||
|
|
@ -14,12 +14,12 @@ def test_successful_request(mock_get_client):
|
|||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.send.return_value = mock_response
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
response = make_request("GET", "http://example.com")
|
||||
assert response.status_code == 200
|
||||
mock_client.request.assert_called_once()
|
||||
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
|
|
@ -27,7 +27,6 @@ def test_retry_exceed_max_retries(mock_get_client):
|
|||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 500
|
||||
mock_client.send.return_value = mock_response
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
|
|
@ -72,34 +71,12 @@ class TestGetUserProvidedHostHeader:
|
|||
assert result in ("first.com", "second.com")
|
||||
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_host_header_preservation_without_user_header(mock_get_client):
|
||||
"""Test that when no Host header is provided, the default behavior is maintained."""
|
||||
mock_client = MagicMock()
|
||||
mock_request = MagicMock()
|
||||
mock_request.headers = {}
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.send.return_value = mock_response
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
response = make_request("GET", "http://example.com")
|
||||
|
||||
assert response.status_code == 200
|
||||
# Host should not be set if not provided by user
|
||||
assert "Host" not in mock_request.headers or mock_request.headers.get("Host") is None
|
||||
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_host_header_preservation_with_user_header(mock_get_client):
|
||||
"""Test that user-provided Host header is preserved in the request."""
|
||||
mock_client = MagicMock()
|
||||
mock_request = MagicMock()
|
||||
mock_request.headers = {}
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.send.return_value = mock_response
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
|
|
@ -107,3 +84,93 @@ def test_host_header_preservation_with_user_header(mock_get_client):
|
|||
response = make_request("GET", "http://example.com", headers={"Host": custom_host})
|
||||
|
||||
assert response.status_code == 200
|
||||
# Verify client.request was called with the host header preserved (lowercase)
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert call_kwargs["headers"]["host"] == custom_host
|
||||
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
@pytest.mark.parametrize("host_key", ["host", "HOST", "Host"])
|
||||
def test_host_header_preservation_case_insensitive(mock_get_client, host_key):
|
||||
"""Test that Host header is preserved regardless of case."""
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
response = make_request("GET", "http://example.com", headers={host_key: "api.example.com"})
|
||||
|
||||
assert response.status_code == 200
|
||||
# Host header should be normalized to lowercase "host"
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert call_kwargs["headers"]["host"] == "api.example.com"
|
||||
|
||||
|
||||
class TestFollowRedirectsParameter:
|
||||
"""Tests for follow_redirects parameter handling.
|
||||
|
||||
These tests verify that follow_redirects is correctly passed to client.request().
|
||||
"""
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_follow_redirects_passed_to_request(self, mock_get_client):
|
||||
"""Verify follow_redirects IS passed to client.request()."""
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
make_request("GET", "http://example.com", follow_redirects=True)
|
||||
|
||||
# Verify follow_redirects was passed to request
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert call_kwargs.get("follow_redirects") is True
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_allow_redirects_converted_to_follow_redirects(self, mock_get_client):
|
||||
"""Verify allow_redirects (requests-style) is converted to follow_redirects (httpx-style)."""
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
# Use allow_redirects (requests-style parameter)
|
||||
make_request("GET", "http://example.com", allow_redirects=True)
|
||||
|
||||
# Verify it was converted to follow_redirects
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert call_kwargs.get("follow_redirects") is True
|
||||
assert "allow_redirects" not in call_kwargs
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_follow_redirects_not_set_when_not_specified(self, mock_get_client):
|
||||
"""Verify follow_redirects is not in kwargs when not specified (httpx default behavior)."""
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
make_request("GET", "http://example.com")
|
||||
|
||||
# follow_redirects should not be in kwargs, letting httpx use its default
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert "follow_redirects" not in call_kwargs
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_follow_redirects_takes_precedence_over_allow_redirects(self, mock_get_client):
|
||||
"""Verify follow_redirects takes precedence when both are specified."""
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
# Both specified - follow_redirects should take precedence
|
||||
make_request("GET", "http://example.com", allow_redirects=False, follow_redirects=True)
|
||||
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert call_kwargs.get("follow_redirects") is True
|
||||
|
|
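The redirect tests above pin down how make_request is expected to translate the requests-style allow_redirects flag into httpx's follow_redirects. A minimal sketch of that normalization, assuming a small standalone helper (the helper name is hypothetical; the real core.helper.ssrf_proxy may inline this logic):

# Hypothetical helper; illustrates only the behaviour asserted in the tests above.
def _normalize_redirect_kwargs(kwargs: dict) -> dict:
    """Translate requests-style allow_redirects into httpx-style follow_redirects."""
    if "allow_redirects" in kwargs:
        allow = kwargs.pop("allow_redirects")
        # Fill in follow_redirects only when the caller did not pass it explicitly,
        # so an explicit follow_redirects wins on conflict and nothing is set otherwise.
        kwargs.setdefault("follow_redirects", allow)
    return kwargs

assert _normalize_redirect_kwargs({"allow_redirects": True}) == {"follow_redirects": True}
assert _normalize_redirect_kwargs({"allow_redirects": False, "follow_redirects": True}) == {"follow_redirects": True}
assert _normalize_redirect_kwargs({}) == {}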
|
|||
|
|
@@ -0,0 +1,79 @@
"""Tests for logging context module."""

import uuid

from core.logging.context import (
    clear_request_context,
    get_request_id,
    get_trace_id,
    init_request_context,
)


class TestLoggingContext:
    """Tests for the logging context functions."""

    def test_init_creates_request_id(self):
        """init_request_context should create a 10-char request ID."""
        init_request_context()
        request_id = get_request_id()
        assert len(request_id) == 10
        assert all(c in "0123456789abcdef" for c in request_id)

    def test_init_creates_trace_id(self):
        """init_request_context should create a 32-char trace ID."""
        init_request_context()
        trace_id = get_trace_id()
        assert len(trace_id) == 32
        assert all(c in "0123456789abcdef" for c in trace_id)

    def test_trace_id_derived_from_request_id(self):
        """trace_id should be deterministically derived from request_id."""
        init_request_context()
        request_id = get_request_id()
        trace_id = get_trace_id()

        # Verify trace_id is derived using uuid5
        expected_trace = uuid.uuid5(uuid.NAMESPACE_DNS, request_id).hex
        assert trace_id == expected_trace

    def test_clear_resets_context(self):
        """clear_request_context should reset both IDs to empty strings."""
        init_request_context()
        assert get_request_id() != ""
        assert get_trace_id() != ""

        clear_request_context()
        assert get_request_id() == ""
        assert get_trace_id() == ""

    def test_default_values_are_empty(self):
        """Default values should be empty strings before init."""
        clear_request_context()
        assert get_request_id() == ""
        assert get_trace_id() == ""

    def test_multiple_inits_create_different_ids(self):
        """Each init should create new unique IDs."""
        init_request_context()
        first_request_id = get_request_id()
        first_trace_id = get_trace_id()

        init_request_context()
        second_request_id = get_request_id()
        second_trace_id = get_trace_id()

        assert first_request_id != second_request_id
        assert first_trace_id != second_trace_id

    def test_context_isolation(self):
        """Context should be isolated per-call (no thread leakage in same thread)."""
        init_request_context()
        id1 = get_request_id()

        # Simulate another request
        init_request_context()
        id2 = get_request_id()

        # IDs should be different
        assert id1 != id2
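The tests above fully describe the observable behaviour of core.logging.context without showing the module itself. A minimal sketch that would satisfy them, assuming a contextvars-based implementation (the ContextVar names below are assumptions, not the actual source):

# Hedged sketch of core/logging/context.py, reconstructed only from the assertions above.
import uuid
from contextvars import ContextVar

_request_id_var: ContextVar[str] = ContextVar("request_id", default="")
_trace_id_var: ContextVar[str] = ContextVar("trace_id", default="")


def init_request_context() -> None:
    # A short, human-readable per-request identifier: 10 hex characters.
    request_id = uuid.uuid4().hex[:10]
    _request_id_var.set(request_id)
    # A 32-hex-char trace id derived deterministically from the request id via uuid5.
    _trace_id_var.set(uuid.uuid5(uuid.NAMESPACE_DNS, request_id).hex)


def get_request_id() -> str:
    return _request_id_var.get()


def get_trace_id() -> str:
    return _trace_id_var.get()


def clear_request_context() -> None:
    _request_id_var.set("")
    _trace_id_var.set("")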
@ -0,0 +1,114 @@
|
|||
"""Tests for logging filters."""
|
||||
|
||||
import logging
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def log_record():
|
||||
return logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="",
|
||||
lineno=0,
|
||||
msg="test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
|
||||
|
||||
class TestTraceContextFilter:
|
||||
def test_sets_empty_trace_id_without_context(self, log_record):
|
||||
from core.logging.context import clear_request_context
|
||||
from core.logging.filters import TraceContextFilter
|
||||
|
||||
# Ensure no context is set
|
||||
clear_request_context()
|
||||
|
||||
filter = TraceContextFilter()
|
||||
result = filter.filter(log_record)
|
||||
|
||||
assert result is True
|
||||
assert hasattr(log_record, "trace_id")
|
||||
assert hasattr(log_record, "span_id")
|
||||
assert hasattr(log_record, "req_id")
|
||||
# Without context, IDs should be empty
|
||||
assert log_record.trace_id == ""
|
||||
assert log_record.req_id == ""
|
||||
|
||||
def test_sets_trace_id_from_context(self, log_record):
|
||||
"""Test that trace_id and req_id are set from ContextVar when initialized."""
|
||||
from core.logging.context import init_request_context
|
||||
from core.logging.filters import TraceContextFilter
|
||||
|
||||
# Initialize context (no Flask needed!)
|
||||
init_request_context()
|
||||
|
||||
filter = TraceContextFilter()
|
||||
filter.filter(log_record)
|
||||
|
||||
# With context initialized, IDs should be set
|
||||
assert log_record.trace_id != ""
|
||||
assert len(log_record.trace_id) == 32
|
||||
assert log_record.req_id != ""
|
||||
assert len(log_record.req_id) == 10
|
||||
|
||||
def test_filter_always_returns_true(self, log_record):
|
||||
from core.logging.filters import TraceContextFilter
|
||||
|
||||
filter = TraceContextFilter()
|
||||
result = filter.filter(log_record)
|
||||
assert result is True
|
||||
|
||||
def test_sets_trace_id_from_otel_when_available(self, log_record):
|
||||
from core.logging.filters import TraceContextFilter
|
||||
|
||||
mock_span = mock.MagicMock()
|
||||
mock_context = mock.MagicMock()
|
||||
mock_context.trace_id = 0x5B8AA5A2D2C872E8321CF37308D69DF2
|
||||
mock_context.span_id = 0x051581BF3BB55C45
|
||||
mock_span.get_span_context.return_value = mock_context
|
||||
|
||||
with (
|
||||
mock.patch("opentelemetry.trace.get_current_span", return_value=mock_span),
|
||||
mock.patch("opentelemetry.trace.span.INVALID_TRACE_ID", 0),
|
||||
mock.patch("opentelemetry.trace.span.INVALID_SPAN_ID", 0),
|
||||
):
|
||||
filter = TraceContextFilter()
|
||||
filter.filter(log_record)
|
||||
|
||||
assert log_record.trace_id == "5b8aa5a2d2c872e8321cf37308d69df2"
|
||||
assert log_record.span_id == "051581bf3bb55c45"
|
||||
|
||||
|
||||
class TestIdentityContextFilter:
|
||||
def test_sets_empty_identity_without_request_context(self, log_record):
|
||||
from core.logging.filters import IdentityContextFilter
|
||||
|
||||
filter = IdentityContextFilter()
|
||||
result = filter.filter(log_record)
|
||||
|
||||
assert result is True
|
||||
assert log_record.tenant_id == ""
|
||||
assert log_record.user_id == ""
|
||||
assert log_record.user_type == ""
|
||||
|
||||
def test_filter_always_returns_true(self, log_record):
|
||||
from core.logging.filters import IdentityContextFilter
|
||||
|
||||
filter = IdentityContextFilter()
|
||||
result = filter.filter(log_record)
|
||||
assert result is True
|
||||
|
||||
def test_handles_exception_gracefully(self, log_record):
|
||||
from core.logging.filters import IdentityContextFilter
|
||||
|
||||
filter = IdentityContextFilter()
|
||||
|
||||
# Should not raise even if something goes wrong
|
||||
with mock.patch("core.logging.filters.flask.has_request_context", side_effect=Exception("Test error")):
|
||||
result = filter.filter(log_record)
|
||||
assert result is True
|
||||
assert log_record.tenant_id == ""
|
||||
|
|
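Read together, these tests outline the rough shape of TraceContextFilter: annotate every record with req_id, trace_id and span_id, prefer a live OpenTelemetry span over the request context, and never drop a record. A hedged sketch under those assumptions (not the actual core/logging/filters.py source):

# Illustrative sketch only; attribute names follow the tests, internals are assumed.
import logging

from opentelemetry import trace
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID

from core.logging.context import get_request_id, get_trace_id


class TraceContextFilter(logging.Filter):
    def filter(self, record: logging.LogRecord) -> bool:
        record.req_id = get_request_id()
        record.trace_id = get_trace_id()
        record.span_id = ""
        try:
            span = trace.get_current_span()
            if span is not None:
                ctx = span.get_span_context()
                if ctx.trace_id != INVALID_TRACE_ID:
                    record.trace_id = f"{ctx.trace_id:032x}"
                if ctx.span_id != INVALID_SPAN_ID:
                    record.span_id = f"{ctx.span_id:016x}"
        except Exception:
            pass  # never let logging enrichment break the caller
        return True  # annotate, never filter out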
@ -0,0 +1,267 @@
|
|||
"""Tests for structured JSON formatter."""
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import orjson
|
||||
|
||||
|
||||
class TestStructuredJSONFormatter:
|
||||
def test_basic_log_format(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter(service_name="test-service")
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=42,
|
||||
msg="Test message",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert log_dict["severity"] == "INFO"
|
||||
assert log_dict["service"] == "test-service"
|
||||
assert log_dict["caller"] == "test.py:42"
|
||||
assert log_dict["message"] == "Test message"
|
||||
assert "ts" in log_dict
|
||||
assert log_dict["ts"].endswith("Z")
|
||||
|
||||
def test_severity_mapping(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
|
||||
test_cases = [
|
||||
(logging.DEBUG, "DEBUG"),
|
||||
(logging.INFO, "INFO"),
|
||||
(logging.WARNING, "WARN"),
|
||||
(logging.ERROR, "ERROR"),
|
||||
(logging.CRITICAL, "ERROR"),
|
||||
]
|
||||
|
||||
for level, expected_severity in test_cases:
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=level,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
assert log_dict["severity"] == expected_severity, f"Level {level} should map to {expected_severity}"
|
||||
|
||||
def test_error_with_stack_trace(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
|
||||
try:
|
||||
raise ValueError("Test error")
|
||||
except ValueError:
|
||||
exc_info = sys.exc_info()
|
||||
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.ERROR,
|
||||
pathname="test.py",
|
||||
lineno=10,
|
||||
msg="Error occurred",
|
||||
args=(),
|
||||
exc_info=exc_info,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert log_dict["severity"] == "ERROR"
|
||||
assert "stack_trace" in log_dict
|
||||
assert "ValueError: Test error" in log_dict["stack_trace"]
|
||||
|
||||
def test_no_stack_trace_for_info(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
|
||||
try:
|
||||
raise ValueError("Test error")
|
||||
except ValueError:
|
||||
exc_info = sys.exc_info()
|
||||
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=10,
|
||||
msg="Info message",
|
||||
args=(),
|
||||
exc_info=exc_info,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert "stack_trace" not in log_dict
|
||||
|
||||
def test_trace_context_included(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
record.trace_id = "5b8aa5a2d2c872e8321cf37308d69df2"
|
||||
record.span_id = "051581bf3bb55c45"
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert log_dict["trace_id"] == "5b8aa5a2d2c872e8321cf37308d69df2"
|
||||
assert log_dict["span_id"] == "051581bf3bb55c45"
|
||||
|
||||
def test_identity_context_included(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
record.tenant_id = "t-global-corp"
|
||||
record.user_id = "u-admin-007"
|
||||
record.user_type = "admin"
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert "identity" in log_dict
|
||||
assert log_dict["identity"]["tenant_id"] == "t-global-corp"
|
||||
assert log_dict["identity"]["user_id"] == "u-admin-007"
|
||||
assert log_dict["identity"]["user_type"] == "admin"
|
||||
|
||||
def test_no_identity_when_empty(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert "identity" not in log_dict
|
||||
|
||||
def test_attributes_included(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
record.attributes = {"order_id": "ord-123", "amount": 99.99}
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert log_dict["attributes"]["order_id"] == "ord-123"
|
||||
assert log_dict["attributes"]["amount"] == 99.99
|
||||
|
||||
def test_message_with_args(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="User %s logged in from %s",
|
||||
args=("john", "192.168.1.1"),
|
||||
exc_info=None,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert log_dict["message"] == "User john logged in from 192.168.1.1"
|
||||
|
||||
def test_timestamp_format(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
# Verify ISO 8601 format with Z suffix
|
||||
ts = log_dict["ts"]
|
||||
assert ts.endswith("Z")
|
||||
assert "T" in ts
|
||||
# Should have milliseconds
|
||||
assert "." in ts
|
||||
|
||||
def test_fallback_for_non_serializable_attributes(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test with non-serializable",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
# Set is not serializable by orjson
|
||||
record.attributes = {"items": {1, 2, 3}, "custom": object()}
|
||||
|
||||
# Should not raise, fallback to json.dumps with default=str
|
||||
output = formatter.format(record)
|
||||
|
||||
# Verify it's valid JSON (parsed by stdlib json since orjson may fail)
|
||||
import json
|
||||
|
||||
log_dict = json.loads(output)
|
||||
assert log_dict["message"] == "Test with non-serializable"
|
||||
assert "attributes" in log_dict
|
||||
|
|
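The assertions above amount to a specification of the JSON document StructuredJSONFormatter emits: ts, severity, service, caller and message, plus stack_trace, trace/span ids, identity and attributes when present. A small usage sketch; the handler wiring here is illustrative and not taken from the application factory:

# Illustrative wiring: attach the structured formatter to a stream handler.
import logging

from core.logging.structured_formatter import StructuredJSONFormatter

handler = logging.StreamHandler()
handler.setFormatter(StructuredJSONFormatter(service_name="dify-api"))
logging.getLogger().addHandler(handler)

# A record logged through this handler serializes roughly as:
# {"ts": "2024-01-01T12:00:00.000Z", "severity": "INFO", "service": "dify-api",
#  "caller": "app.py:42", "message": "User john logged in", "trace_id": "...", "span_id": "..."}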
@@ -0,0 +1,102 @@
"""Tests for trace helper functions."""

import re
from unittest import mock


class TestGetSpanIdFromOtelContext:
    def test_returns_none_without_span(self):
        from core.helper.trace_id_helper import get_span_id_from_otel_context

        with mock.patch("opentelemetry.trace.get_current_span", return_value=None):
            result = get_span_id_from_otel_context()
            assert result is None

    def test_returns_span_id_when_available(self):
        from core.helper.trace_id_helper import get_span_id_from_otel_context

        mock_span = mock.MagicMock()
        mock_context = mock.MagicMock()
        mock_context.span_id = 0x051581BF3BB55C45
        mock_span.get_span_context.return_value = mock_context

        with mock.patch("opentelemetry.trace.get_current_span", return_value=mock_span):
            with mock.patch("opentelemetry.trace.span.INVALID_SPAN_ID", 0):
                result = get_span_id_from_otel_context()
                assert result == "051581bf3bb55c45"

    def test_returns_none_on_exception(self):
        from core.helper.trace_id_helper import get_span_id_from_otel_context

        with mock.patch("opentelemetry.trace.get_current_span", side_effect=Exception("Test error")):
            result = get_span_id_from_otel_context()
            assert result is None


class TestGenerateTraceparentHeader:
    def test_generates_valid_format(self):
        from core.helper.trace_id_helper import generate_traceparent_header

        with mock.patch("opentelemetry.trace.get_current_span", return_value=None):
            result = generate_traceparent_header()

        assert result is not None
        # Format: 00-{trace_id}-{span_id}-01
        parts = result.split("-")
        assert len(parts) == 4
        assert parts[0] == "00"  # version
        assert len(parts[1]) == 32  # trace_id (32 hex chars)
        assert len(parts[2]) == 16  # span_id (16 hex chars)
        assert parts[3] == "01"  # flags

    def test_uses_otel_context_when_available(self):
        from core.helper.trace_id_helper import generate_traceparent_header

        mock_span = mock.MagicMock()
        mock_context = mock.MagicMock()
        mock_context.trace_id = 0x5B8AA5A2D2C872E8321CF37308D69DF2
        mock_context.span_id = 0x051581BF3BB55C45
        mock_span.get_span_context.return_value = mock_context

        with mock.patch("opentelemetry.trace.get_current_span", return_value=mock_span):
            with (
                mock.patch("opentelemetry.trace.span.INVALID_TRACE_ID", 0),
                mock.patch("opentelemetry.trace.span.INVALID_SPAN_ID", 0),
            ):
                result = generate_traceparent_header()

        assert result == "00-5b8aa5a2d2c872e8321cf37308d69df2-051581bf3bb55c45-01"

    def test_generates_hex_only_values(self):
        from core.helper.trace_id_helper import generate_traceparent_header

        with mock.patch("opentelemetry.trace.get_current_span", return_value=None):
            result = generate_traceparent_header()

        parts = result.split("-")
        # All parts should be valid hex
        assert re.match(r"^[0-9a-f]+$", parts[1])
        assert re.match(r"^[0-9a-f]+$", parts[2])


class TestParseTraceparentHeader:
    def test_parses_valid_traceparent(self):
        from core.helper.trace_id_helper import parse_traceparent_header

        traceparent = "00-5b8aa5a2d2c872e8321cf37308d69df2-051581bf3bb55c45-01"
        result = parse_traceparent_header(traceparent)

        assert result == "5b8aa5a2d2c872e8321cf37308d69df2"

    def test_returns_none_for_invalid_format(self):
        from core.helper.trace_id_helper import parse_traceparent_header

        # Wrong number of parts
        assert parse_traceparent_header("00-abc-def") is None
        # Wrong trace_id length
        assert parse_traceparent_header("00-abc-def-01") is None

    def test_returns_none_for_empty_string(self):
        from core.helper.trace_id_helper import parse_traceparent_header

        assert parse_traceparent_header("") is None
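For orientation, a behaviour-equivalent sketch of the traceparent helpers exercised above, inferred from the assertions rather than copied from core/helper/trace_id_helper.py:

# Hedged sketch; the real module may structure this differently.
import secrets

from opentelemetry import trace
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID


def generate_traceparent_header() -> str:
    """Build a W3C traceparent: 00-<32 hex trace id>-<16 hex span id>-01."""
    trace_id = span_id = None
    try:
        span = trace.get_current_span()
        if span is not None:
            ctx = span.get_span_context()
            if ctx.trace_id != INVALID_TRACE_ID and ctx.span_id != INVALID_SPAN_ID:
                trace_id = f"{ctx.trace_id:032x}"
                span_id = f"{ctx.span_id:016x}"
    except Exception:
        pass
    if trace_id is None or span_id is None:
        # No usable OTel context: fall back to random, hex-only identifiers.
        trace_id = secrets.token_hex(16)
        span_id = secrets.token_hex(8)
    return f"00-{trace_id}-{span_id}-01"


def parse_traceparent_header(traceparent: str) -> str | None:
    """Return the trace_id portion of a traceparent header, or None if malformed."""
    parts = traceparent.split("-")
    if len(parts) != 4 or len(parts[1]) != 32:
        return None
    return parts[1]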
@@ -32,7 +32,6 @@ def mock_provider_entity():
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
description=I18nObject(en_US="OpenAI provider", zh_Hans="OpenAI 提供商"),
icon_small=I18nObject(en_US="icon.png", zh_Hans="icon.png"),
icon_large=I18nObject(en_US="icon.png", zh_Hans="icon.png"),
background="background.png",
help=None,
supported_model_types=[ModelType.LLM],
@@ -99,29 +99,20 @@ def test_external_api_json_message_and_bad_request_rewrite():
assert res.get_json()["message"] == "Invalid JSON payload received or JSON payload is empty."


def test_external_api_param_mapping_and_quota_and_exc_info_none():
# Force exc_info() to return (None,None,None) only during request
import libs.external_api as ext
def test_external_api_param_mapping_and_quota():
app = _create_api_app()
client = app.test_client()

orig_exc_info = ext.sys.exc_info
try:
ext.sys.exc_info = lambda: (None, None, None)
# Param errors mapping payload path
res = client.get("/api/param-errors")
assert res.status_code == 400
data = res.get_json()
assert data["code"] == "invalid_param"
assert data["params"] == "field"

app = _create_api_app()
client = app.test_client()

# Param errors mapping payload path
res = client.get("/api/param-errors")
assert res.status_code == 400
data = res.get_json()
assert data["code"] == "invalid_param"
assert data["params"] == "field"

# Quota path — depending on Flask-RESTX internals it may be handled
res = client.get("/api/quota")
assert res.status_code in (400, 429)
finally:
ext.sys.exc_info = orig_exc_info # type: ignore[assignment]
# Quota path — depending on Flask-RESTX internals it may be handled
res = client.get("/api/quota")
assert res.status_code in (400, 429)


def test_unauthorized_and_force_logout_clears_cookies():
@@ -27,7 +27,6 @@ def service_with_fake_configurations():
description=None,
icon_small=None,
icon_small_dark=None,
icon_large=None,
background=None,
help=None,
supported_model_types=[ModelType.LLM],
@ -1953,14 +1953,14 @@ wheels = [
|
|||
|
||||
[[package]]
|
||||
name = "fickling"
|
||||
version = "0.1.5"
|
||||
version = "0.1.6"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "stdlib-list" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/41/94/0d0ce455952c036cfee235637f786c1d1d07d1b90f6a4dfb50e0eff929d6/fickling-0.1.5.tar.gz", hash = "sha256:92f9b49e717fa8dbc198b4b7b685587adb652d85aa9ede8131b3e44494efca05", size = 282462, upload-time = "2025-11-18T05:04:30.748Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/07/ab/7571453f9365c17c047b5a7b7e82692a7f6be51203f295030886758fd57a/fickling-0.1.6.tar.gz", hash = "sha256:03cb5d7bd09f9169c7583d2079fad4b3b88b25f865ed0049172e5cb68582311d", size = 284033, upload-time = "2025-12-15T18:14:58.721Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/a7/d25912b2e3a5b0a37e6f460050bbc396042b5906a6563a1962c484abc3c6/fickling-0.1.5-py3-none-any.whl", hash = "sha256:6aed7270bfa276e188b0abe043a27b3a042129d28ec1fa6ff389bdcc5ad178bb", size = 46240, upload-time = "2025-11-18T05:04:29.048Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/99/cc04258dda421bc612cdfe4be8c253f45b922f1c7f268b5a0b9962d9cd12/fickling-0.1.6-py3-none-any.whl", hash = "sha256:465d0069548bfc731bdd75a583cb4cf5a4b2666739c0f76287807d724b147ed3", size = 47922, upload-time = "2025-12-15T18:14:57.526Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
|
|||
|
|
@@ -69,6 +69,8 @@ PYTHONIOENCODING=utf-8
# The log level for the application.
# Supported values are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`
LOG_LEVEL=INFO
# Log output format: text or json
LOG_OUTPUT_FORMAT=text
# Log file path
LOG_FILE=/app/logs/server.log
# Log file max size, the unit is MB
@@ -129,6 +129,7 @@ services:
- ./middleware.env
environment:
# Use the shared environment variables.
LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text}
DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin}
REDIS_HOST: ${REDIS_HOST:-redis}
REDIS_PORT: ${REDIS_PORT:-6379}
@@ -17,6 +17,7 @@ x-shared-env: &shared-api-worker-env
LC_ALL: ${LC_ALL:-en_US.UTF-8}
PYTHONIOENCODING: ${PYTHONIOENCODING:-utf-8}
LOG_LEVEL: ${LOG_LEVEL:-INFO}
LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text}
LOG_FILE: ${LOG_FILE:-/app/logs/server.log}
LOG_FILE_MAX_SIZE: ${LOG_FILE_MAX_SIZE:-20}
LOG_FILE_BACKUP_COUNT: ${LOG_FILE_BACKUP_COUNT:-5}
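None of these configuration hunks show how LOG_OUTPUT_FORMAT is consumed on the Python side; presumably the logging setup switches formatters on it, roughly along these lines (a hedged sketch with assumed names, not the actual extension code):

# Hedged sketch: choosing a formatter from LOG_OUTPUT_FORMAT.
import logging
import os

handler = logging.StreamHandler()
if os.getenv("LOG_OUTPUT_FORMAT", "text") == "json":
    from core.logging.structured_formatter import StructuredJSONFormatter

    handler.setFormatter(StructuredJSONFormatter(service_name="dify-api"))
else:
    handler.setFormatter(logging.Formatter(os.getenv("LOG_FORMAT", "%(asctime)s %(levelname)s %(message)s")))
logging.getLogger().addHandler(handler)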
@@ -93,7 +93,6 @@ function createMockProviderContext(overrides: Partial<ProviderContextState> = {}
provider: 'openai',
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
status: ModelStatusEnum.active,
models: [
{

@@ -711,7 +710,6 @@ describe('DebugWithSingleModel', () => {
provider: 'openai',
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
status: ModelStatusEnum.active,
models: [
{

@@ -742,7 +740,6 @@ describe('DebugWithSingleModel', () => {
provider: 'different-provider',
label: { en_US: 'Different Provider', zh_Hans: '不同提供商' },
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
status: ModelStatusEnum.active,
models: [],
},

@@ -925,7 +922,6 @@ describe('DebugWithSingleModel', () => {
provider: 'openai',
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
status: ModelStatusEnum.active,
models: [
{

@@ -975,7 +971,6 @@ describe('DebugWithSingleModel', () => {
provider: 'openai',
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
status: ModelStatusEnum.active,
models: [
{
@ -0,0 +1,17 @@
|
|||
<svg width="25" height="25" viewBox="0 0 25 25" fill="none" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g clip-path="url(#clip0_6305_73327)">
|
||||
<path d="M0.5 12.5C0.5 8.77247 0.5 6.9087 1.10896 5.43853C1.92092 3.47831 3.47831 1.92092 5.43853 1.10896C6.9087 0.5 8.77247 0.5 12.5 0.5C16.2275 0.5 18.0913 0.5 19.5615 1.10896C21.5217 1.92092 23.0791 3.47831 23.891 5.43853C24.5 6.9087 24.5 8.77247 24.5 12.5C24.5 16.2275 24.5 18.0913 23.891 19.5615C23.0791 21.5217 21.5217 23.0791 19.5615 23.891C18.0913 24.5 16.2275 24.5 12.5 24.5C8.77247 24.5 6.9087 24.5 5.43853 23.891C3.47831 23.0791 1.92092 21.5217 1.10896 19.5615C0.5 18.0913 0.5 16.2275 0.5 12.5Z" fill="white"/>
|
||||
<rect width="24" height="24" transform="translate(0.5 0.5)" fill="url(#pattern0_6305_73327)"/>
|
||||
<rect width="24" height="24" transform="translate(0.5 0.5)" fill="white" fill-opacity="0.01"/>
|
||||
</g>
|
||||
<path d="M12.5 0.25C14.3603 0.25 15.7684 0.250313 16.8945 0.327148C18.0228 0.404144 18.8867 0.558755 19.6572 0.87793C21.6787 1.71525 23.2847 3.32133 24.1221 5.34277C24.4412 6.11333 24.5959 6.97723 24.6729 8.10547C24.7497 9.23161 24.75 10.6397 24.75 12.5C24.75 14.3603 24.7497 15.7684 24.6729 16.8945C24.5959 18.0228 24.4412 18.8867 24.1221 19.6572C23.2847 21.6787 21.6787 23.2847 19.6572 24.1221C18.8867 24.4412 18.0228 24.5959 16.8945 24.6729C15.7684 24.7497 14.3603 24.75 12.5 24.75C10.6397 24.75 9.23161 24.7497 8.10547 24.6729C6.97723 24.5959 6.11333 24.4412 5.34277 24.1221C3.32133 23.2847 1.71525 21.6787 0.87793 19.6572C0.558755 18.8867 0.404144 18.0228 0.327148 16.8945C0.250313 15.7684 0.25 14.3603 0.25 12.5C0.25 10.6397 0.250313 9.23161 0.327148 8.10547C0.404144 6.97723 0.558755 6.11333 0.87793 5.34277C1.71525 3.32133 3.32133 1.71525 5.34277 0.87793C6.11333 0.558755 6.97723 0.404144 8.10547 0.327148C9.23161 0.250313 10.6397 0.25 12.5 0.25Z" stroke="#101828" stroke-opacity="0.08" stroke-width="0.5"/>
|
||||
<defs>
|
||||
<pattern id="pattern0_6305_73327" patternContentUnits="objectBoundingBox" width="1" height="1">
|
||||
<use xlink:href="#image0_6305_73327" transform="scale(0.00625)"/>
|
||||
</pattern>
|
||||
<clipPath id="clip0_6305_73327">
|
||||
<path d="M0.5 12.5C0.5 8.77247 0.5 6.9087 1.10896 5.43853C1.92092 3.47831 3.47831 1.92092 5.43853 1.10896C6.9087 0.5 8.77247 0.5 12.5 0.5C16.2275 0.5 18.0913 0.5 19.5615 1.10896C21.5217 1.92092 23.0791 3.47831 23.891 5.43853C24.5 6.9087 24.5 8.77247 24.5 12.5C24.5 16.2275 24.5 18.0913 23.891 19.5615C23.0791 21.5217 21.5217 23.0791 19.5615 23.891C18.0913 24.5 16.2275 24.5 12.5 24.5C8.77247 24.5 6.9087 24.5 5.43853 23.891C3.47831 23.0791 1.92092 21.5217 1.10896 19.5615C0.5 18.0913 0.5 16.2275 0.5 12.5Z" fill="white"/>
|
||||
</clipPath>
|
||||
<image id="image0_6305_73327" width="160" height="160" preserveAspectRatio="none" xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAKAAAACgCAYAAACLz2ctAAAACXBIWXMAACxLAAAsSwGlPZapAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAm6SURBVHgB7Z09jFVFGIZn/Wf9I5FSKIVSKYndErolVJJQQUKliRWFNhY2UGxFItUmUJFARZaOQGekA0poF8slUcFFRVnPu9cDw/HuPWdmvpnvu3PeJ9m4MQH23H3P/DzzzczCVoMjRInXHCGKMIBEFQaQqMIAElUYQKIKA0hUYQCJKgwgUYUBJKowgEQVBpCowgASVRhAogoDSFRhAIkqDCBRhQEkqjCARBUGkKjCABJVGECiCgNIVGEAiSpvuJGyubnlLq0+cQ/u/739fSyLiwvu8JF33PKxXY6EM9oW8Mrl393dO8+Swgfw59euPXU//finI+GMMoCPNp43gflr+3u0YBJcubzZhPG5I2GMMoA/nH/84vvjJxbd/gPpIxG0hDdvsBUMZXQBRFf5cP2f7e8Pff729tfysUUnwa0bf7iNDbaCIYwugNeb8VpLO3FAC4ggpoJW8GoztiTDGVUAEb62hULg9ux5+fjoiiXGg5jYYGZNhjGaAGLicbPpIsFHTfC62gThW2p0igTXr206MozRBHDt2uYL5XK0CZ/f+rXA5037/6GgBaSWGcYoAuhrF7R+O4330Ap+cUJmQkItM4xRBNDXLkd7Viw+O/gWtUxBqg/gNO3SB7VMOaoP4DTt0gdaQIkJCbVMP1UXI8zSLn2gq77dtJ6pa8XQMheaIcCuxfLvOiZVe/e97ixTbQD7tEsfrZbxW9BYEEINMPw4880HImPaXFTbBQ/RLn1IaRlNLq4+cZapMoBDtUsfUpUymuCzWBNoxXNRZQBDtMss/DHkPIPZuFUnWV0AY7TLNCataB0eD0OR60ZbweoCGKNdpoExZE0OD1L84bq9IomqAuh3mUsJEwh/DFkTWB60RjUB7GqXwwki2R9D1gSKJKyVilUTQAntAvwxZI1Y0zJVBFBKuwCrg3UprGmZKgLY3WQUSy3apQ9LWmbuA9jVLiinisEfQ9aOJS1jYpEQA+NHG3HjLkntklp4ME9Ay+CF3btPNwLqAYQakGh5qF3CwWePYgVNVLtgdJ9S3d6Ci2+9atUufVjQMqotoN99Yuz26cE3XQzrzRgQQV46Eq5f0O0eFtoNNy/cu/PM3b0zafG1JyNqAeyWqz+4/8ydPI29ueGN8qHm6+dmmQmnXYV2Kah4kdiUPk/4L772GFClC54240zdxIN9HBZNvzWkliulUPnXd1roT9nEg6pffFkvwNREcrlSiuIBnDXjTN3Ec+r0e+5p83fcGonPC0VquVKS4j9BX0VGytkqeKvRrWCpiZvCX0VyuVKSogGEdmlnX7NIOVul7VZqX9MNRWq5UpqiARwaipSzVTCrxYoIJjTcFD5BarkyB8UCGDrBSDlbBa0gJiSXOCHZRmq5MgdFAhiz0E8tI4M17dKlyE8Tu7+CWiYNi9qlS/YApiz0U8ukYVG7dMn+E6W2QNQycVjVLl2yLwRKjMGgZfZHlg2h20ELiEnN/gNxxQ7ziD/mtqRdumQPIE5nSt3k02qZmLe41TII4Bhr/sC9xr1aUi8+C1sNLiMIzsXV9DPyEKSzKx9GVcuAlXO/zc2MGF3muZXdLhX/ma2ekpV9DIhWy8KRt1KnnpZAqsv0n9nqyf1FpkUWjrxttYx1JFcq/Ge2enJ/kQBauYkIWsb6kWvSKxX+M1u0AcXEEDyU9k1ErZaxSo6VCv+ZJ2LaVitYLICSv/zUahmLrWDOlQr/ma2d3F9UjVu4iajVMtbIuVLhP/NkU7qdCUnRAEr+8iWqZaxQYqXCf2b4UCtKqvjiILXM/ym1UmFRy6isTlPLvKRkgahFLaMSQGqZl5Qej1rTMmr1OdQyOgWi1rSMWgDHrmVStUtKmZslLaNaoThmLZN6jDBmshLPrK1lVAM4Vi0jdYxwyhjOipZRr9Eeo5aROkY4dQxnQcuY2CQwJi2Teoxw94BxqWfW0jImAjgmLZN6jHCX1DGctpYxs01qDFpmOWHiMWt3YcoYTlvLmAlg7VomdeKB8vpZSD1zaS1jaqOoFS2TY202Vbv0hUJKRZXWMqYCaEXLSM3MW0rd3jSPWsbcVvkatQwG+rGE3N40j1rG3lkNzo6WkahSSXmhYu51mzctYzKAVrQMxoKpExJp7dKHpJYpcXWZ2X2KuDNE4g1stUxMK9TOzGNPW82lXfrAn3u6+djtitzEjwAiyCWurTUbwKuCt3tLnC0Teo9cbu3SB168VGIvDgrBZBc8RDuEoKFlcmuXEhw/8a7LjbkAouvJccB4SS1Tw6XZ+PlLFMuaC2Cut7+klimlXXKBF6hUjaSpAOa+Tr6ElimtXXIgtSI1BFMBXMssP0tomdLaRZrSZ0mbCeBkopD/AMmc1TJa2kWSo4W3J5gJYMk7PXJUy2hrFwmgXUqfJW0igKW1A1rA2JPzd9Iy1C5xqAcwl3bpI6VypDvRoHaJRz2AWm+/pJahdolHNYDa2iFVy6A7pnZJQ3UteH1dpugRV0Hs3Rf3KLjCIEY7oOVGK0rtkoZqAGOvXHj171hwX379ftE3uB23Uruko9oFS+zF1TjgBy0XamPmXbvgs9O+wku9HAtT/++/+9XFgO6j9BvctlynTr+rrl1Snh9DFgxdtFEPID6Epf9q7kLR6D7Q+qVol0nFsszEA89v9RLCoZgQ0TGb0j9uglv6w29PpUrRLlL7bjWePwcmAojwhW5K/6qZeJQGLZcV7aLx/DkwdTTH0DGVhrVvx20WtIvWqkUOTD3FyQFdm8aBkhLaBRt8JLSL1XtOYjEVwCFaZl61y4Xzj50ES4qrFjkw9ySzKjI0tYuFaheN58+NuQC2WmYa1C51hQ+YbMunaRkN7YDqaWqXvJgM4DQto6EdsH+E2iUvZk9GQCt42xs7fXvmF6fB3oQja6ld+jH9VCcTuj4pYjcxUbsMY2GrwRkGv8gSpzR1weQBtYIAXfCZwLNl0GJLjP0QvhonHy3mA6jJhfNPmhZwEkJUvwydBEC7XFyN33/cgtn3uZXdrmbqHFgI4W9EH3q2DLVLGAzgDPyN6EM3MVG7hMEA9hByhQG1SzgMYA/+RvS+s2WoXcJhAAfgy+idtAy1SxwM4ED6rjBgtUscDOBA/PMBu2fLsNolHgYwAF/LtGfLULukwQAGME3LULukwZWQQBA8LLPhv+19GhKcbVY8xjT2a2ELGEhXy0gwJu3ShQGMAIFZFrpg+5NmcpPjeth5gV0wUYUtIFGFASSqMIBEFQaQqMIAElUYQKIKA0hUYQCJKgwgUYUBJKowgEQVBpCowgASVRhAogoDSFRhAIkqDCBRhQEkqjCARBUGkKjCABJVGECiCgNIVPkXGPW
KHZj1nMYAAAAASUVORK5CYII="/>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 6.1 KiB |
|
|
@@ -0,0 +1,4 @@
<svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="40" height="40" fill="white"/>
<path d="M25.7926 10.1311H21.5089L29.3208 29.869H33.6045L25.7926 10.1311ZM13.4164 10.1311L5.60449 29.869H9.97273L11.5703 25.724H19.743L21.3405 29.869H25.7087L17.8969 10.1311H13.4164ZM12.9834 22.0583L15.6566 15.1217L18.3299 22.0583H12.9834Z" fill="black"/>
</svg>

After Width: | Height: | Size: 403 B
@ -0,0 +1,4 @@
|
|||
<svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect width="40" height="40" fill="white"/>
|
||||
<path d="M36.6676 11.2917C36.3316 11.1277 36.1871 11.4402 35.9906 11.599C35.9242 11.6511 35.8668 11.7188 35.8108 11.7787C35.3199 12.3048 34.747 12.6485 33.9996 12.6068C32.9046 12.5469 31.971 12.8907 31.1455 13.7293C30.9696 12.6954 30.3863 12.0782 29.4996 11.6824C29.0348 11.4766 28.5647 11.2709 28.2406 10.823C28.0127 10.5053 27.9515 10.1511 27.8368 9.80214C27.7652 9.59121 27.6923 9.37506 27.4502 9.33861C27.1871 9.29694 27.0843 9.51829 26.9814 9.70318C26.5674 10.4584 26.4084 11.2917 26.4228 12.1355C26.4592 14.0313 27.26 15.5417 28.8486 16.6173C29.0296 16.7397 29.0764 16.8646 29.0191 17.0443C28.9111 17.4141 28.7822 17.7735 28.6676 18.1433C28.596 18.3803 28.4879 18.4323 28.2354 18.3282C27.363 17.9637 26.609 17.4246 25.9436 16.7709C24.8135 15.6771 23.7914 14.4689 22.5166 13.5235C22.2171 13.3021 21.919 13.0964 21.609 12.9011C20.3082 11.6355 21.7796 10.5964 22.1194 10.474C22.4762 10.3464 22.2431 9.9037 21.092 9.90891C19.9423 9.91413 18.889 10.2995 17.5478 10.8126C17.3512 10.8907 17.1455 10.948 16.9332 10.9922C15.7158 10.7631 14.4515 10.711 13.1298 10.8594C10.6428 11.1381 8.65587 12.3152 7.19493 14.3255C5.44102 16.7397 5.02826 19.4845 5.53347 22.349C6.06473 25.3646 7.60249 27.8646 9.96707 29.8178C12.4176 31.8413 15.2406 32.8334 18.4606 32.6433C20.4163 32.5313 22.5947 32.2683 25.0504 30.1875C25.6702 30.4949 26.3199 30.6173 27.3994 30.711C28.2302 30.7891 29.0296 30.6694 29.6494 30.5417C30.6194 30.3361 30.5518 29.4375 30.2015 29.2709C27.3578 27.9454 27.9814 28.4845 27.4136 28.0495C28.859 26.3361 31.0374 24.5574 31.889 18.797C31.9554 18.3386 31.898 18.0522 31.889 17.6798C31.8838 17.4558 31.9346 17.3673 32.1923 17.3413C32.9046 17.2605 33.596 17.0651 34.2314 16.7137C36.0739 15.7058 36.816 14.0522 36.9918 12.0678C37.0179 11.7657 36.9866 11.4506 36.6676 11.2917ZM20.613 29.1485C17.8564 26.9793 16.5204 26.2657 15.9684 26.297C15.4527 26.3255 15.5452 26.9167 15.6584 27.3022C15.777 27.6823 15.9319 27.9454 16.1494 28.2787C16.2991 28.5001 16.402 28.8307 15.9996 29.0755C15.1116 29.6277 13.5687 28.8907 13.4958 28.8542C11.7001 27.797 10.1988 26.3985 9.14025 24.487C8.11941 22.6459 7.52566 20.6719 7.42801 18.5651C7.40197 18.0547 7.5517 17.875 8.05691 17.7839C8.72227 17.6615 9.40978 17.6355 10.0751 17.7318C12.8876 18.1433 15.2822 19.4037 17.2887 21.3959C18.4346 22.5339 19.3018 23.8907 20.195 25.2162C21.1442 26.6251 22.1663 27.9662 23.4671 29.0651C23.9254 29.4506 24.2926 29.7449 24.6428 29.961C23.5856 30.0782 21.8199 30.1042 20.613 29.1485ZM21.9332 20.6407C21.9332 20.4141 22.1142 20.2345 22.342 20.2345C22.3928 20.2345 22.4398 20.2449 22.4814 20.2605C22.5374 20.2813 22.5895 20.3126 22.6299 20.3594C22.7027 20.4298 22.7444 20.5339 22.7444 20.6407C22.7444 20.8673 22.5635 21.047 22.3368 21.047C22.109 21.047 21.9332 20.8673 21.9332 20.6407ZM26.036 22.7501C25.7731 22.8569 25.51 22.9506 25.2575 22.961C24.8655 22.9793 24.4371 22.8203 24.204 22.6251C23.8434 22.323 23.5856 22.1537 23.4762 21.6225C23.4306 21.3959 23.4567 21.047 23.497 20.8465C23.5908 20.4141 23.4866 20.1381 23.1832 19.8855C22.9346 19.6798 22.6207 19.6251 22.2744 19.6251C22.1455 19.6251 22.027 19.5678 21.9384 19.5209C21.7939 19.4479 21.6754 19.2683 21.7887 19.047C21.8251 18.9766 22.001 18.8022 22.0426 18.7709C22.5114 18.5027 23.053 18.5913 23.5543 18.7918C24.0191 18.9818 24.3694 19.3307 24.8746 19.823C25.3915 20.4194 25.484 20.5861 25.7783 21.0313C26.01 21.3829 26.2223 21.7422 26.3668 22.1537C26.454 22.4089 26.3408 22.6198 26.036 22.7501Z" fill="#4D6BFE"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.5 KiB |
|
|
@ -0,0 +1,105 @@
|
|||
<svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect width="40" height="40" fill="white"/>
|
||||
<mask id="mask0_3892_95663" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="6" y="6" width="28" height="29">
|
||||
<path d="M20 6C20.2936 6 20.5488 6.2005 20.6205 6.48556C20.8393 7.3566 21.1277 8.20866 21.4828 9.03356C22.4116 11.191 23.6854 13.0791 25.3032 14.6968C26.9218 16.3146 28.8095 17.5888 30.9664 18.5172C31.7941 18.8735 32.6436 19.16 33.5149 19.3795C33.6533 19.4143 33.7762 19.4942 33.8641 19.6067C33.9519 19.7192 33.9998 19.8578 34 20.0005C34 20.2941 33.7995 20.5492 33.5149 20.621C32.6437 20.8399 31.7915 21.1282 30.9664 21.4833C28.8095 22.4121 26.9209 23.6859 25.3032 25.3036C23.6854 26.9223 22.4116 28.8099 21.4828 30.9669C21.1278 31.7919 20.8394 32.6439 20.6205 33.5149C20.586 33.6534 20.5062 33.7764 20.3937 33.8644C20.2813 33.9524 20.1427 34.0003 20 34.0005C19.8572 34.0003 19.7186 33.9525 19.6062 33.8645C19.4937 33.7765 19.414 33.6535 19.3795 33.5149C19.1605 32.6439 18.872 31.7918 18.5167 30.9669C17.5884 28.8099 16.3151 26.9214 14.6964 25.3036C13.0782 23.6859 11.1906 22.4121 9.03309 21.4833C8.20814 21.1283 7.35608 20.8399 6.48509 20.621C6.34667 20.5864 6.22377 20.5065 6.13589 20.3941C6.04801 20.2817 6.00018 20.1432 6 20.0005C6.00024 19.8578 6.04808 19.7192 6.13594 19.6067C6.2238 19.4942 6.34667 19.4143 6.48509 19.3795C7.35612 19.1607 8.20819 18.8723 9.03309 18.5172C11.1906 17.5888 13.0786 16.3146 14.6964 14.6968C16.3141 13.0791 17.5884 11.191 18.5167 9.03356C18.8719 8.20862 19.1604 7.35656 19.3795 6.48556C19.4508 6.2005 19.7064 6 20 6Z" fill="black"/>
|
||||
<path d="M20 6C20.2936 6 20.5488 6.2005 20.6205 6.48556C20.8393 7.3566 21.1277 8.20866 21.4828 9.03356C22.4116 11.191 23.6854 13.0791 25.3032 14.6968C26.9218 16.3146 28.8095 17.5888 30.9664 18.5172C31.7941 18.8735 32.6436 19.16 33.5149 19.3795C33.6533 19.4143 33.7762 19.4942 33.8641 19.6067C33.9519 19.7192 33.9998 19.8578 34 20.0005C34 20.2941 33.7995 20.5492 33.5149 20.621C32.6437 20.8399 31.7915 21.1282 30.9664 21.4833C28.8095 22.4121 26.9209 23.6859 25.3032 25.3036C23.6854 26.9223 22.4116 28.8099 21.4828 30.9669C21.1278 31.7919 20.8394 32.6439 20.6205 33.5149C20.586 33.6534 20.5062 33.7764 20.3937 33.8644C20.2813 33.9524 20.1427 34.0003 20 34.0005C19.8572 34.0003 19.7186 33.9525 19.6062 33.8645C19.4937 33.7765 19.414 33.6535 19.3795 33.5149C19.1605 32.6439 18.872 31.7918 18.5167 30.9669C17.5884 28.8099 16.3151 26.9214 14.6964 25.3036C13.0782 23.6859 11.1906 22.4121 9.03309 21.4833C8.20814 21.1283 7.35608 20.8399 6.48509 20.621C6.34667 20.5864 6.22377 20.5065 6.13589 20.3941C6.04801 20.2817 6.00018 20.1432 6 20.0005C6.00024 19.8578 6.04808 19.7192 6.13594 19.6067C6.2238 19.4942 6.34667 19.4143 6.48509 19.3795C7.35612 19.1607 8.20819 18.8723 9.03309 18.5172C11.1906 17.5888 13.0786 16.3146 14.6964 14.6968C16.3141 13.0791 17.5884 11.191 18.5167 9.03356C18.8719 8.20862 19.1604 7.35656 19.3795 6.48556C19.4508 6.2005 19.7064 6 20 6Z" fill="url(#paint0_linear_3892_95663)"/>
|
||||
</mask>
|
||||
<g mask="url(#mask0_3892_95663)">
|
||||
<g filter="url(#filter0_f_3892_95663)">
|
||||
<path d="M3.47232 27.8921C6.70753 29.0411 10.426 26.8868 11.7778 23.0804C13.1296 19.274 11.6028 15.2569 8.36763 14.108C5.13242 12.959 1.41391 15.1133 0.06211 18.9197C-1.28969 22.7261 0.23711 26.7432 3.47232 27.8921Z" fill="#FFE432"/>
|
||||
</g>
|
||||
<g filter="url(#filter1_f_3892_95663)">
|
||||
<path d="M17.8359 15.341C22.2806 15.341 25.8838 11.6588 25.8838 7.11644C25.8838 2.57412 22.2806 -1.10815 17.8359 -1.10815C13.3912 -1.10815 9.78809 2.57412 9.78809 7.11644C9.78809 11.6588 13.3912 15.341 17.8359 15.341Z" fill="#FC413D"/>
|
||||
</g>
|
||||
<g filter="url(#filter2_f_3892_95663)">
|
||||
<path d="M14.7081 41.6431C19.3478 41.4163 22.8707 36.3599 22.5768 30.3493C22.283 24.3387 18.2836 19.65 13.644 19.8769C9.00433 20.1037 5.48139 25.1601 5.77525 31.1707C6.06911 37.1813 10.0685 41.87 14.7081 41.6431Z" fill="#00B95C"/>
|
||||
</g>
|
||||
<g filter="url(#filter3_f_3892_95663)">
|
||||
<path d="M14.7081 41.6431C19.3478 41.4163 22.8707 36.3599 22.5768 30.3493C22.283 24.3387 18.2836 19.65 13.644 19.8769C9.00433 20.1037 5.48139 25.1601 5.77525 31.1707C6.06911 37.1813 10.0685 41.87 14.7081 41.6431Z" fill="#00B95C"/>
|
||||
</g>
|
||||
<g filter="url(#filter4_f_3892_95663)">
|
||||
<path d="M19.355 38.0071C23.2447 35.6405 24.2857 30.2506 21.6803 25.9684C19.0748 21.6862 13.8095 20.1334 9.91983 22.5C6.03016 24.8666 4.98909 30.2565 7.59454 34.5387C10.2 38.8209 15.4653 40.3738 19.355 38.0071Z" fill="#00B95C"/>
|
||||
</g>
|
||||
<g filter="url(#filter5_f_3892_95663)">
|
||||
<path d="M35.0759 24.5504C39.4477 24.5504 42.9917 21.1377 42.9917 16.9278C42.9917 12.7179 39.4477 9.30518 35.0759 9.30518C30.7042 9.30518 27.1602 12.7179 27.1602 16.9278C27.1602 21.1377 30.7042 24.5504 35.0759 24.5504Z" fill="#3186FF"/>
|
||||
</g>
|
||||
<g filter="url(#filter6_f_3892_95663)">
|
||||
<path d="M0.362818 23.6667C4.3882 26.7279 10.2688 25.7676 13.4976 21.5219C16.7264 17.2762 16.0806 11.3528 12.0552 8.29156C8.02982 5.23037 2.14917 6.19062 -1.07959 10.4364C-4.30835 14.6821 -3.66256 20.6055 0.362818 23.6667Z" fill="#FBBC04"/>
|
||||
</g>
|
||||
<g filter="url(#filter7_f_3892_95663)">
|
||||
<path d="M20.9877 28.1903C25.7924 31.4936 32.1612 30.5732 35.2128 26.1346C38.2644 21.696 36.8432 15.4199 32.0385 12.1166C27.2338 8.81334 20.865 9.73372 17.8134 14.1723C14.7618 18.611 16.183 24.887 20.9877 28.1903Z" fill="#3186FF"/>
|
||||
</g>
|
||||
<g filter="url(#filter8_f_3892_95663)">
|
||||
<path d="M29.7231 4.99175C30.9455 6.65415 29.3748 9.88535 26.2149 12.2096C23.0549 14.5338 19.5026 15.0707 18.2801 13.4088C17.0576 11.7468 18.6284 8.51514 21.7883 6.19092C24.9482 3.86717 28.5006 3.32982 29.7231 4.99175Z" fill="#749BFF"/>
|
||||
</g>
|
||||
<g filter="url(#filter9_f_3892_95663)">
|
||||
<path d="M19.6891 12.9486C24.5759 8.41581 26.2531 2.27858 23.4354 -0.759249C20.6176 -3.79708 14.3718 -2.58516 9.485 1.94765C4.59823 6.48046 2.92099 12.6177 5.73879 15.6555C8.55658 18.6933 14.8024 17.4814 19.6891 12.9486Z" fill="#FC413D"/>
|
||||
</g>
|
||||
<g filter="url(#filter10_f_3892_95663)">
|
||||
<path d="M9.6712 29.23C12.5757 31.3088 15.9102 31.6247 17.1191 29.9356C18.328 28.2465 16.9535 25.1921 14.049 23.1133C11.1446 21.0345 7.81003 20.7186 6.60113 22.4077C5.39223 24.0968 6.76675 27.1512 9.6712 29.23Z" fill="#FFEE48"/>
|
||||
</g>
|
||||
</g>
|
||||
<defs>
|
||||
<filter id="filter0_f_3892_95663" x="-3.44095" y="10.7885" width="18.7217" height="20.4229" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
|
||||
<feGaussianBlur stdDeviation="1.50514" result="effect1_foregroundBlur_3892_95663"/>
|
||||
</filter>
|
||||
<filter id="filter1_f_3892_95663" x="-4.76352" y="-15.6598" width="45.1989" height="45.5524" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
|
||||
<feGaussianBlur stdDeviation="7.2758" result="effect1_foregroundBlur_3892_95663"/>
|
||||
</filter>
|
||||
<filter id="filter2_f_3892_95663" x="-6.61209" y="7.49899" width="41.5757" height="46.522" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
|
||||
<feGaussianBlur stdDeviation="6.18495" result="effect1_foregroundBlur_3892_95663"/>
|
||||
</filter>
|
||||
<filter id="filter3_f_3892_95663" x="-6.61209" y="7.49899" width="41.5757" height="46.522" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
|
||||
<feGaussianBlur stdDeviation="6.18495" result="effect1_foregroundBlur_3892_95663"/>
|
||||
</filter>
|
||||
<filter id="filter4_f_3892_95663" x="-6.21073" y="9.02316" width="41.6959" height="42.4608" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
|
||||
<feGaussianBlur stdDeviation="6.18495" result="effect1_foregroundBlur_3892_95663"/>
|
||||
</filter>
|
||||
<filter id="filter5_f_3892_95663" x="15.405" y="-2.44994" width="39.3423" height="38.7556" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
|
||||
<feGaussianBlur stdDeviation="5.87756" result="effect1_foregroundBlur_3892_95663"/>
|
||||
</filter>
|
||||
<filter id="filter6_f_3892_95663" x="-13.7886" y="-4.15284" width="39.9951" height="40.2639" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
|
||||
<feGaussianBlur stdDeviation="5.32691" result="effect1_foregroundBlur_3892_95663"/>
|
||||
</filter>
|
||||
<filter id="filter7_f_3892_95663" x="6.6925" y="0.620963" width="39.6414" height="39.065" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
|
||||
<feGaussianBlur stdDeviation="4.75678" result="effect1_foregroundBlur_3892_95663"/>
|
||||
</filter>
|
||||
<filter id="filter8_f_3892_95663" x="9.35225" y="-4.48661" width="29.2984" height="27.3739" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
|
||||
<feGaussianBlur stdDeviation="4.25649" result="effect1_foregroundBlur_3892_95663"/>
|
||||
</filter>
|
||||
<filter id="filter9_f_3892_95663" x="-2.81919" y="-9.62339" width="34.8122" height="34.143" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
|
||||
<feGaussianBlur stdDeviation="3.59514" result="effect1_foregroundBlur_3892_95663"/>
|
||||
</filter>
|
||||
<filter id="filter10_f_3892_95663" x="-2.73761" y="12.4221" width="29.1949" height="27.4994" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
|
||||
<feGaussianBlur stdDeviation="4.44986" result="effect1_foregroundBlur_3892_95663"/>
|
||||
</filter>
|
||||
<linearGradient id="paint0_linear_3892_95663" x1="13.9595" y1="24.7349" x2="28.5025" y2="12.4738" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#4893FC"/>
|
||||
<stop offset="0.27" stop-color="#4893FC"/>
|
||||
<stop offset="0.777" stop-color="#969DFF"/>
|
||||
<stop offset="1" stop-color="#BD99FE"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 10 KiB |
|
|
@ -0,0 +1,11 @@
|
|||
<svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect width="40" height="40" fill="white"/>
|
||||
<g clip-path="url(#clip0_3892_95659)">
|
||||
<path d="M15.745 24.54L26.715 16.35C27.254 15.95 28.022 16.106 28.279 16.73C29.628 20.018 29.025 23.971 26.341 26.685C23.658 29.399 19.924 29.995 16.511 28.639L12.783 30.384C18.13 34.081 24.623 33.166 28.681 29.06C31.9 25.805 32.897 21.368 31.965 17.367L31.973 17.376C30.622 11.498 32.305 9.149 35.755 4.345L36 4L31.46 8.59V8.576L15.743 24.544M13.48 26.531C9.643 22.824 10.305 17.085 13.58 13.776C16 11.327 19.968 10.328 23.432 11.797L27.152 10.06C26.482 9.57 25.622 9.043 24.637 8.673C20.182 6.819 14.848 7.742 11.227 11.401C7.744 14.924 6.648 20.341 8.53 24.962C9.935 28.416 7.631 30.86 5.31 33.326C4.49 34.2 3.666 35.074 3 36L13.478 26.534" fill="black"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_3892_95659">
|
||||
<rect width="33" height="32" fill="white" transform="translate(3 4)"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 981 B |
|
|
@ -0,0 +1,17 @@
|
|||
<svg width="26" height="26" viewBox="0 0 26 26" fill="none" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g clip-path="url(#clip0_3892_83671)">
|
||||
<path d="M1 13C1 9.27247 1 7.4087 1.60896 5.93853C2.42092 3.97831 3.97831 2.42092 5.93853 1.60896C7.4087 1 9.27247 1 13 1C16.7275 1 18.5913 1 20.0615 1.60896C22.0217 2.42092 23.5791 3.97831 24.391 5.93853C25 7.4087 25 9.27247 25 13C25 16.7275 25 18.5913 24.391 20.0615C23.5791 22.0217 22.0217 23.5791 20.0615 24.391C18.5913 25 16.7275 25 13 25C9.27247 25 7.4087 25 5.93853 24.391C3.97831 23.5791 2.42092 22.0217 1.60896 20.0615C1 18.5913 1 16.7275 1 13Z" fill="white"/>
|
||||
<rect width="24" height="24" transform="translate(1 1)" fill="url(#pattern0_3892_83671)"/>
|
||||
<rect width="24" height="24" transform="translate(1 1)" fill="white" fill-opacity="0.01"/>
|
||||
</g>
|
||||
<path d="M13 0.75C14.8603 0.75 16.2684 0.750313 17.3945 0.827148C18.5228 0.904144 19.3867 1.05876 20.1572 1.37793C22.1787 2.21525 23.7847 3.82133 24.6221 5.84277C24.9412 6.61333 25.0959 7.47723 25.1729 8.60547C25.2497 9.73161 25.25 11.1397 25.25 13C25.25 14.8603 25.2497 16.2684 25.1729 17.3945C25.0959 18.5228 24.9412 19.3867 24.6221 20.1572C23.7847 22.1787 22.1787 23.7847 20.1572 24.6221C19.3867 24.9412 18.5228 25.0959 17.3945 25.1729C16.2684 25.2497 14.8603 25.25 13 25.25C11.1397 25.25 9.73161 25.2497 8.60547 25.1729C7.47723 25.0959 6.61333 24.9412 5.84277 24.6221C3.82133 23.7847 2.21525 22.1787 1.37793 20.1572C1.05876 19.3867 0.904144 18.5228 0.827148 17.3945C0.750313 16.2684 0.75 14.8603 0.75 13C0.75 11.1397 0.750313 9.73161 0.827148 8.60547C0.904144 7.47723 1.05876 6.61333 1.37793 5.84277C2.21525 3.82133 3.82133 2.21525 5.84277 1.37793C6.61333 1.05876 7.47723 0.904144 8.60547 0.827148C9.73161 0.750313 11.1397 0.75 13 0.75Z" stroke="#101828" stroke-opacity="0.08" stroke-width="0.5"/>
|
||||
<defs>
|
||||
<pattern id="pattern0_3892_83671" patternContentUnits="objectBoundingBox" width="1" height="1">
|
||||
<use xlink:href="#image0_3892_83671" transform="scale(0.00625)"/>
|
||||
</pattern>
|
||||
<clipPath id="clip0_3892_83671">
|
||||
<path d="M1 13C1 9.27247 1 7.4087 1.60896 5.93853C2.42092 3.97831 3.97831 2.42092 5.93853 1.60896C7.4087 1 9.27247 1 13 1C16.7275 1 18.5913 1 20.0615 1.60896C22.0217 2.42092 23.5791 3.97831 24.391 5.93853C25 7.4087 25 9.27247 25 13C25 16.7275 25 18.5913 24.391 20.0615C23.5791 22.0217 22.0217 23.5791 20.0615 24.391C18.5913 25 16.7275 25 13 25C9.27247 25 7.4087 25 5.93853 24.391C3.97831 23.5791 2.42092 22.0217 1.60896 20.0615C1 18.5913 1 16.7275 1 13Z" fill="white"/>
|
||||
</clipPath>
|
||||
<image id="image0_3892_83671" width="160" height="160" preserveAspectRatio="none" xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAKAAAACgCAYAAACLz2ctAAAACXBIWXMAACxLAAAsSwGlPZapAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA0ySURBVHgB7Z3vsdM6E8Z93nm/QwdABUAFgQqACgIVABUAFcCpAKgAOoBUwKECOBUAFfj6yYzumNx4pV2vtKtkfzO+3CHESezHWu0frS7GiSEIjPjfEASGhAADU0KAgSkhwMCUEGBgSggwMCUEGJgSAgxMCQEGpoQAA1NCgIEpIcDAlBBgYEoIMDAlBBiYEgIMTAkBBqaEAANTQoCBKSHAwJQQYGBKCDAwJQQYmPL/4cz5/fv38PXr12G32w0/f/4crq6u9n+P/0/cvHlzuH379v7Pe/fuDZvNZv8n/i5Yx8U5LkyH6D5+/Dh8/vx5Lz4pEOGDBw+G58+fhxiljGfEly9fxkkweODUD5wX5w94nIUAf/z4UU14x4SIzwvKOHkBvn79epzmbk3ENz9evHgx/vr1awxoTnYOCCfi2bNnq+Z4a8G8cDLLMT8kOEkBwpN9+PDh3tngMvd4EzgPBC05H3j37t3eUTkknffw3PjsdMDROWnGE+PDhw8sk4s526tXr/YORM5k4vVPnz6NT58+HSeRskwypgJ4/yRG9vsnEe5NOj771DgpAUJ8JTcUAoXovn37Nq7h/fv3zZyb+XeHgE/F4z4ZAUJMJTdwMoXqzgFGJu6IqHHgM/HQ9cxJCBBhj5wA8PraES8HRtXWIsTRc+jnJHLBcDjmqbNDttvtMImv+oT+4uLiL+elFfD079y5M7x582bojrFzMLkfiNEBo1JtMB+zMMHHDjgsPY2GXYdhMOrhyV9iEt8wCXSo+fnSWCNG41TQcOvWrb9eQ0jm+vp6H07CwQ3/dBV/HDsG3uCwMBI8fvx4rAWcmNzIOyzM1eA5c50gzF3fvn3LGmXhLdee82rQrQBh9gbC4ahlhiAgbmoPD4PW9+EUVPQgwm4FSI1+EIk2kkqamhUy+I0lI2LNh1GDLgWIC9rK9MJcUmJfGnlgMmuD61Dy3eCYeC2M6FKA1PxLc8SRVNLUCHTnKIk/IpXnkS4FiCd6yeRpIAmrWAeDS0ToMX3XnQAp87t27icpXIVQvdzYnAjx4HqjOwFCZEsXWDoCpbAKx9ymggZvcyuYWup7e8sddyNA3GiIb8n8Sp9uSVgFE3+vk3p8L2r6gNc84V6AMG/wbHMi4U6yvYRVMGpri5mKkXqbC7oVIFcgKPQshZvFgPi1Y4v4vvOHStuJoa6dlrOmgTsBSlewlVYLU05Mi3le7sGCedcQYm4U9DKFcCVAbjm9xKyUjn7aIxInoK1VaEoJnWMxauJGgDnvTUuAORHUCKtIl4auTaNREYOaxRoczAWIkQEXY434tASILIYmWnWCUrOMa7t0TjwQHjAVIC7QUlhl6aLVFKDWvKhGJwYIWWI2qe/hoUjBtCQfxZypGxUFGgChwHJK8A81WVtOj8JRlMXfv39ffUE8il+nacq+ABdNlUqhliGUXPvamAkQNyp3ISEIiA7igwg9MzkNe+GhAru0ghm/aZqnsbprQYhPnjzZP7zUOpjE3bt3F1/78+fPYM5oADU5TsextQ3U+zRMsARJQPtYuVZpadXhAQeHglqumntvC5oLEBc65xFut9uj8zFPAsT3k3juubgirg9nXjwMdNiGinuepQBzTznEt4QXAR5mMUoOblyxtOJ5fhzzlikB4t9b01SAOdObKyiwFiA+QzI6SeOKGCkli91THxoQI+CMXJVGboSwEiC+FzdWmdJ4Gkjmh8ksexdgMy8YXiLltcEb9LaOdR5W4YQ+0nvRKUEDXBdcnynfzfKWJ9Huu0ZQ5zVnbEQuAV9CyxFQK4tRo4GQZH645prVpIkAcxUopZPzFgKs1U9au2WGNGwzPzysGW5igi8vLxdfg5nwYnphbpFpKM1ipC6mJSDrgHOXBpBzJLM8CVEUpHfTfXVsAOU5ckMTQ8URkHOksvw1DoImXLPsZYFSdQFS5pdbmetBgEtl+dIVddpmkBO2OYswzOQ9Ll4AbnWHpQBL43laAeQ1cEZlaxFWFyCVruJ6YRYClJblS7pZaYuh1JO3rI6uLkDKLHFpLcC1bTY8zA9LC36tPOLqAtRcx9tKgHhoNG+IRIjaZjk3N4TwLRYqVRfgUtJesjSwtgBrt9mQzA/ned215Kp3LBoYVRfg0o+VLIrxWA8ogSNAbbOc89RbZ0fMKqItusn3SsrrIpC9Noidyye37rRvJkDpvmvnTGrKviabggcfGZQlkAVqucFjdQEuPW0a6ahzBZVFqHLBru8SkLqj0nctR8EYAR0BUcDUljA3y9xSMZAbBVvdn+oCXEp4r9n+9Bi73W7onVRgwKmNnK+S41xPnJ8aBakCEk3MTDDQnGtgOWSXW1UdASMbqlyw0U6pswazDCFywPmXaDUPrC5A6inTHrUgQqlJ8gh+D/a4KzXLXAcC92ZJ4K3McHUBbjabxdfw1HIoqV/jLtz2zrzur8Qsf//+feBAibvFKFhdgHjClkZBPGGcHwkBlhZfQtzd7iB5BIgPIoQYKbPMHbVaWqhjNPGCNV1+3IBSkwSSWZaGLLyB345gshaUhWpiQcYGUG3CcEiS7pK8KtJ/mtU5UgaiAKEE7aWWS9exRUPzJiMgZYYB5mtcJJ4inJOWUf5esEyLNgtE51x+qTC4nmLwXyyzVc0EmEv/cAOpc5KnCCF6W9zeA2cxAgJqFEzhkzXAS06eYgixHMu0aFMBYgREl88lYIZfvnw5rAXmGE0tqc86Rm1vGb8Pn+GNJQE2GRnHxuS2khoG3ZVaksZC2uXwKO8vWbJp5QVrb3/GwaRDKtW1c5iFTDTXKFgsl+Q2sbQS4NK5WuyoZNYlH8sWczekxhoNbr89SXd6ye6bVgKkdlRqsUbEdJuGUjFYLtyeC7FkXcaarloWAsTDtXSu0u3P1mC+UY1lKwnNndElzco9CNB6HxEXW3Vx2t566beXRmXOHnDeBEidp1XzIhcClOyjpr2ZoMQs43tzvzs14rcWIPV7W60RNhdgiUdcMhJpIdkmgfvAeBCgVtPQtZhu1QXWRuHXrhA7BBkUpPS0sik4B4LiODxlZ6gyOCQMWn1XcwFSCe/SSPzaFWLHwPkgRCp9SIHvjvdKO5jWBNeLqkbfbrdDK8wFeH19vfgaCi+lK8Q0KjkgInS656akUqkYKnW8geuS65zftLJoNIbawTzNQ/CnZFusNY19pCGaksD5YDgHzKUlERFoibkAKU/s2LZT3LwuN2wjabULT5hz46wEmHuILbbu6mIEPEQSt8ttk5DSZ5xz4pB0T7UQYC7EhIeolec7x7UAc00icfMl+dbDC91i88E5LQVYOqK3Nr0JcwFSPf9KcpHSvC7MET5b0tl
+bYFEKwHi95U8WFp72kkwF6BWF32MlhrbV1EmSutG1RYgJy8taRSqibkAqYsp7aKqLcS0KY0G1BJVDQFyphKYF1v0hZ7jOheMv5cgMcvHjtKwCgfKE9UQYOmhXfArxYUAqdDKGgFIwjZJ+NqT8pK4YisBYvsJL7gQIFUUqTFH4ZhlSViFghNX1CokaPlgrcWFAHOtO7QEQZVCaZd3ScrySz+fKqNv9fu0cCFAQI0SmrVph/PDGvO80vDH4chbChU5OCa81lsvcHAjwNxT7WFz5RySsnyJQHIZG4gfUxfPwku4ESCg5mle9rc9hqQsf818bEnkuEY4pwfvthRXAqSyItqmWAPJPE+6++acpc/z5mCU4EqAIFcOb7m16BzNFXUcqPmfRycjhzsBlnh42qviOEjKtTQX2C/FNT1PUSjcCRCUFJ+23umb22Zjbm610FiM5A2XAsTNLlmZppmjpZCUa9X4bpzi3V5wKUCAC1oyx6qxUD3RKqxSArV81aKSWQu3AgS46KUjj+aNl+SQa6a5ci3teoj3LeFagIAjwjQiYq7GDVzjJksKVHFo548P0aig8coF/jM4Jy0l5C61xHJKtO3FgeWGN27c+Ot1LAnFOa+urvYHF6z3rd0OGAvIqeWdPwp3UHLL2Am1WmZIjhp9C4+RS7lZltJr0Y0AE9wGk5qHdlhliZKQT4tNZFrQnQCBJBisddSOt2HumhvpIb5ewy6HdCnARI31H6UmWLt7KGddcg+VQaV0LcAE5mPwBiV9BtccGIXXxiAhJk5BQ48FBxQnIcA5GJkwf+J2XYWA0TgdI5GkkTnej8/OhWPwOh4YiI4zjfBYTq9BF2GYNSC8gh6E6UggRIPwBY5j3a+m9Jt405wU/pmDz0bIR9IPEedDf8GSDbt74+QFuAZp/FGTFrFGS8z7A3omdUvlbvmlQWpw6a2zqjpjUETL0I/XFWw1CAEy0dgPhBJez4UFEmIOKATzwsvLy/0OmJI8cgJzvEePHu3b4lru22tFCFCBVNCw2+3+9boPPd40j0uFEZvNZi++cxTdnBBgYEp4wYEpIcDAlBBgYEoIMDAlBBiYEgIMTAkBBqaEAANTQoCBKSHAwJQQYGBKCDAwJQQYmBICDEwJAQamhAADU0KAgSkhwMCUEGBgSggwMCUEGJgSAgxMCQEGpvwDojzI2oXtJzYAAAAASUVORK5CYII="/>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 7.1 KiB |
|
|
@ -0,0 +1,36 @@
{
  "icon": {
    "type": "element",
    "isRootNode": true,
    "name": "svg",
    "attributes": {
      "width": "40",
      "height": "40",
      "viewBox": "0 0 40 40",
      "fill": "none",
      "xmlns": "http://www.w3.org/2000/svg"
    },
    "children": [
      {
        "type": "element",
        "name": "rect",
        "attributes": {
          "width": "40",
          "height": "40",
          "fill": "white"
        },
        "children": []
      },
      {
        "type": "element",
        "name": "path",
        "attributes": {
          "d": "M25.7926 10.1311H21.5089L29.3208 29.869H33.6045L25.7926 10.1311ZM13.4164 10.1311L5.60449 29.869H9.97273L11.5703 25.724H19.743L21.3405 29.869H25.7087L17.8969 10.1311H13.4164ZM12.9834 22.0583L15.6566 15.1217L18.3299 22.0583H12.9834Z",
          "fill": "black"
        },
        "children": []
      }
    ]
  },
  "name": "AnthropicShortLight"
}
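This and the other *.json icon files added in this diff encode their SVG as a plain node tree that IconBase walks at render time. The real IconData type is defined in '@/app/components/base/icons/IconBase' and is not part of this diff; the sketch below is only an inferred description of the shape these JSON files follow.

// Inferred sketch only - the actual IconData type lives in IconBase and may differ.
interface IconNode {
  type: 'element'                      // every node in these files is an element node
  isRootNode?: boolean                 // set to true only on the root <svg> node
  name: string                         // SVG tag name: 'svg', 'rect', 'path', 'g', 'defs', ...
  attributes: Record<string, string>   // SVG attributes, e.g. { d: '...', fill: 'black' }
  children: IconNode[]                 // empty array for leaf elements
}

interface IconFile {
  icon: IconNode                       // the root <svg> element
  name: string                         // icon name, e.g. 'AnthropicShortLight'
}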
@ -0,0 +1,20 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY

import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './AnthropicShortLight.json'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'AnthropicShortLight'

export default Icon
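For context, a minimal usage sketch of the generated component above, assuming the default export is imported from the generated module's own directory; the exact import path and the surrounding component are illustrative assumptions, not part of this diff. The component simply forwards its props, plus an optional ref, to IconBase together with the bundled JSON data.

// Hypothetical usage - import path and wrapping component are assumptions for illustration.
import AnthropicShortLight from './AnthropicShortLight'

const ProviderBadge = () => (
  <span className='inline-flex items-center gap-1'>
    <AnthropicShortLight className='h-4 w-4' />
    Anthropic
  </span>
)

export default ProviderBadge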
@ -0,0 +1,36 @@
|
|||
{
|
||||
"icon": {
|
||||
"type": "element",
|
||||
"isRootNode": true,
|
||||
"name": "svg",
|
||||
"attributes": {
|
||||
"width": "40",
|
||||
"height": "40",
|
||||
"viewBox": "0 0 40 40",
|
||||
"fill": "none",
|
||||
"xmlns": "http://www.w3.org/2000/svg"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "rect",
|
||||
"attributes": {
|
||||
"width": "40",
|
||||
"height": "40",
|
||||
"fill": "white"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M36.6676 11.2917C36.3316 11.1277 36.1871 11.4402 35.9906 11.599C35.9242 11.6511 35.8668 11.7188 35.8108 11.7787C35.3199 12.3048 34.747 12.6485 33.9996 12.6068C32.9046 12.5469 31.971 12.8907 31.1455 13.7293C30.9696 12.6954 30.3863 12.0782 29.4996 11.6824C29.0348 11.4766 28.5647 11.2709 28.2406 10.823C28.0127 10.5053 27.9515 10.1511 27.8368 9.80214C27.7652 9.59121 27.6923 9.37506 27.4502 9.33861C27.1871 9.29694 27.0843 9.51829 26.9814 9.70318C26.5674 10.4584 26.4084 11.2917 26.4228 12.1355C26.4592 14.0313 27.26 15.5417 28.8486 16.6173C29.0296 16.7397 29.0764 16.8646 29.0191 17.0443C28.9111 17.4141 28.7822 17.7735 28.6676 18.1433C28.596 18.3803 28.4879 18.4323 28.2354 18.3282C27.363 17.9637 26.609 17.4246 25.9436 16.7709C24.8135 15.6771 23.7914 14.4689 22.5166 13.5235C22.2171 13.3021 21.919 13.0964 21.609 12.9011C20.3082 11.6355 21.7796 10.5964 22.1194 10.474C22.4762 10.3464 22.2431 9.9037 21.092 9.90891C19.9423 9.91413 18.889 10.2995 17.5478 10.8126C17.3512 10.8907 17.1455 10.948 16.9332 10.9922C15.7158 10.7631 14.4515 10.711 13.1298 10.8594C10.6428 11.1381 8.65587 12.3152 7.19493 14.3255C5.44102 16.7397 5.02826 19.4845 5.53347 22.349C6.06473 25.3646 7.60249 27.8646 9.96707 29.8178C12.4176 31.8413 15.2406 32.8334 18.4606 32.6433C20.4163 32.5313 22.5947 32.2683 25.0504 30.1875C25.6702 30.4949 26.3199 30.6173 27.3994 30.711C28.2302 30.7891 29.0296 30.6694 29.6494 30.5417C30.6194 30.3361 30.5518 29.4375 30.2015 29.2709C27.3578 27.9454 27.9814 28.4845 27.4136 28.0495C28.859 26.3361 31.0374 24.5574 31.889 18.797C31.9554 18.3386 31.898 18.0522 31.889 17.6798C31.8838 17.4558 31.9346 17.3673 32.1923 17.3413C32.9046 17.2605 33.596 17.0651 34.2314 16.7137C36.0739 15.7058 36.816 14.0522 36.9918 12.0678C37.0179 11.7657 36.9866 11.4506 36.6676 11.2917ZM20.613 29.1485C17.8564 26.9793 16.5204 26.2657 15.9684 26.297C15.4527 26.3255 15.5452 26.9167 15.6584 27.3022C15.777 27.6823 15.9319 27.9454 16.1494 28.2787C16.2991 28.5001 16.402 28.8307 15.9996 29.0755C15.1116 29.6277 13.5687 28.8907 13.4958 28.8542C11.7001 27.797 10.1988 26.3985 9.14025 24.487C8.11941 22.6459 7.52566 20.6719 7.42801 18.5651C7.40197 18.0547 7.5517 17.875 8.05691 17.7839C8.72227 17.6615 9.40978 17.6355 10.0751 17.7318C12.8876 18.1433 15.2822 19.4037 17.2887 21.3959C18.4346 22.5339 19.3018 23.8907 20.195 25.2162C21.1442 26.6251 22.1663 27.9662 23.4671 29.0651C23.9254 29.4506 24.2926 29.7449 24.6428 29.961C23.5856 30.0782 21.8199 30.1042 20.613 29.1485ZM21.9332 20.6407C21.9332 20.4141 22.1142 20.2345 22.342 20.2345C22.3928 20.2345 22.4398 20.2449 22.4814 20.2605C22.5374 20.2813 22.5895 20.3126 22.6299 20.3594C22.7027 20.4298 22.7444 20.5339 22.7444 20.6407C22.7444 20.8673 22.5635 21.047 22.3368 21.047C22.109 21.047 21.9332 20.8673 21.9332 20.6407ZM26.036 22.7501C25.7731 22.8569 25.51 22.9506 25.2575 22.961C24.8655 22.9793 24.4371 22.8203 24.204 22.6251C23.8434 22.323 23.5856 22.1537 23.4762 21.6225C23.4306 21.3959 23.4567 21.047 23.497 20.8465C23.5908 20.4141 23.4866 20.1381 23.1832 19.8855C22.9346 19.6798 22.6207 19.6251 22.2744 19.6251C22.1455 19.6251 22.027 19.5678 21.9384 19.5209C21.7939 19.4479 21.6754 19.2683 21.7887 19.047C21.8251 18.9766 22.001 18.8022 22.0426 18.7709C22.5114 18.5027 23.053 18.5913 23.5543 18.7918C24.0191 18.9818 24.3694 19.3307 24.8746 19.823C25.3915 20.4194 25.484 20.5861 25.7783 21.0313C26.01 21.3829 26.2223 21.7422 26.3668 22.1537C26.454 22.4089 26.3408 22.6198 26.036 22.7501Z",
|
||||
"fill": "#4D6BFE"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"name": "Deepseek"
|
||||
}
|
||||
|
|
@ -0,0 +1,20 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY

import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Deepseek.json'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Deepseek'

export default Icon
@ -0,0 +1,807 @@
|
|||
{
|
||||
"icon": {
|
||||
"type": "element",
|
||||
"isRootNode": true,
|
||||
"name": "svg",
|
||||
"attributes": {
|
||||
"width": "40",
|
||||
"height": "40",
|
||||
"viewBox": "0 0 40 40",
|
||||
"fill": "none",
|
||||
"xmlns": "http://www.w3.org/2000/svg"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "rect",
|
||||
"attributes": {
|
||||
"width": "40",
|
||||
"height": "40",
|
||||
"fill": "white"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "mask",
|
||||
"attributes": {
|
||||
"id": "mask0_3892_95663",
|
||||
"style": "mask-type:alpha",
|
||||
"maskUnits": "userSpaceOnUse",
|
||||
"x": "6",
|
||||
"y": "6",
|
||||
"width": "28",
|
||||
"height": "29"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M20 6C20.2936 6 20.5488 6.2005 20.6205 6.48556C20.8393 7.3566 21.1277 8.20866 21.4828 9.03356C22.4116 11.191 23.6854 13.0791 25.3032 14.6968C26.9218 16.3146 28.8095 17.5888 30.9664 18.5172C31.7941 18.8735 32.6436 19.16 33.5149 19.3795C33.6533 19.4143 33.7762 19.4942 33.8641 19.6067C33.9519 19.7192 33.9998 19.8578 34 20.0005C34 20.2941 33.7995 20.5492 33.5149 20.621C32.6437 20.8399 31.7915 21.1282 30.9664 21.4833C28.8095 22.4121 26.9209 23.6859 25.3032 25.3036C23.6854 26.9223 22.4116 28.8099 21.4828 30.9669C21.1278 31.7919 20.8394 32.6439 20.6205 33.5149C20.586 33.6534 20.5062 33.7764 20.3937 33.8644C20.2813 33.9524 20.1427 34.0003 20 34.0005C19.8572 34.0003 19.7186 33.9525 19.6062 33.8645C19.4937 33.7765 19.414 33.6535 19.3795 33.5149C19.1605 32.6439 18.872 31.7918 18.5167 30.9669C17.5884 28.8099 16.3151 26.9214 14.6964 25.3036C13.0782 23.6859 11.1906 22.4121 9.03309 21.4833C8.20814 21.1283 7.35608 20.8399 6.48509 20.621C6.34667 20.5864 6.22377 20.5065 6.13589 20.3941C6.04801 20.2817 6.00018 20.1432 6 20.0005C6.00024 19.8578 6.04808 19.7192 6.13594 19.6067C6.2238 19.4942 6.34667 19.4143 6.48509 19.3795C7.35612 19.1607 8.20819 18.8723 9.03309 18.5172C11.1906 17.5888 13.0786 16.3146 14.6964 14.6968C16.3141 13.0791 17.5884 11.191 18.5167 9.03356C18.8719 8.20862 19.1604 7.35656 19.3795 6.48556C19.4508 6.2005 19.7064 6 20 6Z",
|
||||
"fill": "black"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M20 6C20.2936 6 20.5488 6.2005 20.6205 6.48556C20.8393 7.3566 21.1277 8.20866 21.4828 9.03356C22.4116 11.191 23.6854 13.0791 25.3032 14.6968C26.9218 16.3146 28.8095 17.5888 30.9664 18.5172C31.7941 18.8735 32.6436 19.16 33.5149 19.3795C33.6533 19.4143 33.7762 19.4942 33.8641 19.6067C33.9519 19.7192 33.9998 19.8578 34 20.0005C34 20.2941 33.7995 20.5492 33.5149 20.621C32.6437 20.8399 31.7915 21.1282 30.9664 21.4833C28.8095 22.4121 26.9209 23.6859 25.3032 25.3036C23.6854 26.9223 22.4116 28.8099 21.4828 30.9669C21.1278 31.7919 20.8394 32.6439 20.6205 33.5149C20.586 33.6534 20.5062 33.7764 20.3937 33.8644C20.2813 33.9524 20.1427 34.0003 20 34.0005C19.8572 34.0003 19.7186 33.9525 19.6062 33.8645C19.4937 33.7765 19.414 33.6535 19.3795 33.5149C19.1605 32.6439 18.872 31.7918 18.5167 30.9669C17.5884 28.8099 16.3151 26.9214 14.6964 25.3036C13.0782 23.6859 11.1906 22.4121 9.03309 21.4833C8.20814 21.1283 7.35608 20.8399 6.48509 20.621C6.34667 20.5864 6.22377 20.5065 6.13589 20.3941C6.04801 20.2817 6.00018 20.1432 6 20.0005C6.00024 19.8578 6.04808 19.7192 6.13594 19.6067C6.2238 19.4942 6.34667 19.4143 6.48509 19.3795C7.35612 19.1607 8.20819 18.8723 9.03309 18.5172C11.1906 17.5888 13.0786 16.3146 14.6964 14.6968C16.3141 13.0791 17.5884 11.191 18.5167 9.03356C18.8719 8.20862 19.1604 7.35656 19.3795 6.48556C19.4508 6.2005 19.7064 6 20 6Z",
|
||||
"fill": "url(#paint0_linear_3892_95663)"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"mask": "url(#mask0_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"filter": "url(#filter0_f_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M3.47232 27.8921C6.70753 29.0411 10.426 26.8868 11.7778 23.0804C13.1296 19.274 11.6028 15.2569 8.36763 14.108C5.13242 12.959 1.41391 15.1133 0.06211 18.9197C-1.28969 22.7261 0.23711 26.7432 3.47232 27.8921Z",
|
||||
"fill": "#FFE432"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"filter": "url(#filter1_f_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M17.8359 15.341C22.2806 15.341 25.8838 11.6588 25.8838 7.11644C25.8838 2.57412 22.2806 -1.10815 17.8359 -1.10815C13.3912 -1.10815 9.78809 2.57412 9.78809 7.11644C9.78809 11.6588 13.3912 15.341 17.8359 15.341Z",
|
||||
"fill": "#FC413D"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"filter": "url(#filter2_f_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M14.7081 41.6431C19.3478 41.4163 22.8707 36.3599 22.5768 30.3493C22.283 24.3387 18.2836 19.65 13.644 19.8769C9.00433 20.1037 5.48139 25.1601 5.77525 31.1707C6.06911 37.1813 10.0685 41.87 14.7081 41.6431Z",
|
||||
"fill": "#00B95C"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"filter": "url(#filter3_f_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M14.7081 41.6431C19.3478 41.4163 22.8707 36.3599 22.5768 30.3493C22.283 24.3387 18.2836 19.65 13.644 19.8769C9.00433 20.1037 5.48139 25.1601 5.77525 31.1707C6.06911 37.1813 10.0685 41.87 14.7081 41.6431Z",
|
||||
"fill": "#00B95C"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"filter": "url(#filter4_f_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M19.355 38.0071C23.2447 35.6405 24.2857 30.2506 21.6803 25.9684C19.0748 21.6862 13.8095 20.1334 9.91983 22.5C6.03016 24.8666 4.98909 30.2565 7.59454 34.5387C10.2 38.8209 15.4653 40.3738 19.355 38.0071Z",
|
||||
"fill": "#00B95C"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"filter": "url(#filter5_f_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M35.0759 24.5504C39.4477 24.5504 42.9917 21.1377 42.9917 16.9278C42.9917 12.7179 39.4477 9.30518 35.0759 9.30518C30.7042 9.30518 27.1602 12.7179 27.1602 16.9278C27.1602 21.1377 30.7042 24.5504 35.0759 24.5504Z",
|
||||
"fill": "#3186FF"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"filter": "url(#filter6_f_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M0.362818 23.6667C4.3882 26.7279 10.2688 25.7676 13.4976 21.5219C16.7264 17.2762 16.0806 11.3528 12.0552 8.29156C8.02982 5.23037 2.14917 6.19062 -1.07959 10.4364C-4.30835 14.6821 -3.66256 20.6055 0.362818 23.6667Z",
|
||||
"fill": "#FBBC04"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"filter": "url(#filter7_f_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M20.9877 28.1903C25.7924 31.4936 32.1612 30.5732 35.2128 26.1346C38.2644 21.696 36.8432 15.4199 32.0385 12.1166C27.2338 8.81334 20.865 9.73372 17.8134 14.1723C14.7618 18.611 16.183 24.887 20.9877 28.1903Z",
|
||||
"fill": "#3186FF"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"filter": "url(#filter8_f_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M29.7231 4.99175C30.9455 6.65415 29.3748 9.88535 26.2149 12.2096C23.0549 14.5338 19.5026 15.0707 18.2801 13.4088C17.0576 11.7468 18.6284 8.51514 21.7883 6.19092C24.9482 3.86717 28.5006 3.32982 29.7231 4.99175Z",
|
||||
"fill": "#749BFF"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"filter": "url(#filter9_f_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M19.6891 12.9486C24.5759 8.41581 26.2531 2.27858 23.4354 -0.759249C20.6176 -3.79708 14.3718 -2.58516 9.485 1.94765C4.59823 6.48046 2.92099 12.6177 5.73879 15.6555C8.55658 18.6933 14.8024 17.4814 19.6891 12.9486Z",
|
||||
"fill": "#FC413D"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"filter": "url(#filter10_f_3892_95663)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M9.6712 29.23C12.5757 31.3088 15.9102 31.6247 17.1191 29.9356C18.328 28.2465 16.9535 25.1921 14.049 23.1133C11.1446 21.0345 7.81003 20.7186 6.60113 22.4077C5.39223 24.0968 6.76675 27.1512 9.6712 29.23Z",
|
||||
"fill": "#FFEE48"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "defs",
|
||||
"attributes": {},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "filter",
|
||||
"attributes": {
|
||||
"id": "filter0_f_3892_95663",
|
||||
"x": "-3.44095",
|
||||
"y": "10.7885",
|
||||
"width": "18.7217",
|
||||
"height": "20.4229",
|
||||
"filterUnits": "userSpaceOnUse",
|
||||
"color-interpolation-filters": "sRGB"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feFlood",
|
||||
"attributes": {
|
||||
"flood-opacity": "0",
|
||||
"result": "BackgroundImageFix"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feBlend",
|
||||
"attributes": {
|
||||
"mode": "normal",
|
||||
"in": "SourceGraphic",
|
||||
"in2": "BackgroundImageFix",
|
||||
"result": "shape"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feGaussianBlur",
|
||||
"attributes": {
|
||||
"stdDeviation": "1.50514",
|
||||
"result": "effect1_foregroundBlur_3892_95663"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "filter",
|
||||
"attributes": {
|
||||
"id": "filter1_f_3892_95663",
|
||||
"x": "-4.76352",
|
||||
"y": "-15.6598",
|
||||
"width": "45.1989",
|
||||
"height": "45.5524",
|
||||
"filterUnits": "userSpaceOnUse",
|
||||
"color-interpolation-filters": "sRGB"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feFlood",
|
||||
"attributes": {
|
||||
"flood-opacity": "0",
|
||||
"result": "BackgroundImageFix"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feBlend",
|
||||
"attributes": {
|
||||
"mode": "normal",
|
||||
"in": "SourceGraphic",
|
||||
"in2": "BackgroundImageFix",
|
||||
"result": "shape"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feGaussianBlur",
|
||||
"attributes": {
|
||||
"stdDeviation": "7.2758",
|
||||
"result": "effect1_foregroundBlur_3892_95663"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "filter",
|
||||
"attributes": {
|
||||
"id": "filter2_f_3892_95663",
|
||||
"x": "-6.61209",
|
||||
"y": "7.49899",
|
||||
"width": "41.5757",
|
||||
"height": "46.522",
|
||||
"filterUnits": "userSpaceOnUse",
|
||||
"color-interpolation-filters": "sRGB"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feFlood",
|
||||
"attributes": {
|
||||
"flood-opacity": "0",
|
||||
"result": "BackgroundImageFix"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feBlend",
|
||||
"attributes": {
|
||||
"mode": "normal",
|
||||
"in": "SourceGraphic",
|
||||
"in2": "BackgroundImageFix",
|
||||
"result": "shape"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feGaussianBlur",
|
||||
"attributes": {
|
||||
"stdDeviation": "6.18495",
|
||||
"result": "effect1_foregroundBlur_3892_95663"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "filter",
|
||||
"attributes": {
|
||||
"id": "filter3_f_3892_95663",
|
||||
"x": "-6.61209",
|
||||
"y": "7.49899",
|
||||
"width": "41.5757",
|
||||
"height": "46.522",
|
||||
"filterUnits": "userSpaceOnUse",
|
||||
"color-interpolation-filters": "sRGB"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feFlood",
|
||||
"attributes": {
|
||||
"flood-opacity": "0",
|
||||
"result": "BackgroundImageFix"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feBlend",
|
||||
"attributes": {
|
||||
"mode": "normal",
|
||||
"in": "SourceGraphic",
|
||||
"in2": "BackgroundImageFix",
|
||||
"result": "shape"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feGaussianBlur",
|
||||
"attributes": {
|
||||
"stdDeviation": "6.18495",
|
||||
"result": "effect1_foregroundBlur_3892_95663"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "filter",
|
||||
"attributes": {
|
||||
"id": "filter4_f_3892_95663",
|
||||
"x": "-6.21073",
|
||||
"y": "9.02316",
|
||||
"width": "41.6959",
|
||||
"height": "42.4608",
|
||||
"filterUnits": "userSpaceOnUse",
|
||||
"color-interpolation-filters": "sRGB"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feFlood",
|
||||
"attributes": {
|
||||
"flood-opacity": "0",
|
||||
"result": "BackgroundImageFix"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feBlend",
|
||||
"attributes": {
|
||||
"mode": "normal",
|
||||
"in": "SourceGraphic",
|
||||
"in2": "BackgroundImageFix",
|
||||
"result": "shape"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feGaussianBlur",
|
||||
"attributes": {
|
||||
"stdDeviation": "6.18495",
|
||||
"result": "effect1_foregroundBlur_3892_95663"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "filter",
|
||||
"attributes": {
|
||||
"id": "filter5_f_3892_95663",
|
||||
"x": "15.405",
|
||||
"y": "-2.44994",
|
||||
"width": "39.3423",
|
||||
"height": "38.7556",
|
||||
"filterUnits": "userSpaceOnUse",
|
||||
"color-interpolation-filters": "sRGB"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feFlood",
|
||||
"attributes": {
|
||||
"flood-opacity": "0",
|
||||
"result": "BackgroundImageFix"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feBlend",
|
||||
"attributes": {
|
||||
"mode": "normal",
|
||||
"in": "SourceGraphic",
|
||||
"in2": "BackgroundImageFix",
|
||||
"result": "shape"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feGaussianBlur",
|
||||
"attributes": {
|
||||
"stdDeviation": "5.87756",
|
||||
"result": "effect1_foregroundBlur_3892_95663"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "filter",
|
||||
"attributes": {
|
||||
"id": "filter6_f_3892_95663",
|
||||
"x": "-13.7886",
|
||||
"y": "-4.15284",
|
||||
"width": "39.9951",
|
||||
"height": "40.2639",
|
||||
"filterUnits": "userSpaceOnUse",
|
||||
"color-interpolation-filters": "sRGB"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feFlood",
|
||||
"attributes": {
|
||||
"flood-opacity": "0",
|
||||
"result": "BackgroundImageFix"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feBlend",
|
||||
"attributes": {
|
||||
"mode": "normal",
|
||||
"in": "SourceGraphic",
|
||||
"in2": "BackgroundImageFix",
|
||||
"result": "shape"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feGaussianBlur",
|
||||
"attributes": {
|
||||
"stdDeviation": "5.32691",
|
||||
"result": "effect1_foregroundBlur_3892_95663"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "filter",
|
||||
"attributes": {
|
||||
"id": "filter7_f_3892_95663",
|
||||
"x": "6.6925",
|
||||
"y": "0.620963",
|
||||
"width": "39.6414",
|
||||
"height": "39.065",
|
||||
"filterUnits": "userSpaceOnUse",
|
||||
"color-interpolation-filters": "sRGB"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feFlood",
|
||||
"attributes": {
|
||||
"flood-opacity": "0",
|
||||
"result": "BackgroundImageFix"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feBlend",
|
||||
"attributes": {
|
||||
"mode": "normal",
|
||||
"in": "SourceGraphic",
|
||||
"in2": "BackgroundImageFix",
|
||||
"result": "shape"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feGaussianBlur",
|
||||
"attributes": {
|
||||
"stdDeviation": "4.75678",
|
||||
"result": "effect1_foregroundBlur_3892_95663"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "filter",
|
||||
"attributes": {
|
||||
"id": "filter8_f_3892_95663",
|
||||
"x": "9.35225",
|
||||
"y": "-4.48661",
|
||||
"width": "29.2984",
|
||||
"height": "27.3739",
|
||||
"filterUnits": "userSpaceOnUse",
|
||||
"color-interpolation-filters": "sRGB"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feFlood",
|
||||
"attributes": {
|
||||
"flood-opacity": "0",
|
||||
"result": "BackgroundImageFix"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feBlend",
|
||||
"attributes": {
|
||||
"mode": "normal",
|
||||
"in": "SourceGraphic",
|
||||
"in2": "BackgroundImageFix",
|
||||
"result": "shape"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feGaussianBlur",
|
||||
"attributes": {
|
||||
"stdDeviation": "4.25649",
|
||||
"result": "effect1_foregroundBlur_3892_95663"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "filter",
|
||||
"attributes": {
|
||||
"id": "filter9_f_3892_95663",
|
||||
"x": "-2.81919",
|
||||
"y": "-9.62339",
|
||||
"width": "34.8122",
|
||||
"height": "34.143",
|
||||
"filterUnits": "userSpaceOnUse",
|
||||
"color-interpolation-filters": "sRGB"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feFlood",
|
||||
"attributes": {
|
||||
"flood-opacity": "0",
|
||||
"result": "BackgroundImageFix"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feBlend",
|
||||
"attributes": {
|
||||
"mode": "normal",
|
||||
"in": "SourceGraphic",
|
||||
"in2": "BackgroundImageFix",
|
||||
"result": "shape"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feGaussianBlur",
|
||||
"attributes": {
|
||||
"stdDeviation": "3.59514",
|
||||
"result": "effect1_foregroundBlur_3892_95663"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "filter",
|
||||
"attributes": {
|
||||
"id": "filter10_f_3892_95663",
|
||||
"x": "-2.73761",
|
||||
"y": "12.4221",
|
||||
"width": "29.1949",
|
||||
"height": "27.4994",
|
||||
"filterUnits": "userSpaceOnUse",
|
||||
"color-interpolation-filters": "sRGB"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feFlood",
|
||||
"attributes": {
|
||||
"flood-opacity": "0",
|
||||
"result": "BackgroundImageFix"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feBlend",
|
||||
"attributes": {
|
||||
"mode": "normal",
|
||||
"in": "SourceGraphic",
|
||||
"in2": "BackgroundImageFix",
|
||||
"result": "shape"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "feGaussianBlur",
|
||||
"attributes": {
|
||||
"stdDeviation": "4.44986",
|
||||
"result": "effect1_foregroundBlur_3892_95663"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "linearGradient",
|
||||
"attributes": {
|
||||
"id": "paint0_linear_3892_95663",
|
||||
"x1": "13.9595",
|
||||
"y1": "24.7349",
|
||||
"x2": "28.5025",
|
||||
"y2": "12.4738",
|
||||
"gradientUnits": "userSpaceOnUse"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"stop-color": "#4893FC"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0.27",
|
||||
"stop-color": "#4893FC"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "0.777",
|
||||
"stop-color": "#969DFF"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "stop",
|
||||
"attributes": {
|
||||
"offset": "1",
|
||||
"stop-color": "#BD99FE"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"name": "Gemini"
|
||||
}
|
||||
|
|
@ -0,0 +1,20 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY

import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Gemini.json'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Gemini'

export default Icon
@ -0,0 +1,72 @@
|
|||
{
|
||||
"icon": {
|
||||
"type": "element",
|
||||
"isRootNode": true,
|
||||
"name": "svg",
|
||||
"attributes": {
|
||||
"width": "40",
|
||||
"height": "40",
|
||||
"viewBox": "0 0 40 40",
|
||||
"fill": "none",
|
||||
"xmlns": "http://www.w3.org/2000/svg"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "rect",
|
||||
"attributes": {
|
||||
"width": "40",
|
||||
"height": "40",
|
||||
"fill": "white"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "g",
|
||||
"attributes": {
|
||||
"clip-path": "url(#clip0_3892_95659)"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M15.745 24.54L26.715 16.35C27.254 15.95 28.022 16.106 28.279 16.73C29.628 20.018 29.025 23.971 26.341 26.685C23.658 29.399 19.924 29.995 16.511 28.639L12.783 30.384C18.13 34.081 24.623 33.166 28.681 29.06C31.9 25.805 32.897 21.368 31.965 17.367L31.973 17.376C30.622 11.498 32.305 9.149 35.755 4.345L36 4L31.46 8.59V8.576L15.743 24.544M13.48 26.531C9.643 22.824 10.305 17.085 13.58 13.776C16 11.327 19.968 10.328 23.432 11.797L27.152 10.06C26.482 9.57 25.622 9.043 24.637 8.673C20.182 6.819 14.848 7.742 11.227 11.401C7.744 14.924 6.648 20.341 8.53 24.962C9.935 28.416 7.631 30.86 5.31 33.326C4.49 34.2 3.666 35.074 3 36L13.478 26.534",
|
||||
"fill": "black"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "defs",
|
||||
"attributes": {},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "clipPath",
|
||||
"attributes": {
|
||||
"id": "clip0_3892_95659"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "rect",
|
||||
"attributes": {
|
||||
"width": "33",
|
||||
"height": "32",
|
||||
"fill": "white",
|
||||
"transform": "translate(3 4)"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"name": "Grok"
|
||||
}
|
||||
|
|
@ -0,0 +1,20 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY

import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Grok.json'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Grok'

export default Icon
Some files were not shown because too many files have changed in this diff.