mirror of https://github.com/langgenius/dify.git
Merge branch 'main' into feat/memory-orchestration-fed
commit dfc17f3b08
@@ -1,9 +1,10 @@
 name: Check i18n Files and Create PR

 on:
-  pull_request:
-    types: [closed]
+  push:
     branches: [main]
+    paths:
+      - 'web/i18n/en-US/*.ts'

 permissions:
   contents: write
@@ -11,7 +12,7 @@ permissions:

 jobs:
   check-and-update:
-    if: github.event.pull_request.merged == true
+    if: github.repository == 'langgenius/dify'
     runs-on: ubuntu-latest
     defaults:
       run:
@@ -19,7 +20,7 @@ jobs:
     steps:
      - uses: actions/checkout@v4
        with:
-          fetch-depth: 2 # last 2 commits
+          fetch-depth: 2
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Check for file changes in i18n/en-US
@@ -31,6 +32,13 @@ jobs:
          echo "Changed files: $changed_files"
          if [ -n "$changed_files" ]; then
            echo "FILES_CHANGED=true" >> $GITHUB_ENV
+            file_args=""
+            for file in $changed_files; do
+              filename=$(basename "$file" .ts)
+              file_args="$file_args --file=$filename"
+            done
+            echo "FILE_ARGS=$file_args" >> $GITHUB_ENV
+            echo "File arguments: $file_args"
          else
            echo "FILES_CHANGED=false" >> $GITHUB_ENV
          fi
@@ -55,7 +63,7 @@ jobs:

      - name: Generate i18n translations
        if: env.FILES_CHANGED == 'true'
-        run: pnpm run auto-gen-i18n
+        run: pnpm run auto-gen-i18n ${{ env.FILE_ARGS }}

      - name: Create Pull Request
        if: env.FILES_CHANGED == 'true'
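Note: the shell loop above maps each changed web/i18n/en-US/*.ts file to a --file=<basename> argument for auto-gen-i18n, so only the touched locale files are retranslated. A minimal Python sketch of the same transformation (the file list is a made-up example):

from pathlib import Path

# Hypothetical list of changed files, as the workflow's git diff would report them.
changed_files = ["web/i18n/en-US/common.ts", "web/i18n/en-US/billing.ts"]

# Same mapping as the shell loop: strip the directory and the .ts suffix,
# then emit one --file flag per file.
file_args = [f"--file={Path(f).stem}" for f in changed_files]
print(" ".join(file_args))  # --file=common --file=billing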
@@ -330,17 +330,17 @@ class HttpConfig(BaseSettings):
     def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
         return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")

-    HTTP_REQUEST_MAX_CONNECT_TIMEOUT: Annotated[
-        PositiveInt, Field(ge=10, description="Maximum connection timeout in seconds for HTTP requests")
-    ] = 10
+    HTTP_REQUEST_MAX_CONNECT_TIMEOUT: int = Field(
+        ge=1, description="Maximum connection timeout in seconds for HTTP requests", default=10
+    )

-    HTTP_REQUEST_MAX_READ_TIMEOUT: Annotated[
-        PositiveInt, Field(ge=60, description="Maximum read timeout in seconds for HTTP requests")
-    ] = 60
+    HTTP_REQUEST_MAX_READ_TIMEOUT: int = Field(
+        ge=1, description="Maximum read timeout in seconds for HTTP requests", default=60
+    )

-    HTTP_REQUEST_MAX_WRITE_TIMEOUT: Annotated[
-        PositiveInt, Field(ge=10, description="Maximum write timeout in seconds for HTTP requests")
-    ] = 20
+    HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = Field(
+        ge=1, description="Maximum write timeout in seconds for HTTP requests", default=20
+    )

     HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field(
         description="Maximum allowed size in bytes for binary data in HTTP requests",
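Note: the substitution above swaps Annotated[PositiveInt, Field(ge=10, ...)] for a plain int field with ge=1, loosening the lower bound so shorter timeouts become configurable while keeping the same defaults. A minimal pydantic sketch (hypothetical model name) of how ge and default interact:

from pydantic import BaseModel, Field, ValidationError

class HttpTimeouts(BaseModel):
    # Same shape as the new fields above: plain int, lower bound 1, default 10.
    connect_timeout: int = Field(ge=1, description="seconds", default=10)

print(HttpTimeouts().connect_timeout)                   # 10 (default applies)
print(HttpTimeouts(connect_timeout=3).connect_timeout)  # 3 (allowed now; ge=10 would have rejected it)

try:
    HttpTimeouts(connect_timeout=0)                     # violates ge=1
except ValidationError as exc:
    print(exc.error_count(), "validation error")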
@@ -1,5 +1,6 @@
 from flask import request
 from flask_restful import Resource, marshal_with, reqparse
+from werkzeug.exceptions import Unauthorized

 from controllers.common import fields
 from controllers.web import api
@@ -75,14 +76,14 @@ class AppWebAuthPermission(Resource):
         try:
             auth_header = request.headers.get("Authorization")
             if auth_header is None:
-                raise
+                raise Unauthorized("Authorization header is missing.")
             if " " not in auth_header:
-                raise
+                raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")

             auth_scheme, tk = auth_header.split(None, 1)
             auth_scheme = auth_scheme.lower()
             if auth_scheme != "bearer":
-                raise
+                raise Unauthorized("Authorization scheme must be 'Bearer'")

             decoded = PassportService().verify(tk)
             user_id = decoded.get("user_id", "visitor")
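Note: inside a try: block, a bare raise with no active exception fails with RuntimeError ("No active exception to re-raise"), so the old code could never produce a useful 401. The replacement raises Unauthorized with a specific message at each check. A self-contained sketch of the same header validation:

from werkzeug.exceptions import Unauthorized

def parse_bearer_token(auth_header: str | None) -> str:
    # Mirrors the three checks above, each failing with a specific message.
    if auth_header is None:
        raise Unauthorized("Authorization header is missing.")
    if " " not in auth_header:
        raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
    auth_scheme, token = auth_header.split(None, 1)
    if auth_scheme.lower() != "bearer":
        raise Unauthorized("Authorization scheme must be 'Bearer'")
    return token

print(parse_bearer_token("Bearer abc123"))  # abc123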
@@ -118,26 +118,8 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
         ):
             return

-        # Init conversation variables
-        stmt = select(ConversationVariable).where(
-            ConversationVariable.app_id == self.conversation.app_id,
-            ConversationVariable.conversation_id == self.conversation.id,
-        )
-        with Session(db.engine) as session:
-            db_conversation_variables = session.scalars(stmt).all()
-            if not db_conversation_variables:
-                # Create conversation variables if they don't exist.
-                db_conversation_variables = [
-                    ConversationVariable.from_variable(
-                        app_id=self.conversation.app_id, conversation_id=self.conversation.id, variable=variable
-                    )
-                    for variable in self._workflow.conversation_variables
-                ]
-                session.add_all(db_conversation_variables)
-            # Convert database entities to variables.
-            conversation_variables = [item.to_variable() for item in db_conversation_variables]
-
-            session.commit()
+        # Initialize conversation variables
+        conversation_variables = self._initialize_conversation_variables()

         # Create a variable pool.
         system_inputs = SystemVariable(
@@ -292,3 +274,100 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
             message_id=message_id,
             trace_manager=app_generate_entity.trace_manager,
         )
+
+    def _initialize_conversation_variables(self) -> list[VariableUnion]:
+        """
+        Initialize conversation variables for the current conversation.
+
+        This method:
+        1. Loads existing variables from the database
+        2. Creates new variables if none exist
+        3. Syncs missing variables from the workflow definition
+
+        :return: List of conversation variables ready for use
+        """
+        with Session(db.engine) as session:
+            existing_variables = self._load_existing_conversation_variables(session)
+
+            if not existing_variables:
+                # First time initialization - create all variables
+                existing_variables = self._create_all_conversation_variables(session)
+            else:
+                # Check and add any missing variables from the workflow
+                existing_variables = self._sync_missing_conversation_variables(session, existing_variables)
+
+            # Convert to Variable objects for use in the workflow
+            conversation_variables = [var.to_variable() for var in existing_variables]
+
+            session.commit()
+            return cast(list[VariableUnion], conversation_variables)
+
+    def _load_existing_conversation_variables(self, session: Session) -> list[ConversationVariable]:
+        """
+        Load existing conversation variables from the database.
+
+        :param session: Database session
+        :return: List of existing conversation variables
+        """
+        stmt = select(ConversationVariable).where(
+            ConversationVariable.app_id == self.conversation.app_id,
+            ConversationVariable.conversation_id == self.conversation.id,
+        )
+        return list(session.scalars(stmt).all())
+
+    def _create_all_conversation_variables(self, session: Session) -> list[ConversationVariable]:
+        """
+        Create all conversation variables for a new conversation.
+
+        :param session: Database session
+        :return: List of created conversation variables
+        """
+        new_variables = [
+            ConversationVariable.from_variable(
+                app_id=self.conversation.app_id, conversation_id=self.conversation.id, variable=variable
+            )
+            for variable in self._workflow.conversation_variables
+        ]
+
+        if new_variables:
+            session.add_all(new_variables)
+
+        return new_variables
+
+    def _sync_missing_conversation_variables(
+        self, session: Session, existing_variables: list[ConversationVariable]
+    ) -> list[ConversationVariable]:
+        """
+        Sync missing conversation variables from the workflow definition.
+
+        This handles the case where new variables are added to a workflow
+        after conversations have already been created.
+
+        :param session: Database session
+        :param existing_variables: List of existing conversation variables
+        :return: Updated list including any newly created variables
+        """
+        # Get IDs of existing and workflow variables
+        existing_ids = {var.id for var in existing_variables}
+        workflow_variables = {var.id: var for var in self._workflow.conversation_variables}
+
+        # Find missing variable IDs
+        missing_ids = set(workflow_variables.keys()) - existing_ids
+
+        if not missing_ids:
+            return existing_variables
+
+        # Create missing variables with their default values
+        new_variables = [
+            ConversationVariable.from_variable(
+                app_id=self.conversation.app_id,
+                conversation_id=self.conversation.id,
+                variable=workflow_variables[var_id],
+            )
+            for var_id in missing_ids
+        ]
+
+        session.add_all(new_variables)

+        # Return combined list
+        return existing_variables + new_variables
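Note: the refactor splits the old inline block into three helpers and adds one new behavior: _sync_missing_conversation_variables backfills variables that were added to the workflow after a conversation was created. A sketch of that set-difference step with plain dicts (the variable ids are made up):

# Rows already persisted for this conversation.
existing_ids = {"var-a"}
# Variables currently declared on the workflow, keyed by id.
workflow_variables = {"var-a": "definition-a", "var-b": "definition-b"}

# Only the missing ids get new rows, created from their workflow defaults;
# existing rows (and any values users already set) are left untouched.
missing_ids = set(workflow_variables) - existing_ids
print(missing_ids)  # {'var-b'}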
@@ -843,7 +843,7 @@ class ProviderConfiguration(BaseModel):
                 continue

             status = ModelStatus.ACTIVE
-            if m.model in model_setting_map:
+            if m.model_type in model_setting_map and m.model in model_setting_map[m.model_type]:
                 model_setting = model_setting_map[m.model_type][m.model]
                 if model_setting.enabled is False:
                     status = ModelStatus.DISABLED
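Note: the old condition indexed model_setting_map[m.model_type] after checking only m.model, which raises KeyError when a model type has no settings at all. The fix guards the outer key first. A plain-dict sketch of the guarded lookup:

from enum import Enum

class ModelStatus(Enum):
    ACTIVE = "active"
    DISABLED = "disabled"

model_setting_map = {"llm": {"gpt-4": {"enabled": False}}}

def status_for(model_type: str, model: str) -> ModelStatus:
    status = ModelStatus.ACTIVE
    # Guard the outer key before indexing the nested dict.
    if model_type in model_setting_map and model in model_setting_map[model_type]:
        if model_setting_map[model_type][model]["enabled"] is False:
            status = ModelStatus.DISABLED
    return status

print(status_for("llm", "gpt-4"))        # ModelStatus.DISABLED
print(status_for("embedding", "gpt-4"))  # ModelStatus.ACTIVE (no KeyError)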
@@ -185,6 +185,6 @@ Clickzetta supports advanced full-text search with multiple analyzers:

 ## References

-- [Clickzetta Vector Search Documentation](../../../../../../../yunqidoc/cn_markdown_20250526/vector-search.md)
-- [Clickzetta Inverted Index Documentation](../../../../../../../yunqidoc/cn_markdown_20250526/inverted-index.md)
-- [Clickzetta SQL Functions](../../../../../../../yunqidoc/cn_markdown_20250526/sql_functions/)
+- [Clickzetta Vector Search Documentation](https://yunqi.tech/documents/vector-search)
+- [Clickzetta Inverted Index Documentation](https://yunqi.tech/documents/inverted-index)
+- [Clickzetta SQL Functions](https://yunqi.tech/documents/sql-reference)
@@ -1,7 +1,9 @@
 import json
 import logging
 import queue
+import re
 import threading
+import time
 import uuid
 from typing import TYPE_CHECKING, Any, Optional

@@ -67,6 +69,243 @@ class ClickzettaConfig(BaseModel):
         return values


+class ClickzettaConnectionPool:
+    """
+    Global connection pool for ClickZetta connections.
+    Manages connection reuse across ClickzettaVector instances.
+    """
+
+    _instance: Optional["ClickzettaConnectionPool"] = None
+    _lock = threading.Lock()
+
+    def __init__(self):
+        self._pools: dict[str, list[tuple[Connection, float]]] = {}  # config_key -> [(connection, last_used_time)]
+        self._pool_locks: dict[str, threading.Lock] = {}
+        self._max_pool_size = 5  # Maximum connections per configuration
+        self._connection_timeout = 300  # 5 minutes timeout
+        self._cleanup_thread: Optional[threading.Thread] = None
+        self._shutdown = False
+        self._start_cleanup_thread()
+
+    @classmethod
+    def get_instance(cls) -> "ClickzettaConnectionPool":
+        """Get singleton instance of connection pool."""
+        if cls._instance is None:
+            with cls._lock:
+                if cls._instance is None:
+                    cls._instance = cls()
+        return cls._instance
+
+    def _get_config_key(self, config: ClickzettaConfig) -> str:
+        """Generate unique key for connection configuration."""
+        return (
+            f"{config.username}:{config.instance}:{config.service}:"
+            f"{config.workspace}:{config.vcluster}:{config.schema_name}"
+        )
+
+    def _create_connection(self, config: ClickzettaConfig) -> "Connection":
+        """Create a new ClickZetta connection."""
+        max_retries = 3
+        retry_delay = 1.0
+
+        for attempt in range(max_retries):
+            try:
+                connection = clickzetta.connect(
+                    username=config.username,
+                    password=config.password,
+                    instance=config.instance,
+                    service=config.service,
+                    workspace=config.workspace,
+                    vcluster=config.vcluster,
+                    schema=config.schema_name,
+                )
+
+                # Configure connection session settings
+                self._configure_connection(connection)
+                logger.debug("Created new ClickZetta connection (attempt %d/%d)", attempt + 1, max_retries)
+                return connection
+            except Exception:
+                logger.exception("ClickZetta connection attempt %d/%d failed", attempt + 1, max_retries)
+                if attempt < max_retries - 1:
+                    time.sleep(retry_delay * (2**attempt))
+                else:
+                    raise
+
+        raise RuntimeError(f"Failed to create ClickZetta connection after {max_retries} attempts")
+
+    def _configure_connection(self, connection: "Connection") -> None:
+        """Configure connection session settings."""
+        try:
+            with connection.cursor() as cursor:
+                # Temporarily suppress ClickZetta client logging to reduce noise
+                clickzetta_logger = logging.getLogger("clickzetta")
+                original_level = clickzetta_logger.level
+                clickzetta_logger.setLevel(logging.WARNING)
+
+                try:
+                    # Use quote mode for string literal escaping
+                    cursor.execute("SET cz.sql.string.literal.escape.mode = 'quote'")
+
+                    # Apply performance optimization hints
+                    performance_hints = [
+                        # Vector index optimization
+                        "SET cz.storage.parquet.vector.index.read.memory.cache = true",
+                        "SET cz.storage.parquet.vector.index.read.local.cache = false",
+                        # Query optimization
+                        "SET cz.sql.table.scan.push.down.filter = true",
+                        "SET cz.sql.table.scan.enable.ensure.filter = true",
+                        "SET cz.storage.always.prefetch.internal = true",
+                        "SET cz.optimizer.generate.columns.always.valid = true",
+                        "SET cz.sql.index.prewhere.enabled = true",
+                        # Storage optimization
+                        "SET cz.storage.parquet.enable.io.prefetch = false",
+                        "SET cz.optimizer.enable.mv.rewrite = false",
+                        "SET cz.sql.dump.as.lz4 = true",
+                        "SET cz.optimizer.limited.optimization.naive.query = true",
+                        "SET cz.sql.table.scan.enable.push.down.log = false",
+                        "SET cz.storage.use.file.format.local.stats = false",
+                        "SET cz.storage.local.file.object.cache.level = all",
+                        # Job execution optimization
+                        "SET cz.sql.job.fast.mode = true",
+                        "SET cz.storage.parquet.non.contiguous.read = true",
+                        "SET cz.sql.compaction.after.commit = true",
+                    ]
+
+                    for hint in performance_hints:
+                        cursor.execute(hint)
+                finally:
+                    # Restore original logging level
+                    clickzetta_logger.setLevel(original_level)
+
+        except Exception:
+            logger.exception("Failed to configure connection, continuing with defaults")
+
+    def _is_connection_valid(self, connection: "Connection") -> bool:
+        """Check if connection is still valid."""
+        try:
+            with connection.cursor() as cursor:
+                cursor.execute("SELECT 1")
+                return True
+        except Exception:
+            return False
+
+    def get_connection(self, config: ClickzettaConfig) -> "Connection":
+        """Get a connection from the pool or create a new one."""
+        config_key = self._get_config_key(config)
+
+        # Ensure pool lock exists
+        if config_key not in self._pool_locks:
+            with self._lock:
+                if config_key not in self._pool_locks:
+                    self._pool_locks[config_key] = threading.Lock()
+                    self._pools[config_key] = []
+
+        with self._pool_locks[config_key]:
+            pool = self._pools[config_key]
+            current_time = time.time()
+
+            # Try to reuse existing connection
+            while pool:
+                connection, last_used = pool.pop(0)
+
+                # Check if connection is not expired and still valid
+                if current_time - last_used < self._connection_timeout and self._is_connection_valid(connection):
+                    logger.debug("Reusing ClickZetta connection from pool")
+                    return connection
+                else:
+                    # Connection expired or invalid, close it
+                    try:
+                        connection.close()
+                    except Exception:
+                        pass
+
+        # No valid connection found, create new one
+        return self._create_connection(config)
+
+    def return_connection(self, config: ClickzettaConfig, connection: "Connection") -> None:
+        """Return a connection to the pool."""
+        config_key = self._get_config_key(config)
+
+        if config_key not in self._pool_locks:
+            # Pool was cleaned up, just close the connection
+            try:
+                connection.close()
+            except Exception:
+                pass
+            return
+
+        with self._pool_locks[config_key]:
+            pool = self._pools[config_key]
+
+            # Only return to pool if not at capacity and connection is valid
+            if len(pool) < self._max_pool_size and self._is_connection_valid(connection):
+                pool.append((connection, time.time()))
+                logger.debug("Returned ClickZetta connection to pool")
+            else:
+                # Pool full or connection invalid, close it
+                try:
+                    connection.close()
+                except Exception:
+                    pass
+
+    def _cleanup_expired_connections(self) -> None:
+        """Clean up expired connections from all pools."""
+        current_time = time.time()
+
+        with self._lock:
+            for config_key in list(self._pools.keys()):
+                if config_key not in self._pool_locks:
+                    continue
+
+                with self._pool_locks[config_key]:
+                    pool = self._pools[config_key]
+                    valid_connections = []
+
+                    for connection, last_used in pool:
+                        if current_time - last_used < self._connection_timeout:
+                            valid_connections.append((connection, last_used))
+                        else:
+                            try:
+                                connection.close()
+                            except Exception:
+                                pass
+
+                    self._pools[config_key] = valid_connections
+
+    def _start_cleanup_thread(self) -> None:
+        """Start background thread for connection cleanup."""
+
+        def cleanup_worker():
+            while not self._shutdown:
+                try:
+                    time.sleep(60)  # Cleanup every minute
+                    if not self._shutdown:
+                        self._cleanup_expired_connections()
+                except Exception:
+                    logger.exception("Error in connection pool cleanup")
+
+        self._cleanup_thread = threading.Thread(target=cleanup_worker, daemon=True)
+        self._cleanup_thread.start()
+
+    def shutdown(self) -> None:
+        """Shutdown connection pool and close all connections."""
+        self._shutdown = True
+
+        with self._lock:
+            for config_key in list(self._pools.keys()):
+                if config_key not in self._pool_locks:
+                    continue
+
+                with self._pool_locks[config_key]:
+                    pool = self._pools[config_key]
+                    for connection, _ in pool:
+                        try:
+                            connection.close()
+                        except Exception:
+                            pass
+                    pool.clear()
+
+
 class ClickzettaVector(BaseVector):
     """
     Clickzetta vector storage implementation.
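Note: a usage sketch of the pool above (the config object is a placeholder ClickzettaConfig). get_instance uses double-checked locking so all ClickzettaVector instances share one pool per process, and a borrowed connection should always be returned, even on error:

pool = ClickzettaConnectionPool.get_instance()  # process-wide singleton
conn = pool.get_connection(config)              # reuses a fresh pooled connection, else creates one
try:
    with conn.cursor() as cursor:
        cursor.execute("SELECT 1")
finally:
    pool.return_connection(config, conn)        # re-pooled, or closed if the pool is full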
@@ -82,70 +321,74 @@ class ClickzettaVector(BaseVector):
         super().__init__(collection_name)
         self._config = config
         self._table_name = collection_name.replace("-", "_").lower()  # Ensure valid table name
-        self._connection: Optional[Connection] = None
-        self._init_connection()
+        self._connection_pool = ClickzettaConnectionPool.get_instance()
         self._init_write_queue()

-    def _init_connection(self):
-        """Initialize Clickzetta connection."""
-        self._connection = clickzetta.connect(
-            username=self._config.username,
-            password=self._config.password,
-            instance=self._config.instance,
-            service=self._config.service,
-            workspace=self._config.workspace,
-            vcluster=self._config.vcluster,
-            schema=self._config.schema_name,
-        )
-
-        # Set session parameters for better string handling and performance optimization
-        if self._connection is not None:
-            with self._connection.cursor() as cursor:
-                # Use quote mode for string literal escaping to handle quotes better
-                cursor.execute("SET cz.sql.string.literal.escape.mode = 'quote'")
-                logger.info("Set string literal escape mode to 'quote' for better quote handling")
-
-                # Performance optimization hints for vector operations
-                self._set_performance_hints(cursor)
-
-    def _set_performance_hints(self, cursor):
-        """Set ClickZetta performance optimization hints for vector operations."""
-        try:
-            # Performance optimization hints for vector operations and query processing
-            performance_hints = [
-                # Vector index optimization
-                "SET cz.storage.parquet.vector.index.read.memory.cache = true",
-                "SET cz.storage.parquet.vector.index.read.local.cache = false",
-                # Query optimization
-                "SET cz.sql.table.scan.push.down.filter = true",
-                "SET cz.sql.table.scan.enable.ensure.filter = true",
-                "SET cz.storage.always.prefetch.internal = true",
-                "SET cz.optimizer.generate.columns.always.valid = true",
-                "SET cz.sql.index.prewhere.enabled = true",
-                # Storage optimization
-                "SET cz.storage.parquet.enable.io.prefetch = false",
-                "SET cz.optimizer.enable.mv.rewrite = false",
-                "SET cz.sql.dump.as.lz4 = true",
-                "SET cz.optimizer.limited.optimization.naive.query = true",
-                "SET cz.sql.table.scan.enable.push.down.log = false",
-                "SET cz.storage.use.file.format.local.stats = false",
-                "SET cz.storage.local.file.object.cache.level = all",
-                # Job execution optimization
-                "SET cz.sql.job.fast.mode = true",
-                "SET cz.storage.parquet.non.contiguous.read = true",
-                "SET cz.sql.compaction.after.commit = true",
-            ]
-
-            for hint in performance_hints:
-                cursor.execute(hint)
-
-            logger.info(
-                "Applied %d performance optimization hints for ClickZetta vector operations", len(performance_hints)
-            )
-
-        except Exception:
-            # Catch any errors setting performance hints but continue with defaults
-            logger.exception("Failed to set some performance hints, continuing with default settings")
+    def _get_connection(self) -> "Connection":
+        """Get a connection from the pool."""
+        return self._connection_pool.get_connection(self._config)
+
+    def _return_connection(self, connection: "Connection") -> None:
+        """Return a connection to the pool."""
+        self._connection_pool.return_connection(self._config, connection)
+
+    class ConnectionContext:
+        """Context manager for borrowing and returning connections."""
+
+        def __init__(self, vector_instance: "ClickzettaVector"):
+            self.vector = vector_instance
+            self.connection: Optional[Connection] = None
+
+        def __enter__(self) -> "Connection":
+            self.connection = self.vector._get_connection()
+            return self.connection
+
+        def __exit__(self, exc_type, exc_val, exc_tb):
+            if self.connection:
+                self.vector._return_connection(self.connection)
+
+    def get_connection_context(self) -> "ClickzettaVector.ConnectionContext":
+        """Get a connection context manager."""
+        return self.ConnectionContext(self)
+
+    def _parse_metadata(self, raw_metadata: str, row_id: str) -> dict:
+        """
+        Parse metadata from JSON string with proper error handling and fallback.
+
+        Args:
+            raw_metadata: Raw JSON string from database
+            row_id: Row ID for fallback document_id
+
+        Returns:
+            Parsed metadata dict with guaranteed required fields
+        """
+        try:
+            if raw_metadata:
+                metadata = json.loads(raw_metadata)
+
+                # Handle double-encoded JSON
+                if isinstance(metadata, str):
+                    metadata = json.loads(metadata)
+
+                # Ensure we have a dict
+                if not isinstance(metadata, dict):
+                    metadata = {}
+            else:
+                metadata = {}
+        except (json.JSONDecodeError, TypeError):
+            logger.exception("JSON parsing failed for metadata")
+            # Fallback: extract document_id with regex
+            doc_id_match = re.search(r'"document_id":\s*"([^"]+)"', raw_metadata or "")
+            metadata = {"document_id": doc_id_match.group(1)} if doc_id_match else {}
+
+        # Ensure required fields are set
+        metadata["doc_id"] = row_id  # segment id
+
+        # Ensure document_id exists (critical for Dify's format_retrieval_documents)
+        if "document_id" not in metadata:
+            metadata["document_id"] = row_id  # fallback to segment id
+
+        return metadata

     @classmethod
     def _init_write_queue(cls):
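Note: ConnectionContext packages the borrow/return discipline so call sites cannot leak a pooled connection. A usage sketch, where vector is a ClickzettaVector instance:

with vector.get_connection_context() as connection:  # __enter__ borrows from the pool
    with connection.cursor() as cursor:
        cursor.execute("SELECT 1")
# __exit__ has returned the connection here, even if the body raised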
@@ -204,24 +447,33 @@ class ClickzettaVector(BaseVector):
         return "clickzetta"

     def _ensure_connection(self) -> "Connection":
-        """Ensure connection is available and return it."""
-        if self._connection is None:
-            raise RuntimeError("Database connection not initialized")
-        return self._connection
+        """Get a connection from the pool."""
+        return self._get_connection()

     def _table_exists(self) -> bool:
         """Check if the table exists."""
         try:
-            connection = self._ensure_connection()
-            with connection.cursor() as cursor:
-                cursor.execute(f"DESC {self._config.schema_name}.{self._table_name}")
-                return True
-        except (RuntimeError, ValueError) as e:
-            if "table or view not found" in str(e).lower():
+            with self.get_connection_context() as connection:
+                with connection.cursor() as cursor:
+                    cursor.execute(f"DESC {self._config.schema_name}.{self._table_name}")
+                    return True
+        except Exception as e:
+            error_message = str(e).lower()
+            # Handle ClickZetta specific "table or view not found" errors
+            if any(
+                phrase in error_message
+                for phrase in ["table or view not found", "czlh-42000", "semantic analysis exception"]
+            ):
+                logger.debug("Table %s.%s does not exist", self._config.schema_name, self._table_name)
                 return False
             else:
-                # Re-raise if it's a different error
-                raise
+                # For other connection/permission errors, log warning but return False to avoid blocking cleanup
+                logger.exception(
+                    "Table existence check failed for %s.%s, assuming it doesn't exist",
+                    self._config.schema_name,
+                    self._table_name,
+                )
+                return False

     def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
         """Create the collection and add initial documents."""
@@ -253,17 +505,17 @@ class ClickzettaVector(BaseVector):
         ) COMMENT 'Dify RAG knowledge base vector storage table for document embeddings and content'
         """

-        connection = self._ensure_connection()
-        with connection.cursor() as cursor:
-            cursor.execute(create_table_sql)
-            logger.info("Created table %s.%s", self._config.schema_name, self._table_name)
+        with self.get_connection_context() as connection:
+            with connection.cursor() as cursor:
+                cursor.execute(create_table_sql)
+                logger.info("Created table %s.%s", self._config.schema_name, self._table_name)

-            # Create vector index
-            self._create_vector_index(cursor)
+                # Create vector index
+                self._create_vector_index(cursor)

-            # Create inverted index for full-text search if enabled
-            if self._config.enable_inverted_index:
-                self._create_inverted_index(cursor)
+                # Create inverted index for full-text search if enabled
+                if self._config.enable_inverted_index:
+                    self._create_inverted_index(cursor)

     def _create_vector_index(self, cursor):
         """Create HNSW vector index for similarity search."""
@@ -432,39 +684,53 @@ class ClickzettaVector(BaseVector):
             f"VALUES (?, ?, CAST(? AS JSON), CAST(? AS VECTOR({vector_dimension})))"
         )

-        connection = self._ensure_connection()
-        with connection.cursor() as cursor:
-            try:
-                # Set session-level hints for batch insert operations
-                # Note: executemany doesn't support hints parameter, so we set them as session variables
-                cursor.execute("SET cz.sql.job.fast.mode = true")
-                cursor.execute("SET cz.sql.compaction.after.commit = true")
-                cursor.execute("SET cz.storage.always.prefetch.internal = true")
-
-                cursor.executemany(insert_sql, data_rows)
-                logger.info(
-                    "Inserted batch %d/%d (%d valid docs using parameterized query with VECTOR(%d) cast)",
-                    batch_index // batch_size + 1,
-                    total_batches,
-                    len(data_rows),
-                    vector_dimension,
-                )
-            except (RuntimeError, ValueError, TypeError, ConnectionError) as e:
-                logger.exception("Parameterized SQL execution failed for %d documents", len(data_rows))
-                logger.exception("SQL template: %s", insert_sql)
-                logger.exception("Sample data row: %s", data_rows[0] if data_rows else "None")
-                raise
+        with self.get_connection_context() as connection:
+            with connection.cursor() as cursor:
+                try:
+                    # Set session-level hints for batch insert operations
+                    # Note: executemany doesn't support hints parameter, so we set them as session variables
+                    # Temporarily suppress ClickZetta client logging to reduce noise
+                    clickzetta_logger = logging.getLogger("clickzetta")
+                    original_level = clickzetta_logger.level
+                    clickzetta_logger.setLevel(logging.WARNING)
+
+                    try:
+                        cursor.execute("SET cz.sql.job.fast.mode = true")
+                        cursor.execute("SET cz.sql.compaction.after.commit = true")
+                        cursor.execute("SET cz.storage.always.prefetch.internal = true")
+                    finally:
+                        # Restore original logging level
+                        clickzetta_logger.setLevel(original_level)
+
+                    cursor.executemany(insert_sql, data_rows)
+                    logger.info(
+                        "Inserted batch %d/%d (%d valid docs using parameterized query with VECTOR(%d) cast)",
+                        batch_index // batch_size + 1,
+                        total_batches,
+                        len(data_rows),
+                        vector_dimension,
+                    )
+                except (RuntimeError, ValueError, TypeError, ConnectionError) as e:
+                    logger.exception("Parameterized SQL execution failed for %d documents", len(data_rows))
+                    logger.exception("SQL template: %s", insert_sql)
+                    logger.exception("Sample data row: %s", data_rows[0] if data_rows else "None")
+                    raise

     def text_exists(self, id: str) -> bool:
         """Check if a document exists by ID."""
         # Check if table exists first
         if not self._table_exists():
             return False

         safe_id = self._safe_doc_id(id)
-        connection = self._ensure_connection()
-        with connection.cursor() as cursor:
-            cursor.execute(
-                f"SELECT COUNT(*) FROM {self._config.schema_name}.{self._table_name} WHERE id = ?", [safe_id]
-            )
-            result = cursor.fetchone()
-            return result[0] > 0 if result else False
+        with self.get_connection_context() as connection:
+            with connection.cursor() as cursor:
+                cursor.execute(
+                    f"SELECT COUNT(*) FROM {self._config.schema_name}.{self._table_name} WHERE id = ?",
+                    binding_params=[safe_id],
+                )
+                result = cursor.fetchone()
+                return result[0] > 0 if result else False

     def delete_by_ids(self, ids: list[str]) -> None:
         """Delete documents by IDs."""
@@ -482,13 +748,14 @@ class ClickzettaVector(BaseVector):
     def _delete_by_ids_impl(self, ids: list[str]) -> None:
         """Implementation of delete by IDs (executed in write worker thread)."""
         safe_ids = [self._safe_doc_id(id) for id in ids]
-        # Create properly escaped string literals for SQL
-        id_list = ",".join(f"'{id}'" for id in safe_ids)
-        sql = f"DELETE FROM {self._config.schema_name}.{self._table_name} WHERE id IN ({id_list})"
-
-        connection = self._ensure_connection()
-        with connection.cursor() as cursor:
-            cursor.execute(sql)
+        # Use parameterized query to prevent SQL injection
+        placeholders = ",".join("?" for _ in safe_ids)
+        sql = f"DELETE FROM {self._config.schema_name}.{self._table_name} WHERE id IN ({placeholders})"
+
+        with self.get_connection_context() as connection:
+            with connection.cursor() as cursor:
+                cursor.execute(sql, binding_params=safe_ids)

     def delete_by_metadata_field(self, key: str, value: str) -> None:
         """Delete documents by metadata field."""
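Note: building one ? placeholder per id and passing the values separately removes the string interpolation the old id_list had, and the injection risk that came with it. The same pattern, sketched with the standard library's sqlite3 since it also uses qmark parameters (binding_params is the ClickZetta cursor's equivalent):

import sqlite3

ids = ["doc-1", "doc-2", "doc'3"]  # note: quoting is now the driver's job
placeholders = ",".join("?" for _ in ids)
sql = f"DELETE FROM docs WHERE id IN ({placeholders})"

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE docs (id TEXT)")
conn.executemany("INSERT INTO docs VALUES (?)", [(i,) for i in ids])
conn.execute(sql, ids)  # values are bound, never interpolated into the SQL string
print(conn.execute("SELECT COUNT(*) FROM docs").fetchone()[0])  # 0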
@@ -502,19 +769,28 @@ class ClickzettaVector(BaseVector):

     def _delete_by_metadata_field_impl(self, key: str, value: str) -> None:
         """Implementation of delete by metadata field (executed in write worker thread)."""
-        connection = self._ensure_connection()
-        with connection.cursor() as cursor:
-            # Using JSON path to filter with parameterized query
-            # Note: JSON path requires literal key name, cannot be parameterized
-            # Use json_extract_string function for ClickZetta compatibility
-            sql = (
-                f"DELETE FROM {self._config.schema_name}.{self._table_name} "
-                f"WHERE json_extract_string({Field.METADATA_KEY.value}, '$.{key}') = ?"
-            )
-            cursor.execute(sql, [value])
+        with self.get_connection_context() as connection:
+            with connection.cursor() as cursor:
+                # Using JSON path to filter with parameterized query
+                # Note: JSON path requires literal key name, cannot be parameterized
+                # Use json_extract_string function for ClickZetta compatibility
+                sql = (
+                    f"DELETE FROM {self._config.schema_name}.{self._table_name} "
+                    f"WHERE json_extract_string({Field.METADATA_KEY.value}, '$.{key}') = ?"
+                )
+                cursor.execute(sql, binding_params=[value])

     def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
         """Search for documents by vector similarity."""
+        # Check if table exists first
+        if not self._table_exists():
+            logger.warning(
+                "Table %s.%s does not exist, returning empty results",
+                self._config.schema_name,
+                self._table_name,
+            )
+            return []

         top_k = kwargs.get("top_k", 10)
         score_threshold = kwargs.get("score_threshold", 0.0)
         document_ids_filter = kwargs.get("document_ids_filter")
@@ -565,56 +841,31 @@ class ClickzettaVector(BaseVector):
         """

         documents = []
-        connection = self._ensure_connection()
-        with connection.cursor() as cursor:
-            # Use hints parameter for vector search optimization
-            search_hints = {
-                "hints": {
-                    "sdk.job.timeout": 60,  # Increase timeout for vector search
-                    "cz.sql.job.fast.mode": True,
-                    "cz.storage.parquet.vector.index.read.memory.cache": True,
-                }
-            }
-            cursor.execute(search_sql, parameters=search_hints)
-            results = cursor.fetchall()
-
-            for row in results:
-                # Parse metadata from JSON string (may be double-encoded)
-                try:
-                    if row[2]:
-                        metadata = json.loads(row[2])
-
-                        # If result is a string, it's double-encoded JSON - parse again
-                        if isinstance(metadata, str):
-                            metadata = json.loads(metadata)
-
-                        if not isinstance(metadata, dict):
-                            metadata = {}
-                    else:
-                        metadata = {}
-                except (json.JSONDecodeError, TypeError) as e:
-                    logger.exception("JSON parsing failed")
-                    # Fallback: extract document_id with regex
-                    import re
-
-                    doc_id_match = re.search(r'"document_id":\s*"([^"]+)"', str(row[2] or ""))
-                    metadata = {"document_id": doc_id_match.group(1)} if doc_id_match else {}
-
-                # Ensure required fields are set
-                metadata["doc_id"] = row[0]  # segment id
-
-                # Ensure document_id exists (critical for Dify's format_retrieval_documents)
-                if "document_id" not in metadata:
-                    metadata["document_id"] = row[0]  # fallback to segment id
-
-                # Add score based on distance
-                if self._config.vector_distance_function == "cosine_distance":
-                    metadata["score"] = 1 - (row[3] / 2)
-                else:
-                    metadata["score"] = 1 / (1 + row[3])
-
-                doc = Document(page_content=row[1], metadata=metadata)
-                documents.append(doc)
+        with self.get_connection_context() as connection:
+            with connection.cursor() as cursor:
+                # Use hints parameter for vector search optimization
+                search_hints = {
+                    "hints": {
+                        "sdk.job.timeout": 60,  # Increase timeout for vector search
+                        "cz.sql.job.fast.mode": True,
+                        "cz.storage.parquet.vector.index.read.memory.cache": True,
+                    }
+                }
+                cursor.execute(search_sql, search_hints)
+                results = cursor.fetchall()
+
+                for row in results:
+                    # Parse metadata using centralized method
+                    metadata = self._parse_metadata(row[2], row[0])
+
+                    # Add score based on distance
+                    if self._config.vector_distance_function == "cosine_distance":
+                        metadata["score"] = 1 - (row[3] / 2)
+                    else:
+                        metadata["score"] = 1 / (1 + row[3])
+
+                    doc = Document(page_content=row[1], metadata=metadata)
+                    documents.append(doc)

         return documents
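Note: both search paths now delegate to _parse_metadata, whose double json.loads handles metadata that was JSON-encoded twice before storage. A runnable demonstration of the failure mode it guards against:

import json

# If a dict was json.dumps-ed twice on the way into the database,
# the first loads() yields a str, not a dict.
double_encoded = json.dumps(json.dumps({"document_id": "abc"}))

metadata = json.loads(double_encoded)
if isinstance(metadata, str):   # still a string -> decode once more
    metadata = json.loads(metadata)
print(metadata["document_id"])  # abc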
@@ -624,6 +875,15 @@ class ClickzettaVector(BaseVector):
             logger.warning("Full-text search is not enabled. Enable inverted index in config.")
             return []

+        # Check if table exists first
+        if not self._table_exists():
+            logger.warning(
+                "Table %s.%s does not exist, returning empty results",
+                self._config.schema_name,
+                self._table_name,
+            )
+            return []
+
         top_k = kwargs.get("top_k", 10)
         document_ids_filter = kwargs.get("document_ids_filter")
@@ -659,62 +919,70 @@ class ClickzettaVector(BaseVector):
         """

         documents = []
-        connection = self._ensure_connection()
-        with connection.cursor() as cursor:
-            try:
-                # Use hints parameter for full-text search optimization
-                fulltext_hints = {
-                    "hints": {
-                        "sdk.job.timeout": 30,  # Timeout for full-text search
-                        "cz.sql.job.fast.mode": True,
-                        "cz.sql.index.prewhere.enabled": True,
-                    }
-                }
-                cursor.execute(search_sql, parameters=fulltext_hints)
-                results = cursor.fetchall()
-
-                for row in results:
-                    # Parse metadata from JSON string (may be double-encoded)
-                    try:
-                        if row[2]:
-                            metadata = json.loads(row[2])
-
-                            # If result is a string, it's double-encoded JSON - parse again
-                            if isinstance(metadata, str):
-                                metadata = json.loads(metadata)
-
-                            if not isinstance(metadata, dict):
-                                metadata = {}
-                        else:
-                            metadata = {}
-                    except (json.JSONDecodeError, TypeError) as e:
-                        logger.exception("JSON parsing failed")
-                        # Fallback: extract document_id with regex
-                        import re
-
-                        doc_id_match = re.search(r'"document_id":\s*"([^"]+)"', str(row[2] or ""))
-                        metadata = {"document_id": doc_id_match.group(1)} if doc_id_match else {}
-
-                    # Ensure required fields are set
-                    metadata["doc_id"] = row[0]  # segment id
-
-                    # Ensure document_id exists (critical for Dify's format_retrieval_documents)
-                    if "document_id" not in metadata:
-                        metadata["document_id"] = row[0]  # fallback to segment id
-
-                    # Add a relevance score for full-text search
-                    metadata["score"] = 1.0  # Clickzetta doesn't provide relevance scores
-                    doc = Document(page_content=row[1], metadata=metadata)
-                    documents.append(doc)
-            except (RuntimeError, ValueError, TypeError, ConnectionError) as e:
-                logger.exception("Full-text search failed")
-                # Fallback to LIKE search if full-text search fails
-                return self._search_by_like(query, **kwargs)
+        with self.get_connection_context() as connection:
+            with connection.cursor() as cursor:
+                try:
+                    # Use hints parameter for full-text search optimization
+                    fulltext_hints = {
+                        "hints": {
+                            "sdk.job.timeout": 30,  # Timeout for full-text search
+                            "cz.sql.job.fast.mode": True,
+                            "cz.sql.index.prewhere.enabled": True,
+                        }
+                    }
+                    cursor.execute(search_sql, fulltext_hints)
+                    results = cursor.fetchall()
+
+                    for row in results:
+                        # Parse metadata from JSON string (may be double-encoded)
+                        try:
+                            if row[2]:
+                                metadata = json.loads(row[2])
+
+                                # If result is a string, it's double-encoded JSON - parse again
+                                if isinstance(metadata, str):
+                                    metadata = json.loads(metadata)
+
+                                if not isinstance(metadata, dict):
+                                    metadata = {}
+                            else:
+                                metadata = {}
+                        except (json.JSONDecodeError, TypeError) as e:
+                            logger.exception("JSON parsing failed")
+                            # Fallback: extract document_id with regex
+                            doc_id_match = re.search(r'"document_id":\s*"([^"]+)"', str(row[2] or ""))
+                            metadata = {"document_id": doc_id_match.group(1)} if doc_id_match else {}
+
+                        # Ensure required fields are set
+                        metadata["doc_id"] = row[0]  # segment id
+
+                        # Ensure document_id exists (critical for Dify's format_retrieval_documents)
+                        if "document_id" not in metadata:
+                            metadata["document_id"] = row[0]  # fallback to segment id
+
+                        # Add a relevance score for full-text search
+                        metadata["score"] = 1.0  # Clickzetta doesn't provide relevance scores
+                        doc = Document(page_content=row[1], metadata=metadata)
+                        documents.append(doc)
+                except (RuntimeError, ValueError, TypeError, ConnectionError) as e:
+                    logger.exception("Full-text search failed")
+                    # Fallback to LIKE search if full-text search fails
+                    return self._search_by_like(query, **kwargs)

         return documents

     def _search_by_like(self, query: str, **kwargs: Any) -> list[Document]:
         """Fallback search using LIKE operator."""
+        # Check if table exists first
+        if not self._table_exists():
+            logger.warning(
+                "Table %s.%s does not exist, returning empty results",
+                self._config.schema_name,
+                self._table_name,
+            )
+            return []

         top_k = kwargs.get("top_k", 10)
         document_ids_filter = kwargs.get("document_ids_filter")
@@ -746,58 +1014,33 @@ class ClickzettaVector(BaseVector):
         """

         documents = []
-        connection = self._ensure_connection()
-        with connection.cursor() as cursor:
-            # Use hints parameter for LIKE search optimization
-            like_hints = {
-                "hints": {
-                    "sdk.job.timeout": 20,  # Timeout for LIKE search
-                    "cz.sql.job.fast.mode": True,
-                }
-            }
-            cursor.execute(search_sql, parameters=like_hints)
-            results = cursor.fetchall()
-
-            for row in results:
-                # Parse metadata from JSON string (may be double-encoded)
-                try:
-                    if row[2]:
-                        metadata = json.loads(row[2])
-
-                        # If result is a string, it's double-encoded JSON - parse again
-                        if isinstance(metadata, str):
-                            metadata = json.loads(metadata)
-
-                        if not isinstance(metadata, dict):
-                            metadata = {}
-                    else:
-                        metadata = {}
-                except (json.JSONDecodeError, TypeError) as e:
-                    logger.exception("JSON parsing failed")
-                    # Fallback: extract document_id with regex
-                    import re
-
-                    doc_id_match = re.search(r'"document_id":\s*"([^"]+)"', str(row[2] or ""))
-                    metadata = {"document_id": doc_id_match.group(1)} if doc_id_match else {}
-
-                # Ensure required fields are set
-                metadata["doc_id"] = row[0]  # segment id
-
-                # Ensure document_id exists (critical for Dify's format_retrieval_documents)
-                if "document_id" not in metadata:
-                    metadata["document_id"] = row[0]  # fallback to segment id
-
-                metadata["score"] = 0.5  # Lower score for LIKE search
-                doc = Document(page_content=row[1], metadata=metadata)
-                documents.append(doc)
+        with self.get_connection_context() as connection:
+            with connection.cursor() as cursor:
+                # Use hints parameter for LIKE search optimization
+                like_hints = {
+                    "hints": {
+                        "sdk.job.timeout": 20,  # Timeout for LIKE search
+                        "cz.sql.job.fast.mode": True,
+                    }
+                }
+                cursor.execute(search_sql, like_hints)
+                results = cursor.fetchall()
+
+                for row in results:
+                    # Parse metadata using centralized method
+                    metadata = self._parse_metadata(row[2], row[0])
+
+                    metadata["score"] = 0.5  # Lower score for LIKE search
+                    doc = Document(page_content=row[1], metadata=metadata)
+                    documents.append(doc)

         return documents

     def delete(self) -> None:
         """Delete the entire collection."""
-        connection = self._ensure_connection()
-        with connection.cursor() as cursor:
-            cursor.execute(f"DROP TABLE IF EXISTS {self._config.schema_name}.{self._table_name}")
+        with self.get_connection_context() as connection:
+            with connection.cursor() as cursor:
+                cursor.execute(f"DROP TABLE IF EXISTS {self._config.schema_name}.{self._table_name}")

     def _format_vector_simple(self, vector: list[float]) -> str:
         """Simple vector formatting for SQL queries."""
@@ -168,7 +168,57 @@ def _extract_text_by_mime_type(*, file_content: bytes, mime_type: str) -> str:
 def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str) -> str:
     """Extract text from a file based on its file extension."""
     match file_extension:
-        case ".txt" | ".markdown" | ".md" | ".html" | ".htm" | ".xml":
+        case (
+            ".txt"
+            | ".markdown"
+            | ".md"
+            | ".html"
+            | ".htm"
+            | ".xml"
+            | ".c"
+            | ".h"
+            | ".cpp"
+            | ".hpp"
+            | ".cc"
+            | ".cxx"
+            | ".c++"
+            | ".py"
+            | ".js"
+            | ".ts"
+            | ".jsx"
+            | ".tsx"
+            | ".java"
+            | ".php"
+            | ".rb"
+            | ".go"
+            | ".rs"
+            | ".swift"
+            | ".kt"
+            | ".scala"
+            | ".sh"
+            | ".bash"
+            | ".bat"
+            | ".ps1"
+            | ".sql"
+            | ".r"
+            | ".m"
+            | ".pl"
+            | ".lua"
+            | ".vim"
+            | ".asm"
+            | ".s"
+            | ".css"
+            | ".scss"
+            | ".less"
+            | ".sass"
+            | ".ini"
+            | ".cfg"
+            | ".conf"
+            | ".toml"
+            | ".env"
+            | ".log"
+            | ".vtt"
+        ):
             return _extract_text_from_plain_text(file_content)
         case ".json":
             return _extract_text_from_json(file_content)
@@ -194,8 +244,6 @@ def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str) -> str:
             return _extract_text_from_eml(file_content)
         case ".msg":
             return _extract_text_from_msg(file_content)
-        case ".vtt":
-            return _extract_text_from_vtt(file_content)
         case ".properties":
             return _extract_text_from_properties(file_content)
         case _:
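Note: the widened case folds dozens of source-code extensions into the plain-text branch, and .vtt moves there too (removed from its dedicated branch in the hunk above). A compact sketch of the same or-pattern dispatch:

def kind(ext: str) -> str:
    match ext:
        case ".txt" | ".md" | ".py" | ".rs" | ".vtt":
            return "plain text"
        case ".json":
            return "json"
        case _:
            return "unsupported"

print(kind(".py"))   # plain text
print(kind(".vtt"))  # plain text (no longer a dedicated branch)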
@@ -50,12 +50,16 @@ class ConversationService:
             Conversation.from_account_id == (user.id if isinstance(user, Account) else None),
             or_(Conversation.invoke_from.is_(None), Conversation.invoke_from == invoke_from.value),
         )
-        # Check if include_ids is not None and not empty to avoid WHERE false condition
-        if include_ids is not None and len(include_ids) > 0:
+        # Check if include_ids is not None to apply filter
+        if include_ids is not None:
+            if len(include_ids) == 0:
+                # If include_ids is empty, return empty result
+                return InfiniteScrollPagination(data=[], limit=limit, has_more=False)
             stmt = stmt.where(Conversation.id.in_(include_ids))
-        # Check if exclude_ids is not None and not empty to avoid WHERE false condition
-        if exclude_ids is not None and len(exclude_ids) > 0:
-            stmt = stmt.where(~Conversation.id.in_(exclude_ids))
+        # Check if exclude_ids is not None to apply filter
+        if exclude_ids is not None:
+            if len(exclude_ids) > 0:
+                stmt = stmt.where(~Conversation.id.in_(exclude_ids))

         # define sort fields and directions
         sort_field, sort_direction = cls._get_sort_params(sort_by)
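Note: the behavior change above, sketched with plain lists: an empty include_ids now short-circuits to an empty page instead of emitting a degenerate WHERE id IN () clause, while None still means "no filter".

def filter_ids(rows: list[str], include_ids: list[str] | None) -> list[str]:
    if include_ids is not None:
        if len(include_ids) == 0:
            return []  # empty filter -> explicitly empty result, no query needed
        rows = [r for r in rows if r in include_ids]
    return rows

print(filter_ids(["a", "b"], None))   # ['a', 'b']  (no filter applied)
print(filter_ids(["a", "b"], []))     # []          (explicitly nothing)
print(filter_ids(["a", "b"], ["b"]))  # ['b']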
@@ -59,7 +59,14 @@ def clean_dataset_task(
         # Fix: Always clean vector database resources regardless of document existence
         # This ensures all 33 vector databases properly drop tables/collections/indices
         if doc_form is None:
-            raise ValueError("Index type must be specified.")
+            # Use default paragraph index type for empty datasets to enable vector database cleanup
+            from core.rag.index_processor.constant.index_type import IndexType
+
+            doc_form = IndexType.PARAGRAPH_INDEX
+            logging.info(
+                click.style(f"No documents found, using default index type for cleanup: {doc_form}", fg="yellow")
+            )

         index_processor = IndexProcessorFactory(doc_form).init_index_processor()
         index_processor.clean(dataset, None, with_keywords=True, delete_child_chunks=True)
@@ -0,0 +1,487 @@
+from unittest.mock import patch
+
+import pytest
+from faker import Faker
+
+from models.api_based_extension import APIBasedExtension
+from services.account_service import AccountService, TenantService
+from services.api_based_extension_service import APIBasedExtensionService
+
+
+class TestAPIBasedExtensionService:
+    """Integration tests for APIBasedExtensionService using testcontainers."""
+
+    @pytest.fixture
+    def mock_external_service_dependencies(self):
+        """Mock setup for external service dependencies."""
+        with (
+            patch("services.account_service.FeatureService") as mock_account_feature_service,
+            patch("services.api_based_extension_service.APIBasedExtensionRequestor") as mock_requestor,
+        ):
+            # Setup default mock returns
+            mock_account_feature_service.get_features.return_value.billing.enabled = False
+
+            # Mock successful ping response
+            mock_requestor_instance = mock_requestor.return_value
+            mock_requestor_instance.request.return_value = {"result": "pong"}
+
+            yield {
+                "account_feature_service": mock_account_feature_service,
+                "requestor": mock_requestor,
+                "requestor_instance": mock_requestor_instance,
+            }
+
+    def _create_test_account_and_tenant(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Helper method to create a test account and tenant for testing.
+
+        Args:
+            db_session_with_containers: Database session from testcontainers infrastructure
+            mock_external_service_dependencies: Mock dependencies
+
+        Returns:
+            tuple: (account, tenant) - Created account and tenant instances
+        """
+        fake = Faker()
+
+        # Setup mocks for account creation
+        mock_external_service_dependencies[
+            "account_feature_service"
+        ].get_system_features.return_value.is_allow_register = True
+
+        # Create account and tenant
+        account = AccountService.create_account(
+            email=fake.email(),
+            name=fake.name(),
+            interface_language="en-US",
+            password=fake.password(length=12),
+        )
+        TenantService.create_owner_tenant_if_not_exist(account, name=fake.company())
+        tenant = account.current_tenant
+
+        return account, tenant
+
+    def test_save_extension_success(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test successful saving of API-based extension.
+        """
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Setup extension data
+        extension_data = APIBasedExtension()
+        extension_data.tenant_id = tenant.id
+        extension_data.name = fake.company()
+        extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
+        extension_data.api_key = fake.password(length=20)
+
+        # Save extension
+        saved_extension = APIBasedExtensionService.save(extension_data)
+
+        # Verify extension was saved correctly
+        assert saved_extension.id is not None
+        assert saved_extension.tenant_id == tenant.id
+        assert saved_extension.name == extension_data.name
+        assert saved_extension.api_endpoint == extension_data.api_endpoint
+        assert saved_extension.api_key == extension_data.api_key  # Should be decrypted when retrieved
+        assert saved_extension.created_at is not None
+
+        # Verify extension was saved to database
+        from extensions.ext_database import db
+
+        db.session.refresh(saved_extension)
+        assert saved_extension.id is not None
+
+        # Verify ping connection was called
+        mock_external_service_dependencies["requestor_instance"].request.assert_called_once()
+
+    def test_save_extension_validation_errors(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test validation errors when saving extension with invalid data.
+        """
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Test empty name
+        extension_data = APIBasedExtension()
+        extension_data.tenant_id = tenant.id
+        extension_data.name = ""
+        extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
+        extension_data.api_key = fake.password(length=20)
+
+        with pytest.raises(ValueError, match="name must not be empty"):
+            APIBasedExtensionService.save(extension_data)
+
+        # Test empty api_endpoint
+        extension_data.name = fake.company()
+        extension_data.api_endpoint = ""
+
+        with pytest.raises(ValueError, match="api_endpoint must not be empty"):
+            APIBasedExtensionService.save(extension_data)
+
+        # Test empty api_key
+        extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
+        extension_data.api_key = ""
+
+        with pytest.raises(ValueError, match="api_key must not be empty"):
+            APIBasedExtensionService.save(extension_data)
+
+    def test_get_all_by_tenant_id_success(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test successful retrieval of all extensions by tenant ID.
+        """
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Create multiple extensions
+        extensions = []
+        for i in range(3):
+            extension_data = APIBasedExtension()
+            extension_data.tenant_id = tenant.id
+            extension_data.name = f"Extension {i}: {fake.company()}"
+            extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
+            extension_data.api_key = fake.password(length=20)
+
+            saved_extension = APIBasedExtensionService.save(extension_data)
+            extensions.append(saved_extension)
+
+        # Get all extensions for tenant
+        extension_list = APIBasedExtensionService.get_all_by_tenant_id(tenant.id)
+
+        # Verify results
+        assert len(extension_list) == 3
+
+        # Verify all extensions belong to the correct tenant and are ordered by created_at desc
+        for i, extension in enumerate(extension_list):
+            assert extension.tenant_id == tenant.id
+            assert extension.api_key is not None  # Should be decrypted
+            if i > 0:
+                # Verify descending order (newer first)
+                assert extension.created_at <= extension_list[i - 1].created_at
+
+    def test_get_with_tenant_id_success(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test successful retrieval of extension by tenant ID and extension ID.
+        """
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Create an extension
+        extension_data = APIBasedExtension()
+        extension_data.tenant_id = tenant.id
+        extension_data.name = fake.company()
+        extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
+        extension_data.api_key = fake.password(length=20)
+
+        created_extension = APIBasedExtensionService.save(extension_data)
+
+        # Get extension by ID
+        retrieved_extension = APIBasedExtensionService.get_with_tenant_id(tenant.id, created_extension.id)
+
+        # Verify extension was retrieved correctly
+        assert retrieved_extension is not None
+        assert retrieved_extension.id == created_extension.id
+        assert retrieved_extension.tenant_id == tenant.id
+        assert retrieved_extension.name == extension_data.name
+        assert retrieved_extension.api_endpoint == extension_data.api_endpoint
+        assert retrieved_extension.api_key == extension_data.api_key  # Should be decrypted
+        assert retrieved_extension.created_at is not None
+
+    def test_get_with_tenant_id_not_found(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test retrieval of extension when extension is not found.
+        """
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+        non_existent_extension_id = fake.uuid4()
+
+        # Try to get non-existent extension
+        with pytest.raises(ValueError, match="API based extension is not found"):
+            APIBasedExtensionService.get_with_tenant_id(tenant.id, non_existent_extension_id)
+
+    def test_delete_extension_success(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test successful deletion of extension.
+        """
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Create an extension first
+        extension_data = APIBasedExtension()
+        extension_data.tenant_id = tenant.id
+        extension_data.name = fake.company()
+        extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
+        extension_data.api_key = fake.password(length=20)
+
+        created_extension = APIBasedExtensionService.save(extension_data)
+        extension_id = created_extension.id
+
+        # Delete the extension
+        APIBasedExtensionService.delete(created_extension)
+
+        # Verify extension was deleted
+        from extensions.ext_database import db
+
+        deleted_extension = db.session.query(APIBasedExtension).filter(APIBasedExtension.id == extension_id).first()
+        assert deleted_extension is None
+
+    def test_save_extension_duplicate_name(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test validation error when saving extension with duplicate name.
+        """
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Create first extension
+        extension_data1 = APIBasedExtension()
+        extension_data1.tenant_id = tenant.id
+        extension_data1.name = "Test Extension"
+        extension_data1.api_endpoint = f"https://{fake.domain_name()}/api"
+        extension_data1.api_key = fake.password(length=20)
+
+        APIBasedExtensionService.save(extension_data1)
+
+        # Try to create second extension with same name
+        extension_data2 = APIBasedExtension()
+        extension_data2.tenant_id = tenant.id
+        extension_data2.name = "Test Extension"  # Same name
+        extension_data2.api_endpoint = f"https://{fake.domain_name()}/api"
+        extension_data2.api_key = fake.password(length=20)
+
+        with pytest.raises(ValueError, match="name must be unique, it is already existed"):
|
||||
APIBasedExtensionService.save(extension_data2)
|
||||
|
||||
def test_save_extension_update_existing(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test successful update of existing extension.
|
||||
"""
|
||||
fake = Faker()
|
||||
account, tenant = self._create_test_account_and_tenant(
|
||||
db_session_with_containers, mock_external_service_dependencies
|
||||
)
|
||||
|
||||
# Create initial extension
|
||||
extension_data = APIBasedExtension()
|
||||
extension_data.tenant_id = tenant.id
|
||||
extension_data.name = fake.company()
|
||||
extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
|
||||
extension_data.api_key = fake.password(length=20)
|
||||
|
||||
created_extension = APIBasedExtensionService.save(extension_data)
|
||||
|
||||
# Save original values for later comparison
|
||||
original_name = created_extension.name
|
||||
original_endpoint = created_extension.api_endpoint
|
||||
|
||||
# Update the extension
|
||||
new_name = fake.company()
|
||||
new_endpoint = f"https://{fake.domain_name()}/api"
|
||||
new_api_key = fake.password(length=20)
|
||||
|
||||
created_extension.name = new_name
|
||||
created_extension.api_endpoint = new_endpoint
|
||||
created_extension.api_key = new_api_key
|
||||
|
||||
updated_extension = APIBasedExtensionService.save(created_extension)
|
||||
|
||||
# Verify extension was updated correctly
|
||||
assert updated_extension.id == created_extension.id
|
||||
assert updated_extension.tenant_id == tenant.id
|
||||
assert updated_extension.name == new_name
|
||||
assert updated_extension.api_endpoint == new_endpoint
|
||||
|
||||
# Verify original values were changed
|
||||
assert updated_extension.name != original_name
|
||||
assert updated_extension.api_endpoint != original_endpoint
|
||||
|
||||
# Verify ping connection was called for both create and update
|
||||
assert mock_external_service_dependencies["requestor_instance"].request.call_count == 2
|
||||
|
||||
# Verify the update by retrieving the extension again
|
||||
retrieved_extension = APIBasedExtensionService.get_with_tenant_id(tenant.id, created_extension.id)
|
||||
assert retrieved_extension.name == new_name
|
||||
assert retrieved_extension.api_endpoint == new_endpoint
|
||||
assert retrieved_extension.api_key == new_api_key # Should be decrypted when retrieved
|
||||
|
||||
def test_save_extension_connection_error(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test connection error when saving extension with invalid endpoint.
|
||||
"""
|
||||
fake = Faker()
|
||||
account, tenant = self._create_test_account_and_tenant(
|
||||
db_session_with_containers, mock_external_service_dependencies
|
||||
)
|
||||
|
||||
# Mock connection error
|
||||
mock_external_service_dependencies["requestor_instance"].request.side_effect = ValueError(
|
||||
"connection error: request timeout"
|
||||
)
|
||||
|
||||
# Setup extension data
|
||||
extension_data = APIBasedExtension()
|
||||
extension_data.tenant_id = tenant.id
|
||||
extension_data.name = fake.company()
|
||||
extension_data.api_endpoint = "https://invalid-endpoint.com/api"
|
||||
extension_data.api_key = fake.password(length=20)
|
||||
|
||||
# Try to save extension with connection error
|
||||
with pytest.raises(ValueError, match="connection error: request timeout"):
|
||||
APIBasedExtensionService.save(extension_data)
|
||||
|
||||
def test_save_extension_invalid_api_key_length(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test validation error when saving extension with API key that is too short.
|
||||
"""
|
||||
fake = Faker()
|
||||
account, tenant = self._create_test_account_and_tenant(
|
||||
db_session_with_containers, mock_external_service_dependencies
|
||||
)
|
||||
|
||||
# Setup extension data with short API key
|
||||
extension_data = APIBasedExtension()
|
||||
extension_data.tenant_id = tenant.id
|
||||
extension_data.name = fake.company()
|
||||
extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
|
||||
extension_data.api_key = "1234" # Less than 5 characters
|
||||
|
||||
# Try to save extension with short API key
|
||||
with pytest.raises(ValueError, match="api_key must be at least 5 characters"):
|
||||
APIBasedExtensionService.save(extension_data)
|
||||
|
||||
def test_save_extension_empty_fields(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test validation errors when saving extension with empty required fields.
|
||||
"""
|
||||
fake = Faker()
|
||||
account, tenant = self._create_test_account_and_tenant(
|
||||
db_session_with_containers, mock_external_service_dependencies
|
||||
)
|
||||
|
||||
# Test with None values
|
||||
extension_data = APIBasedExtension()
|
||||
extension_data.tenant_id = tenant.id
|
||||
extension_data.name = None
|
||||
extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
|
||||
extension_data.api_key = fake.password(length=20)
|
||||
|
||||
with pytest.raises(ValueError, match="name must not be empty"):
|
||||
APIBasedExtensionService.save(extension_data)
|
||||
|
||||
# Test with None api_endpoint
|
||||
extension_data.name = fake.company()
|
||||
extension_data.api_endpoint = None
|
||||
|
||||
with pytest.raises(ValueError, match="api_endpoint must not be empty"):
|
||||
APIBasedExtensionService.save(extension_data)
|
||||
|
||||
# Test with None api_key
|
||||
extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
|
||||
extension_data.api_key = None
|
||||
|
||||
with pytest.raises(ValueError, match="api_key must not be empty"):
|
||||
APIBasedExtensionService.save(extension_data)
|
||||
|
||||
def test_get_all_by_tenant_id_empty_list(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test retrieval of extensions when no extensions exist for tenant.
|
||||
"""
|
||||
fake = Faker()
|
||||
account, tenant = self._create_test_account_and_tenant(
|
||||
db_session_with_containers, mock_external_service_dependencies
|
||||
)
|
||||
|
||||
# Get all extensions for tenant (none exist)
|
||||
extension_list = APIBasedExtensionService.get_all_by_tenant_id(tenant.id)
|
||||
|
||||
# Verify empty list is returned
|
||||
assert len(extension_list) == 0
|
||||
assert extension_list == []
|
||||
|
||||
def test_save_extension_invalid_ping_response(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test validation error when ping response is invalid.
|
||||
"""
|
||||
fake = Faker()
|
||||
account, tenant = self._create_test_account_and_tenant(
|
||||
db_session_with_containers, mock_external_service_dependencies
|
||||
)
|
||||
|
||||
# Mock invalid ping response
|
||||
mock_external_service_dependencies["requestor_instance"].request.return_value = {"result": "invalid"}
|
||||
|
||||
# Setup extension data
|
||||
extension_data = APIBasedExtension()
|
||||
extension_data.tenant_id = tenant.id
|
||||
extension_data.name = fake.company()
|
||||
extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
|
||||
extension_data.api_key = fake.password(length=20)
|
||||
|
||||
# Try to save extension with invalid ping response
|
||||
with pytest.raises(ValueError, match="{'result': 'invalid'}"):
|
||||
APIBasedExtensionService.save(extension_data)
|
||||
|
||||
def test_save_extension_missing_ping_result(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test validation error when ping response is missing result field.
|
||||
"""
|
||||
fake = Faker()
|
||||
account, tenant = self._create_test_account_and_tenant(
|
||||
db_session_with_containers, mock_external_service_dependencies
|
||||
)
|
||||
|
||||
# Mock ping response without result field
|
||||
mock_external_service_dependencies["requestor_instance"].request.return_value = {"status": "ok"}
|
||||
|
||||
# Setup extension data
|
||||
extension_data = APIBasedExtension()
|
||||
extension_data.tenant_id = tenant.id
|
||||
extension_data.name = fake.company()
|
||||
extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
|
||||
extension_data.api_key = fake.password(length=20)
|
||||
|
||||
# Try to save extension with missing ping result
|
||||
with pytest.raises(ValueError, match="{'status': 'ok'}"):
|
||||
APIBasedExtensionService.save(extension_data)
|
||||
|
||||
def test_get_with_tenant_id_wrong_tenant(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test retrieval of extension when tenant ID doesn't match.
|
||||
"""
|
||||
fake = Faker()
|
||||
account1, tenant1 = self._create_test_account_and_tenant(
|
||||
db_session_with_containers, mock_external_service_dependencies
|
||||
)
|
||||
|
||||
# Create second account and tenant
|
||||
account2, tenant2 = self._create_test_account_and_tenant(
|
||||
db_session_with_containers, mock_external_service_dependencies
|
||||
)
|
||||
|
||||
# Create extension in first tenant
|
||||
extension_data = APIBasedExtension()
|
||||
extension_data.tenant_id = tenant1.id
|
||||
extension_data.name = fake.company()
|
||||
extension_data.api_endpoint = f"https://{fake.domain_name()}/api"
|
||||
extension_data.api_key = fake.password(length=20)
|
||||
|
||||
created_extension = APIBasedExtensionService.save(extension_data)
|
||||
|
||||
# Try to get extension with wrong tenant ID
|
||||
with pytest.raises(ValueError, match="API based extension is not found"):
|
||||
APIBasedExtensionService.get_with_tenant_id(tenant2.id, created_extension.id)
|
||||
|
|
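Taken together, the tests above pin down the validation order APIBasedExtensionService.save is expected to enforce. The sketch below condenses that contract into a self-contained function; the function name, the injected arguments, and the {"result": "pong"} success payload are assumptions inferred from the assertions, not the service's actual code.

def validate_extension_fields(
    name: str | None,
    api_endpoint: str | None,
    api_key: str | None,
    existing_names: set[str],
    ping_response: dict,
) -> None:
    # Empty checks run first, in field order: name, api_endpoint, api_key.
    if not name:
        raise ValueError("name must not be empty")
    if not api_endpoint:
        raise ValueError("api_endpoint must not be empty")
    if not api_key:
        raise ValueError("api_key must not be empty")
    # The length check only applies once the key is non-empty.
    if len(api_key) < 5:
        raise ValueError("api_key must be at least 5 characters")
    # Names are unique per tenant; the message mirrors the regex the test matches.
    if name in existing_names:
        raise ValueError("name must be unique, it is already existed")
    # The endpoint is pinged before persisting; any unexpected payload is
    # surfaced verbatim. The {"result": "pong"} success shape is an assumption.
    if ping_response != {"result": "pong"}:
        raise ValueError(str(ping_response))

Called as validate_extension_fields("", "https://example.com/api", "secret-key", set(), {"result": "pong"}), the sketch raises the same "name must not be empty" error the first test matches.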
@ -0,0 +1,473 @@
import json
from unittest.mock import MagicMock, patch

import pytest
import yaml
from faker import Faker

from models.model import App, AppModelConfig
from services.account_service import AccountService, TenantService
from services.app_dsl_service import AppDslService, ImportMode, ImportStatus
from services.app_service import AppService


class TestAppDslService:
    """Integration tests for AppDslService using testcontainers."""

    @pytest.fixture
    def mock_external_service_dependencies(self):
        """Mock setup for external service dependencies."""
        with (
            patch("services.app_dsl_service.WorkflowService") as mock_workflow_service,
            patch("services.app_dsl_service.DependenciesAnalysisService") as mock_dependencies_service,
            patch("services.app_dsl_service.WorkflowDraftVariableService") as mock_draft_variable_service,
            patch("services.app_dsl_service.ssrf_proxy") as mock_ssrf_proxy,
            patch("services.app_dsl_service.redis_client") as mock_redis_client,
            patch("services.app_dsl_service.app_was_created") as mock_app_was_created,
            patch("services.app_dsl_service.app_model_config_was_updated") as mock_app_model_config_was_updated,
            patch("services.app_service.ModelManager") as mock_model_manager,
            patch("services.app_service.FeatureService") as mock_feature_service,
            patch("services.app_service.EnterpriseService") as mock_enterprise_service,
        ):
            # Setup default mock returns
            mock_workflow_service.return_value.get_draft_workflow.return_value = None
            mock_workflow_service.return_value.sync_draft_workflow.return_value = MagicMock()
            mock_dependencies_service.generate_latest_dependencies.return_value = []
            mock_dependencies_service.get_leaked_dependencies.return_value = []
            mock_dependencies_service.generate_dependencies.return_value = []
            mock_draft_variable_service.return_value.delete_workflow_variables.return_value = None
            mock_ssrf_proxy.get.return_value.content = b"test content"
            mock_ssrf_proxy.get.return_value.raise_for_status.return_value = None
            mock_redis_client.setex.return_value = None
            mock_redis_client.get.return_value = None
            mock_redis_client.delete.return_value = None
            mock_app_was_created.send.return_value = None
            mock_app_model_config_was_updated.send.return_value = None

            # Mock ModelManager for app service
            mock_model_instance = mock_model_manager.return_value
            mock_model_instance.get_default_model_instance.return_value = None
            mock_model_instance.get_default_provider_model_name.return_value = ("openai", "gpt-3.5-turbo")

            # Mock FeatureService and EnterpriseService
            mock_feature_service.get_system_features.return_value.webapp_auth.enabled = False
            mock_enterprise_service.WebAppAuth.update_app_access_mode.return_value = None
            mock_enterprise_service.WebAppAuth.cleanup_webapp.return_value = None

            yield {
                "workflow_service": mock_workflow_service,
                "dependencies_service": mock_dependencies_service,
                "draft_variable_service": mock_draft_variable_service,
                "ssrf_proxy": mock_ssrf_proxy,
                "redis_client": mock_redis_client,
                "app_was_created": mock_app_was_created,
                "app_model_config_was_updated": mock_app_model_config_was_updated,
                "model_manager": mock_model_manager,
                "feature_service": mock_feature_service,
                "enterprise_service": mock_enterprise_service,
            }

    def _create_test_app_and_account(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Helper method to create a test app and account for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure
            mock_external_service_dependencies: Mock dependencies

        Returns:
            tuple: (app, account) - Created app and account instances
        """
        fake = Faker()

        # Setup mocks for account creation
        with patch("services.account_service.FeatureService") as mock_account_feature_service:
            mock_account_feature_service.get_system_features.return_value.is_allow_register = True

            # Create account and tenant first
            account = AccountService.create_account(
                email=fake.email(),
                name=fake.name(),
                interface_language="en-US",
                password=fake.password(length=12),
            )
            TenantService.create_owner_tenant_if_not_exist(account, name=fake.company())
            tenant = account.current_tenant

            # Setup app creation arguments
            app_args = {
                "name": fake.company(),
                "description": fake.text(max_nb_chars=100),
                "mode": "chat",
                "icon_type": "emoji",
                "icon": "🤖",
                "icon_background": "#FF6B6B",
                "api_rph": 100,
                "api_rpm": 10,
            }

            # Create app
            app_service = AppService()
            app = app_service.create_app(tenant.id, app_args, account)

            return app, account

    def _create_simple_yaml_content(self, app_name="Test App", app_mode="chat"):
        """
        Helper method to create simple YAML content for testing.
        """
        yaml_data = {
            "version": "0.3.0",
            "kind": "app",
            "app": {
                "name": app_name,
                "mode": app_mode,
                "icon": "🤖",
                "icon_background": "#FFEAD5",
                "description": "Test app description",
                "use_icon_as_answer_icon": False,
            },
            "model_config": {
                "model": {
                    "provider": "openai",
                    "name": "gpt-3.5-turbo",
                    "mode": "chat",
                    "completion_params": {
                        "max_tokens": 1000,
                        "temperature": 0.7,
                        "top_p": 1.0,
                    },
                },
                "pre_prompt": "You are a helpful assistant.",
                "prompt_type": "simple",
            },
        }
        return yaml.dump(yaml_data, allow_unicode=True)

    def test_import_app_yaml_content_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test successful app import from YAML content.
        """
        fake = Faker()
        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)

        # Create YAML content
        yaml_content = self._create_simple_yaml_content(fake.company(), "chat")

        # Import app
        dsl_service = AppDslService(db_session_with_containers)
        result = dsl_service.import_app(
            account=account,
            import_mode=ImportMode.YAML_CONTENT,
            yaml_content=yaml_content,
            name="Imported App",
            description="Imported app description",
        )

        # Verify import result
        assert result.status == ImportStatus.COMPLETED
        assert result.app_id is not None
        assert result.app_mode == "chat"
        assert result.imported_dsl_version == "0.3.0"
        assert result.error == ""

        # Verify app was created in database
        imported_app = db_session_with_containers.query(App).filter(App.id == result.app_id).first()
        assert imported_app is not None
        assert imported_app.name == "Imported App"
        assert imported_app.description == "Imported app description"
        assert imported_app.mode == "chat"
        assert imported_app.tenant_id == account.current_tenant_id
        assert imported_app.created_by == account.id

        # Verify model config was created
        model_config = (
            db_session_with_containers.query(AppModelConfig).filter(AppModelConfig.app_id == result.app_id).first()
        )
        assert model_config is not None
        # The provider and model_id are stored in the model field as JSON
        model_dict = model_config.model_dict
        assert model_dict["provider"] == "openai"
        assert model_dict["name"] == "gpt-3.5-turbo"

    def test_import_app_yaml_url_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test successful app import from YAML URL.
        """
        fake = Faker()
        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)

        # Create YAML content for mock response
        yaml_content = self._create_simple_yaml_content(fake.company(), "chat")

        # Setup mock response
        mock_response = MagicMock()
        mock_response.content = yaml_content.encode("utf-8")
        mock_response.raise_for_status.return_value = None
        mock_external_service_dependencies["ssrf_proxy"].get.return_value = mock_response

        # Import app from URL
        dsl_service = AppDslService(db_session_with_containers)
        result = dsl_service.import_app(
            account=account,
            import_mode=ImportMode.YAML_URL,
            yaml_url="https://example.com/app.yaml",
            name="URL Imported App",
            description="App imported from URL",
        )

        # Verify import result
        assert result.status == ImportStatus.COMPLETED
        assert result.app_id is not None
        assert result.app_mode == "chat"
        assert result.imported_dsl_version == "0.3.0"
        assert result.error == ""

        # Verify app was created in database
        imported_app = db_session_with_containers.query(App).filter(App.id == result.app_id).first()
        assert imported_app is not None
        assert imported_app.name == "URL Imported App"
        assert imported_app.description == "App imported from URL"
        assert imported_app.mode == "chat"
        assert imported_app.tenant_id == account.current_tenant_id

        # Verify ssrf_proxy was called
        mock_external_service_dependencies["ssrf_proxy"].get.assert_called_once_with(
            "https://example.com/app.yaml", follow_redirects=True, timeout=(10, 10)
        )

    def test_import_app_invalid_yaml_format(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test app import with invalid YAML format.
        """
        fake = Faker()
        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)

        # Create invalid YAML content
        invalid_yaml = "invalid: yaml: content: ["

        # Import app with invalid YAML
        dsl_service = AppDslService(db_session_with_containers)
        result = dsl_service.import_app(
            account=account,
            import_mode=ImportMode.YAML_CONTENT,
            yaml_content=invalid_yaml,
            name="Invalid App",
        )

        # Verify import failed
        assert result.status == ImportStatus.FAILED
        assert result.app_id is None
        assert "Invalid YAML format" in result.error
        assert result.imported_dsl_version == ""

        # Verify no app was created in database
        apps_count = db_session_with_containers.query(App).filter(App.tenant_id == account.current_tenant_id).count()
        assert apps_count == 1  # Only the original test app

    def test_import_app_missing_yaml_content(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test app import with missing YAML content.
        """
        fake = Faker()
        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)

        # Import app without YAML content
        dsl_service = AppDslService(db_session_with_containers)
        result = dsl_service.import_app(
            account=account,
            import_mode=ImportMode.YAML_CONTENT,
            name="Missing Content App",
        )

        # Verify import failed
        assert result.status == ImportStatus.FAILED
        assert result.app_id is None
        assert "yaml_content is required" in result.error
        assert result.imported_dsl_version == ""

        # Verify no app was created in database
        apps_count = db_session_with_containers.query(App).filter(App.tenant_id == account.current_tenant_id).count()
        assert apps_count == 1  # Only the original test app

    def test_import_app_missing_yaml_url(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test app import with missing YAML URL.
        """
        fake = Faker()
        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)

        # Import app without YAML URL
        dsl_service = AppDslService(db_session_with_containers)
        result = dsl_service.import_app(
            account=account,
            import_mode=ImportMode.YAML_URL,
            name="Missing URL App",
        )

        # Verify import failed
        assert result.status == ImportStatus.FAILED
        assert result.app_id is None
        assert "yaml_url is required" in result.error
        assert result.imported_dsl_version == ""

        # Verify no app was created in database
        apps_count = db_session_with_containers.query(App).filter(App.tenant_id == account.current_tenant_id).count()
        assert apps_count == 1  # Only the original test app

    def test_import_app_invalid_import_mode(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test app import with invalid import mode.
        """
        fake = Faker()
        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)

        # Create YAML content
        yaml_content = self._create_simple_yaml_content(fake.company(), "chat")

        # Import app with invalid mode should raise ValueError
        dsl_service = AppDslService(db_session_with_containers)
        with pytest.raises(ValueError, match="Invalid import_mode: invalid-mode"):
            dsl_service.import_app(
                account=account,
                import_mode="invalid-mode",
                yaml_content=yaml_content,
                name="Invalid Mode App",
            )

        # Verify no app was created in database
        apps_count = db_session_with_containers.query(App).filter(App.tenant_id == account.current_tenant_id).count()
        assert apps_count == 1  # Only the original test app

    def test_export_dsl_chat_app_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test successful DSL export for chat app.
        """
        fake = Faker()
        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)

        # Create model config for the app
        model_config = AppModelConfig()
        model_config.id = fake.uuid4()
        model_config.app_id = app.id
        model_config.provider = "openai"
        model_config.model_id = "gpt-3.5-turbo"
        model_config.model = json.dumps(
            {
                "provider": "openai",
                "name": "gpt-3.5-turbo",
                "mode": "chat",
                "completion_params": {
                    "max_tokens": 1000,
                    "temperature": 0.7,
                },
            }
        )
        model_config.pre_prompt = "You are a helpful assistant."
        model_config.prompt_type = "simple"
        model_config.created_by = account.id
        model_config.updated_by = account.id

        # Set the app_model_config_id to link the config
        app.app_model_config_id = model_config.id

        db_session_with_containers.add(model_config)
        db_session_with_containers.commit()

        # Export DSL
        exported_dsl = AppDslService.export_dsl(app, include_secret=False)

        # Parse exported YAML
        exported_data = yaml.safe_load(exported_dsl)

        # Verify exported data structure
        assert exported_data["kind"] == "app"
        assert exported_data["app"]["name"] == app.name
        assert exported_data["app"]["mode"] == app.mode
        assert exported_data["app"]["icon"] == app.icon
        assert exported_data["app"]["icon_background"] == app.icon_background
        assert exported_data["app"]["description"] == app.description

        # Verify model config was exported
        assert "model_config" in exported_data
        # The exported model_config structure may be different from the database structure
        # Check that the model config exists and has the expected content
        assert exported_data["model_config"] is not None

        # Verify dependencies were exported
        assert "dependencies" in exported_data
        assert isinstance(exported_data["dependencies"], list)

    def test_export_dsl_workflow_app_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test successful DSL export for workflow app.
        """
        fake = Faker()
        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)

        # Update app to workflow mode
        app.mode = "workflow"
        db_session_with_containers.commit()

        # Mock workflow service to return a workflow
        mock_workflow = MagicMock()
        mock_workflow.to_dict.return_value = {
            "graph": {"nodes": [{"id": "start", "type": "start", "data": {"type": "start"}}], "edges": []},
            "features": {},
            "environment_variables": [],
            "conversation_variables": [],
        }
        mock_external_service_dependencies[
            "workflow_service"
        ].return_value.get_draft_workflow.return_value = mock_workflow

        # Export DSL
        exported_dsl = AppDslService.export_dsl(app, include_secret=False)

        # Parse exported YAML
        exported_data = yaml.safe_load(exported_dsl)

        # Verify exported data structure
        assert exported_data["kind"] == "app"
        assert exported_data["app"]["name"] == app.name
        assert exported_data["app"]["mode"] == "workflow"

        # Verify workflow was exported
        assert "workflow" in exported_data
        assert "graph" in exported_data["workflow"]
        assert "nodes" in exported_data["workflow"]["graph"]

        # Verify dependencies were exported
        assert "dependencies" in exported_data
        assert isinstance(exported_data["dependencies"], list)

        # Verify workflow service was called
        mock_external_service_dependencies["workflow_service"].return_value.get_draft_workflow.assert_called_once_with(
            app
        )

    def test_check_dependencies_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test successful dependency checking.
        """
        fake = Faker()
        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)

        # Mock Redis to return dependencies
        mock_dependencies_json = '{"app_id": "' + app.id + '", "dependencies": []}'
        mock_external_service_dependencies["redis_client"].get.return_value = mock_dependencies_json

        # Check dependencies
        dsl_service = AppDslService(db_session_with_containers)
        result = dsl_service.check_dependencies(app_model=app)

        # Verify result
        assert result.leaked_dependencies == []

        # Verify Redis was queried
        mock_external_service_dependencies["redis_client"].get.assert_called_once_with(
            f"app_check_dependencies:{app.id}"
        )

        # Verify dependencies service was called
        mock_external_service_dependencies["dependencies_service"].get_leaked_dependencies.assert_called_once()
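The final test implies a simple caching contract for dependency checks: a JSON blob is stored in Redis under app_check_dependencies:<app_id>, and leaked (missing) dependencies are computed from whatever was cached. A minimal sketch of that flow, with injected callables standing in for redis_client.get and DependenciesAnalysisService.get_leaked_dependencies; the surrounding names and the cache-miss behavior are assumptions, not the service's actual code.

import json
from typing import Callable

def check_dependencies_sketch(
    redis_get: Callable[[str], str | None],
    get_leaked: Callable[[list], list],
    app_id: str,
) -> list:
    # A cache miss means there is nothing to report (assumed behavior).
    cached = redis_get(f"app_check_dependencies:{app_id}")
    if cached is None:
        return []
    dependencies = json.loads(cached).get("dependencies", [])
    # Only dependencies that are not installed are reported as leaked.
    return get_leaked(dependencies)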
@ -0,0 +1,419 @@
"""Test conversation variable handling in AdvancedChatAppRunner."""

from unittest.mock import MagicMock, patch
from uuid import uuid4

from sqlalchemy.orm import Session

from core.app.apps.advanced_chat.app_runner import AdvancedChatAppRunner
from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom
from core.variables import SegmentType
from factories import variable_factory
from models import ConversationVariable, Workflow


class TestAdvancedChatAppRunnerConversationVariables:
    """Test that AdvancedChatAppRunner correctly handles conversation variables."""

    def test_missing_conversation_variables_are_added(self):
        """Test that new conversation variables added to workflow are created for existing conversations."""
        # Setup
        app_id = str(uuid4())
        conversation_id = str(uuid4())
        workflow_id = str(uuid4())

        # Create workflow with two conversation variables
        workflow_vars = [
            variable_factory.build_conversation_variable_from_mapping(
                {
                    "id": "var1",
                    "name": "existing_var",
                    "value_type": SegmentType.STRING,
                    "value": "default1",
                }
            ),
            variable_factory.build_conversation_variable_from_mapping(
                {
                    "id": "var2",
                    "name": "new_var",
                    "value_type": SegmentType.STRING,
                    "value": "default2",
                }
            ),
        ]

        # Mock workflow with conversation variables
        mock_workflow = MagicMock(spec=Workflow)
        mock_workflow.conversation_variables = workflow_vars
        mock_workflow.tenant_id = str(uuid4())
        mock_workflow.app_id = app_id
        mock_workflow.id = workflow_id
        mock_workflow.type = "chat"
        mock_workflow.graph_dict = {}
        mock_workflow.environment_variables = []

        # Create existing conversation variable (only var1 exists in DB)
        existing_db_var = MagicMock(spec=ConversationVariable)
        existing_db_var.id = "var1"
        existing_db_var.app_id = app_id
        existing_db_var.conversation_id = conversation_id
        existing_db_var.to_variable = MagicMock(return_value=workflow_vars[0])

        # Mock conversation and message
        mock_conversation = MagicMock()
        mock_conversation.app_id = app_id
        mock_conversation.id = conversation_id

        mock_message = MagicMock()
        mock_message.id = str(uuid4())

        # Mock app config
        mock_app_config = MagicMock()
        mock_app_config.app_id = app_id
        mock_app_config.workflow_id = workflow_id
        mock_app_config.tenant_id = str(uuid4())

        # Mock app generate entity
        mock_app_generate_entity = MagicMock(spec=AdvancedChatAppGenerateEntity)
        mock_app_generate_entity.app_config = mock_app_config
        mock_app_generate_entity.inputs = {}
        mock_app_generate_entity.query = "test query"
        mock_app_generate_entity.files = []
        mock_app_generate_entity.user_id = str(uuid4())
        mock_app_generate_entity.invoke_from = InvokeFrom.SERVICE_API
        mock_app_generate_entity.workflow_run_id = str(uuid4())
        mock_app_generate_entity.call_depth = 0
        mock_app_generate_entity.single_iteration_run = None
        mock_app_generate_entity.single_loop_run = None
        mock_app_generate_entity.trace_manager = None

        # Create runner
        runner = AdvancedChatAppRunner(
            application_generate_entity=mock_app_generate_entity,
            queue_manager=MagicMock(),
            conversation=mock_conversation,
            message=mock_message,
            dialogue_count=1,
            variable_loader=MagicMock(),
            workflow=mock_workflow,
            system_user_id=str(uuid4()),
            app=MagicMock(),
        )

        # Mock database session
        mock_session = MagicMock(spec=Session)

        # First query returns only existing variable
        mock_scalars_result = MagicMock()
        mock_scalars_result.all.return_value = [existing_db_var]
        mock_session.scalars.return_value = mock_scalars_result

        # Track what gets added to session
        added_items = []

        def track_add_all(items):
            added_items.extend(items)

        mock_session.add_all.side_effect = track_add_all

        # Patch the necessary components
        with (
            patch("core.app.apps.advanced_chat.app_runner.Session") as mock_session_class,
            patch("core.app.apps.advanced_chat.app_runner.select") as mock_select,
            patch("core.app.apps.advanced_chat.app_runner.db") as mock_db,
            patch.object(runner, "_init_graph") as mock_init_graph,
            patch.object(runner, "handle_input_moderation", return_value=False),
            patch.object(runner, "handle_annotation_reply", return_value=False),
            patch("core.app.apps.advanced_chat.app_runner.WorkflowEntry") as mock_workflow_entry_class,
            patch("core.app.apps.advanced_chat.app_runner.VariablePool") as mock_variable_pool_class,
        ):
            # Setup mocks
            mock_session_class.return_value.__enter__.return_value = mock_session
            mock_db.session.query.return_value.where.return_value.first.return_value = MagicMock()  # App exists
            mock_db.engine = MagicMock()

            # Mock graph initialization
            mock_init_graph.return_value = MagicMock()

            # Mock workflow entry
            mock_workflow_entry = MagicMock()
            mock_workflow_entry.run.return_value = iter([])  # Empty generator
            mock_workflow_entry_class.return_value = mock_workflow_entry

            # Run the method
            runner.run()

            # Verify that the missing variable was added
            assert len(added_items) == 1, "Should have added exactly one missing variable"

            # Check that the added item is the missing variable (var2)
            added_var = added_items[0]
            assert hasattr(added_var, "id"), "Added item should be a ConversationVariable"
            # Note: Since we're mocking ConversationVariable.from_variable,
            # we can't directly check the id, but we can verify add_all was called
            assert mock_session.add_all.called, "Session add_all should have been called"
            assert mock_session.commit.called, "Session commit should have been called"

    def test_no_variables_creates_all(self):
        """Test that all conversation variables are created when none exist in DB."""
        # Setup
        app_id = str(uuid4())
        conversation_id = str(uuid4())
        workflow_id = str(uuid4())

        # Create workflow with conversation variables
        workflow_vars = [
            variable_factory.build_conversation_variable_from_mapping(
                {
                    "id": "var1",
                    "name": "var1",
                    "value_type": SegmentType.STRING,
                    "value": "default1",
                }
            ),
            variable_factory.build_conversation_variable_from_mapping(
                {
                    "id": "var2",
                    "name": "var2",
                    "value_type": SegmentType.STRING,
                    "value": "default2",
                }
            ),
        ]

        # Mock workflow
        mock_workflow = MagicMock(spec=Workflow)
        mock_workflow.conversation_variables = workflow_vars
        mock_workflow.tenant_id = str(uuid4())
        mock_workflow.app_id = app_id
        mock_workflow.id = workflow_id
        mock_workflow.type = "chat"
        mock_workflow.graph_dict = {}
        mock_workflow.environment_variables = []

        # Mock conversation and message
        mock_conversation = MagicMock()
        mock_conversation.app_id = app_id
        mock_conversation.id = conversation_id

        mock_message = MagicMock()
        mock_message.id = str(uuid4())

        # Mock app config
        mock_app_config = MagicMock()
        mock_app_config.app_id = app_id
        mock_app_config.workflow_id = workflow_id
        mock_app_config.tenant_id = str(uuid4())

        # Mock app generate entity
        mock_app_generate_entity = MagicMock(spec=AdvancedChatAppGenerateEntity)
        mock_app_generate_entity.app_config = mock_app_config
        mock_app_generate_entity.inputs = {}
        mock_app_generate_entity.query = "test query"
        mock_app_generate_entity.files = []
        mock_app_generate_entity.user_id = str(uuid4())
        mock_app_generate_entity.invoke_from = InvokeFrom.SERVICE_API
        mock_app_generate_entity.workflow_run_id = str(uuid4())
        mock_app_generate_entity.call_depth = 0
        mock_app_generate_entity.single_iteration_run = None
        mock_app_generate_entity.single_loop_run = None
        mock_app_generate_entity.trace_manager = None

        # Create runner
        runner = AdvancedChatAppRunner(
            application_generate_entity=mock_app_generate_entity,
            queue_manager=MagicMock(),
            conversation=mock_conversation,
            message=mock_message,
            dialogue_count=1,
            variable_loader=MagicMock(),
            workflow=mock_workflow,
            system_user_id=str(uuid4()),
            app=MagicMock(),
        )

        # Mock database session
        mock_session = MagicMock(spec=Session)

        # Query returns empty list (no existing variables)
        mock_scalars_result = MagicMock()
        mock_scalars_result.all.return_value = []
        mock_session.scalars.return_value = mock_scalars_result

        # Track what gets added to session
        added_items = []

        def track_add_all(items):
            added_items.extend(items)

        mock_session.add_all.side_effect = track_add_all

        # Patch the necessary components
        with (
            patch("core.app.apps.advanced_chat.app_runner.Session") as mock_session_class,
            patch("core.app.apps.advanced_chat.app_runner.select") as mock_select,
            patch("core.app.apps.advanced_chat.app_runner.db") as mock_db,
            patch.object(runner, "_init_graph") as mock_init_graph,
            patch.object(runner, "handle_input_moderation", return_value=False),
            patch.object(runner, "handle_annotation_reply", return_value=False),
            patch("core.app.apps.advanced_chat.app_runner.WorkflowEntry") as mock_workflow_entry_class,
            patch("core.app.apps.advanced_chat.app_runner.VariablePool") as mock_variable_pool_class,
            patch("core.app.apps.advanced_chat.app_runner.ConversationVariable") as mock_conv_var_class,
        ):
            # Setup mocks
            mock_session_class.return_value.__enter__.return_value = mock_session
            mock_db.session.query.return_value.where.return_value.first.return_value = MagicMock()  # App exists
            mock_db.engine = MagicMock()

            # Mock ConversationVariable.from_variable to return mock objects
            mock_conv_vars = []
            for var in workflow_vars:
                mock_cv = MagicMock()
                mock_cv.id = var.id
                mock_cv.to_variable.return_value = var
                mock_conv_vars.append(mock_cv)

            mock_conv_var_class.from_variable.side_effect = mock_conv_vars

            # Mock graph initialization
            mock_init_graph.return_value = MagicMock()

            # Mock workflow entry
            mock_workflow_entry = MagicMock()
            mock_workflow_entry.run.return_value = iter([])  # Empty generator
            mock_workflow_entry_class.return_value = mock_workflow_entry

            # Run the method
            runner.run()

            # Verify that all variables were created
            assert len(added_items) == 2, "Should have added both variables"
            assert mock_session.add_all.called, "Session add_all should have been called"
            assert mock_session.commit.called, "Session commit should have been called"

    def test_all_variables_exist_no_changes(self):
        """Test that no changes are made when all variables already exist in DB."""
        # Setup
        app_id = str(uuid4())
        conversation_id = str(uuid4())
        workflow_id = str(uuid4())

        # Create workflow with conversation variables
        workflow_vars = [
            variable_factory.build_conversation_variable_from_mapping(
                {
                    "id": "var1",
                    "name": "var1",
                    "value_type": SegmentType.STRING,
                    "value": "default1",
                }
            ),
            variable_factory.build_conversation_variable_from_mapping(
                {
                    "id": "var2",
                    "name": "var2",
                    "value_type": SegmentType.STRING,
                    "value": "default2",
                }
            ),
        ]

        # Mock workflow
        mock_workflow = MagicMock(spec=Workflow)
        mock_workflow.conversation_variables = workflow_vars
        mock_workflow.tenant_id = str(uuid4())
        mock_workflow.app_id = app_id
        mock_workflow.id = workflow_id
        mock_workflow.type = "chat"
        mock_workflow.graph_dict = {}
        mock_workflow.environment_variables = []

        # Create existing conversation variables (both exist in DB)
        existing_db_vars = []
        for var in workflow_vars:
            db_var = MagicMock(spec=ConversationVariable)
            db_var.id = var.id
            db_var.app_id = app_id
            db_var.conversation_id = conversation_id
            db_var.to_variable = MagicMock(return_value=var)
            existing_db_vars.append(db_var)

        # Mock conversation and message
        mock_conversation = MagicMock()
        mock_conversation.app_id = app_id
        mock_conversation.id = conversation_id

        mock_message = MagicMock()
        mock_message.id = str(uuid4())

        # Mock app config
        mock_app_config = MagicMock()
        mock_app_config.app_id = app_id
        mock_app_config.workflow_id = workflow_id
        mock_app_config.tenant_id = str(uuid4())

        # Mock app generate entity
        mock_app_generate_entity = MagicMock(spec=AdvancedChatAppGenerateEntity)
        mock_app_generate_entity.app_config = mock_app_config
        mock_app_generate_entity.inputs = {}
        mock_app_generate_entity.query = "test query"
        mock_app_generate_entity.files = []
        mock_app_generate_entity.user_id = str(uuid4())
        mock_app_generate_entity.invoke_from = InvokeFrom.SERVICE_API
        mock_app_generate_entity.workflow_run_id = str(uuid4())
        mock_app_generate_entity.call_depth = 0
        mock_app_generate_entity.single_iteration_run = None
        mock_app_generate_entity.single_loop_run = None
        mock_app_generate_entity.trace_manager = None

        # Create runner
        runner = AdvancedChatAppRunner(
            application_generate_entity=mock_app_generate_entity,
            queue_manager=MagicMock(),
            conversation=mock_conversation,
            message=mock_message,
            dialogue_count=1,
            variable_loader=MagicMock(),
            workflow=mock_workflow,
            system_user_id=str(uuid4()),
            app=MagicMock(),
        )

        # Mock database session
        mock_session = MagicMock(spec=Session)

        # Query returns all existing variables
        mock_scalars_result = MagicMock()
        mock_scalars_result.all.return_value = existing_db_vars
        mock_session.scalars.return_value = mock_scalars_result

        # Patch the necessary components
        with (
            patch("core.app.apps.advanced_chat.app_runner.Session") as mock_session_class,
            patch("core.app.apps.advanced_chat.app_runner.select") as mock_select,
            patch("core.app.apps.advanced_chat.app_runner.db") as mock_db,
            patch.object(runner, "_init_graph") as mock_init_graph,
            patch.object(runner, "handle_input_moderation", return_value=False),
            patch.object(runner, "handle_annotation_reply", return_value=False),
            patch("core.app.apps.advanced_chat.app_runner.WorkflowEntry") as mock_workflow_entry_class,
            patch("core.app.apps.advanced_chat.app_runner.VariablePool") as mock_variable_pool_class,
        ):
            # Setup mocks
            mock_session_class.return_value.__enter__.return_value = mock_session
            mock_db.session.query.return_value.where.return_value.first.return_value = MagicMock()  # App exists
            mock_db.engine = MagicMock()

            # Mock graph initialization
            mock_init_graph.return_value = MagicMock()

            # Mock workflow entry
            mock_workflow_entry = MagicMock()
            mock_workflow_entry.run.return_value = iter([])  # Empty generator
            mock_workflow_entry_class.return_value = mock_workflow_entry

            # Run the method
            runner.run()

            # Verify that no variables were added
            assert not mock_session.add_all.called, "Session add_all should not have been called"
            assert mock_session.commit.called, "Session commit should still be called"
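All three tests above assert the same reconciliation rule: the runner diffs the workflow's declared conversation variables against the rows already stored for the conversation, inserts only the missing ones, and commits either way. A self-contained sketch of that rule; the function and parameter names are illustrative, not the runner's actual API.

def sync_conversation_variables(session, workflow_vars, existing_rows, make_row):
    # Rows are matched to workflow variables by id.
    existing_ids = {row.id for row in existing_rows}
    missing = [var for var in workflow_vars if var.id not in existing_ids]
    if missing:
        # Only the gap is inserted; existing rows are left untouched.
        session.add_all([make_row(var) for var in missing])
    # The commit happens even when nothing was added, as the third test checks.
    session.commit()
    return missing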
@ -0,0 +1,127 @@
import uuid
from unittest.mock import MagicMock, patch

from core.app.entities.app_invoke_entities import InvokeFrom
from services.conversation_service import ConversationService


class TestConversationService:
    def test_pagination_with_empty_include_ids(self):
        """Test that empty include_ids returns empty result"""
        mock_session = MagicMock()
        mock_app_model = MagicMock(id=str(uuid.uuid4()))
        mock_user = MagicMock(id=str(uuid.uuid4()))

        result = ConversationService.pagination_by_last_id(
            session=mock_session,
            app_model=mock_app_model,
            user=mock_user,
            last_id=None,
            limit=20,
            invoke_from=InvokeFrom.WEB_APP,
            include_ids=[],  # Empty include_ids should return empty result
            exclude_ids=None,
        )

        assert result.data == []
        assert result.has_more is False
        assert result.limit == 20

    def test_pagination_with_non_empty_include_ids(self):
        """Test that non-empty include_ids filters properly"""
        mock_session = MagicMock()
        mock_app_model = MagicMock(id=str(uuid.uuid4()))
        mock_user = MagicMock(id=str(uuid.uuid4()))

        # Mock the query results
        mock_conversations = [MagicMock(id=str(uuid.uuid4())) for _ in range(3)]
        mock_session.scalars.return_value.all.return_value = mock_conversations
        mock_session.scalar.return_value = 0

        with patch("services.conversation_service.select") as mock_select:
            mock_stmt = MagicMock()
            mock_select.return_value = mock_stmt
            mock_stmt.where.return_value = mock_stmt
            mock_stmt.order_by.return_value = mock_stmt
            mock_stmt.limit.return_value = mock_stmt
            mock_stmt.subquery.return_value = MagicMock()

            result = ConversationService.pagination_by_last_id(
                session=mock_session,
                app_model=mock_app_model,
                user=mock_user,
                last_id=None,
                limit=20,
                invoke_from=InvokeFrom.WEB_APP,
                include_ids=["conv1", "conv2"],  # Non-empty include_ids
                exclude_ids=None,
            )

            # Verify the where clause was called with id.in_
            assert mock_stmt.where.called

    def test_pagination_with_empty_exclude_ids(self):
        """Test that empty exclude_ids doesn't filter"""
        mock_session = MagicMock()
        mock_app_model = MagicMock(id=str(uuid.uuid4()))
        mock_user = MagicMock(id=str(uuid.uuid4()))

        # Mock the query results
        mock_conversations = [MagicMock(id=str(uuid.uuid4())) for _ in range(5)]
        mock_session.scalars.return_value.all.return_value = mock_conversations
        mock_session.scalar.return_value = 0

        with patch("services.conversation_service.select") as mock_select:
            mock_stmt = MagicMock()
            mock_select.return_value = mock_stmt
            mock_stmt.where.return_value = mock_stmt
            mock_stmt.order_by.return_value = mock_stmt
            mock_stmt.limit.return_value = mock_stmt
            mock_stmt.subquery.return_value = MagicMock()

            result = ConversationService.pagination_by_last_id(
                session=mock_session,
                app_model=mock_app_model,
                user=mock_user,
                last_id=None,
                limit=20,
                invoke_from=InvokeFrom.WEB_APP,
                include_ids=None,
                exclude_ids=[],  # Empty exclude_ids should not filter
            )

            # Result should contain the mocked conversations
            assert len(result.data) == 5

    def test_pagination_with_non_empty_exclude_ids(self):
        """Test that non-empty exclude_ids filters properly"""
        mock_session = MagicMock()
        mock_app_model = MagicMock(id=str(uuid.uuid4()))
        mock_user = MagicMock(id=str(uuid.uuid4()))

        # Mock the query results
        mock_conversations = [MagicMock(id=str(uuid.uuid4())) for _ in range(3)]
        mock_session.scalars.return_value.all.return_value = mock_conversations
        mock_session.scalar.return_value = 0

        with patch("services.conversation_service.select") as mock_select:
            mock_stmt = MagicMock()
            mock_select.return_value = mock_stmt
            mock_stmt.where.return_value = mock_stmt
            mock_stmt.order_by.return_value = mock_stmt
            mock_stmt.limit.return_value = mock_stmt
            mock_stmt.subquery.return_value = MagicMock()

            result = ConversationService.pagination_by_last_id(
                session=mock_session,
                app_model=mock_app_model,
                user=mock_user,
                last_id=None,
                limit=20,
                invoke_from=InvokeFrom.WEB_APP,
                include_ids=None,
                exclude_ids=["conv1", "conv2"],  # Non-empty exclude_ids
            )

            # Verify the where clause was called for exclusion
            assert mock_stmt.where.called
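The four pagination tests encode an asymmetry worth noting: an explicitly empty include_ids short-circuits to an empty page, while an empty exclude_ids is treated as "no filter". A small sketch of that branching over plain id lists; this is purely illustrative, since the real service builds SQLAlchemy where-clauses rather than filtering Python lists.

def apply_id_filters(ids: list[str], include_ids: list[str] | None, exclude_ids: list[str] | None) -> list[str]:
    # An empty whitelist can match nothing, so return early with an empty page.
    if include_ids is not None and len(include_ids) == 0:
        return []
    if include_ids:
        ids = [i for i in ids if i in set(include_ids)]
    # An empty blacklist excludes nothing, so only filter when it is non-empty.
    if exclude_ids:
        ids = [i for i in ids if i not in set(exclude_ids)]
    return ids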
@ -4,7 +4,6 @@ import React, { useEffect, useMemo } from 'react'
|
|||
import { usePathname } from 'next/navigation'
|
||||
import useSWR from 'swr'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useBoolean } from 'ahooks'
|
||||
import {
|
||||
RiEqualizer2Fill,
|
||||
RiEqualizer2Line,
|
||||
|
|
@ -44,17 +43,12 @@ type IExtraInfoProps = {
|
|||
}
|
||||
|
||||
const ExtraInfo = ({ isMobile, relatedApps, expand }: IExtraInfoProps) => {
|
||||
const [isShowTips, { toggle: toggleTips, set: setShowTips }] = useBoolean(!isMobile)
|
||||
const { t } = useTranslation()
|
||||
const docLink = useDocLink()
|
||||
|
||||
const hasRelatedApps = relatedApps?.data && relatedApps?.data?.length > 0
|
||||
const relatedAppsTotal = relatedApps?.data?.length || 0
|
||||
|
||||
useEffect(() => {
|
||||
setShowTips(!isMobile)
|
||||
}, [isMobile, setShowTips])
|
||||
|
||||
return <div>
|
||||
{/* Related apps for desktop */}
|
||||
<div className={classNames(
|
||||
|
|
|
|||
|
|
@ -86,8 +86,8 @@ const Container = () => {
|
|||
}, [currentWorkspace, router])
|
||||
|
||||
return (
|
||||
<div ref={containerRef} className='scroll-container relative flex grow flex-col overflow-y-auto bg-background-body'>
|
||||
<div className='sticky top-0 z-10 flex h-[80px] shrink-0 flex-wrap items-center justify-between gap-y-2 bg-background-body px-12 pb-2 pt-4 leading-[56px]'>
|
||||
<div ref={containerRef} className={`scroll-container relative flex grow flex-col overflow-y-auto rounded-t-xl outline-none ${activeTab === 'dataset' ? 'bg-background-body' : 'bg-components-panel-bg'}`}>
|
||||
<div className={`sticky top-0 z-10 flex shrink-0 flex-wrap items-center justify-between gap-y-2 rounded-t-xl px-6 py-2 ${activeTab === 'api' ? 'border-b border-solid border-b-divider-regular' : ''} ${activeTab === 'dataset' ? 'bg-background-body' : 'bg-components-panel-bg'}`}>
|
||||
<TabSliderNew
|
||||
value={activeTab}
|
||||
onChange={newActiveTab => setActiveTab(newActiveTab)}
|
||||
|
|
|
|||
|
|
@ -193,7 +193,7 @@ const Doc = ({ apiBaseUrl }: DocProps) => {
|
|||
</button>
|
||||
)}
|
||||
</div>
|
||||
<article className={cn('prose-xl prose mx-1 rounded-t-xl bg-background-default px-4 pt-16 sm:mx-12', theme === Theme.dark && 'prose-invert')}>
|
||||
<article className={cn('prose-xl prose', theme === Theme.dark && 'prose-invert')}>
|
||||
{Template}
|
||||
</article>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -55,8 +55,6 @@ const SettingsModal: FC<SettingsModalProps> = ({
|
|||
const { data: embeddingsModelList } = useModelList(ModelTypeEnum.textEmbedding)
|
||||
const {
|
||||
modelList: rerankModelList,
|
||||
defaultModel: rerankDefaultModel,
|
||||
currentModel: isRerankDefaultModelValid,
|
||||
} = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank)
|
||||
const { t } = useTranslation()
|
||||
const docLink = useDocLink()
|
||||
|
|
|
|||
|
|
@ -78,7 +78,6 @@ const DatePicker = ({
|
|||
setCurrentDate(prev => getDateWithTimezone({ date: prev, timezone }))
|
||||
setSelectedDate(prev => prev ? getDateWithTimezone({ date: prev, timezone }) : undefined)
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [timezone])
|
||||
|
||||
const handleClickTrigger = (e: React.MouseEvent) => {
|
||||
|
|
@ -192,7 +191,7 @@ const DatePicker = ({
|
|||
setView(ViewType.date)
|
||||
}
|
||||
|
||||
const timeFormat = needTimePicker ? 'MMMM D, YYYY hh:mm A' : 'MMMM D, YYYY'
|
||||
const timeFormat = needTimePicker ? t('time.dateFormats.displayWithTime') : t('time.dateFormats.display')
|
||||
const displayValue = value?.format(timeFormat) || ''
|
||||
const displayTime = selectedDate?.format('hh:mm A') || '--:-- --'
|
||||
const placeholderDate = isOpen && selectedDate ? selectedDate.format(timeFormat) : (placeholder || t('time.defaultPlaceholder'))
|
||||
|
|
|
|||
|
|
@ -90,3 +90,49 @@ export const convertTimezoneToOffsetStr = (timezone?: string) => {
|
|||
return DEFAULT_OFFSET_STR
|
||||
return `UTC${tzItem.name.charAt(0)}${tzItem.name.charAt(2)}`
|
||||
}
|
||||
|
||||
// Parse date with multiple format support
|
||||
export const parseDateWithFormat = (dateString: string, format?: string): Dayjs | null => {
|
||||
if (!dateString) return null
|
||||
|
||||
// If format is specified, use it directly
|
||||
if (format) {
|
||||
const parsed = dayjs(dateString, format, true)
|
||||
return parsed.isValid() ? parsed : null
|
||||
}
|
||||
|
||||
// Try common date formats
|
||||
const formats = [
|
||||
'YYYY-MM-DD', // Standard format
|
||||
'YYYY/MM/DD', // Slash format
|
||||
'DD-MM-YYYY', // European format
|
||||
'DD/MM/YYYY', // European slash format
|
||||
'MM-DD-YYYY', // US format
|
||||
'MM/DD/YYYY', // US slash format
|
||||
'YYYY-MM-DDTHH:mm:ss.SSSZ', // ISO format
|
||||
'YYYY-MM-DDTHH:mm:ssZ', // ISO format (no milliseconds)
|
||||
'YYYY-MM-DD HH:mm:ss', // Standard datetime format
|
||||
]
|
||||
|
||||
for (const fmt of formats) {
|
||||
const parsed = dayjs(dateString, fmt, true)
|
||||
if (parsed.isValid())
|
||||
return parsed
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
// Format date output with localization support
|
||||
export const formatDateForOutput = (date: Dayjs, includeTime: boolean = false, locale: string = 'en-US'): string => {
|
||||
if (!date || !date.isValid()) return ''
|
||||
|
||||
if (includeTime) {
|
||||
// Output format with time
|
||||
return date.format('YYYY-MM-DDTHH:mm:ss.SSSZ')
|
||||
}
|
||||
else {
|
||||
// Date-only output format without timezone
|
||||
return date.format('YYYY-MM-DD')
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
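A minimal usage sketch of the two helpers above (hypothetical call site, not part of the diff; the import path is an assumption, and it presumes dayjs's `customParseFormat` plugin is registered somewhere in the app, since strict parsing via `dayjs(input, format, true)` depends on it):

```ts
import dayjs from 'dayjs'
import customParseFormat from 'dayjs/plugin/customParseFormat'
// Hypothetical import path for the helpers defined above.
import { formatDateForOutput, parseDateWithFormat } from './dayjs'

// Strict parsing with dayjs(input, format, true) requires this plugin.
dayjs.extend(customParseFormat)

const parsed = parseDateWithFormat('15/03/2024') // strict-matches 'DD/MM/YYYY'
if (parsed) {
  formatDateForOutput(parsed)       // '2024-03-15'
  formatDateForOutput(parsed, true) // ISO string with the runtime's local offset
}
```

Note that the fallback format list is order-sensitive: an ambiguous input like `03/04/2024` resolves to the European reading, because `DD/MM/YYYY` is tried before `MM/DD/YYYY`.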
@ -36,7 +36,7 @@ describe('file-uploader utils', () => {
  })

  describe('fileUpload', () => {
    it('should handle successful file upload', async () => {
    it('should handle successful file upload', () => {
      const mockFile = new File(['test'], 'test.txt')
      const mockCallbacks = {
        onProgressCallback: jest.fn(),

@ -46,13 +46,12 @@ describe('file-uploader utils', () => {

      jest.mocked(upload).mockResolvedValue({ id: '123' })

      await fileUpload({
      fileUpload({
        file: mockFile,
        ...mockCallbacks,
      })

      expect(upload).toHaveBeenCalled()
      expect(mockCallbacks.onSuccessCallback).toHaveBeenCalledWith({ id: '123' })
    })
  })

@ -284,7 +283,23 @@ describe('file-uploader utils', () => {
  })

  describe('getProcessedFilesFromResponse', () => {
    it('should process files correctly', () => {
    beforeEach(() => {
      jest.mocked(mime.getAllExtensions).mockImplementation((mimeType: string) => {
        const mimeMap: Record<string, Set<string>> = {
          'image/jpeg': new Set(['jpg', 'jpeg']),
          'image/png': new Set(['png']),
          'image/gif': new Set(['gif']),
          'video/mp4': new Set(['mp4']),
          'audio/mp3': new Set(['mp3']),
          'application/pdf': new Set(['pdf']),
          'text/plain': new Set(['txt']),
          'application/json': new Set(['json']),
        }
        return mimeMap[mimeType] || new Set()
      })
    })

    it('should process files correctly without type correction', () => {
      const files = [{
        related_id: '2a38e2ca-1295-415d-a51d-65d4ff9912d9',
        extension: '.jpeg',

@ -294,6 +309,8 @@ describe('file-uploader utils', () => {
        transfer_method: TransferMethod.local_file,
        type: 'image',
        url: 'https://upload.dify.dev/files/xxx/file-preview',
        upload_file_id: '2a38e2ca-1295-415d-a51d-65d4ff9912d9',
        remote_url: '',
      }]

      const result = getProcessedFilesFromResponse(files)

@ -309,6 +326,215 @@ describe('file-uploader utils', () => {
        url: 'https://upload.dify.dev/files/xxx/file-preview',
      })
    })

    it('should correct image file misclassified as document', () => {
      const files = [{
        related_id: '123',
        extension: '.jpg',
        filename: 'image.jpg',
        size: 1024,
        mime_type: 'image/jpeg',
        transfer_method: TransferMethod.local_file,
        type: 'document',
        url: 'https://example.com/image.jpg',
        upload_file_id: '123',
        remote_url: '',
      }]

      const result = getProcessedFilesFromResponse(files)
      expect(result[0].supportFileType).toBe('image')
    })

    it('should correct video file misclassified as document', () => {
      const files = [{
        related_id: '123',
        extension: '.mp4',
        filename: 'video.mp4',
        size: 1024,
        mime_type: 'video/mp4',
        transfer_method: TransferMethod.local_file,
        type: 'document',
        url: 'https://example.com/video.mp4',
        upload_file_id: '123',
        remote_url: '',
      }]

      const result = getProcessedFilesFromResponse(files)
      expect(result[0].supportFileType).toBe('video')
    })

    it('should correct audio file misclassified as document', () => {
      const files = [{
        related_id: '123',
        extension: '.mp3',
        filename: 'audio.mp3',
        size: 1024,
        mime_type: 'audio/mp3',
        transfer_method: TransferMethod.local_file,
        type: 'document',
        url: 'https://example.com/audio.mp3',
        upload_file_id: '123',
        remote_url: '',
      }]

      const result = getProcessedFilesFromResponse(files)
      expect(result[0].supportFileType).toBe('audio')
    })

    it('should correct document file misclassified as image', () => {
      const files = [{
        related_id: '123',
        extension: '.pdf',
        filename: 'document.pdf',
        size: 1024,
        mime_type: 'application/pdf',
        transfer_method: TransferMethod.local_file,
        type: 'image',
        url: 'https://example.com/document.pdf',
        upload_file_id: '123',
        remote_url: '',
      }]

      const result = getProcessedFilesFromResponse(files)
      expect(result[0].supportFileType).toBe('document')
    })

    it('should NOT correct when filename and MIME type conflict', () => {
      const files = [{
        related_id: '123',
        extension: '.pdf',
        filename: 'document.pdf',
        size: 1024,
        mime_type: 'image/jpeg',
        transfer_method: TransferMethod.local_file,
        type: 'document',
        url: 'https://example.com/document.pdf',
        upload_file_id: '123',
        remote_url: '',
      }]

      const result = getProcessedFilesFromResponse(files)
      expect(result[0].supportFileType).toBe('document')
    })

    it('should NOT correct when filename and MIME type both point to wrong type', () => {
      const files = [{
        related_id: '123',
        extension: '.jpg',
        filename: 'image.jpg',
        size: 1024,
        mime_type: 'image/jpeg',
        transfer_method: TransferMethod.local_file,
        type: 'image',
        url: 'https://example.com/image.jpg',
        upload_file_id: '123',
        remote_url: '',
      }]

      const result = getProcessedFilesFromResponse(files)
      expect(result[0].supportFileType).toBe('image')
    })

    it('should handle files with missing filename', () => {
      const files = [{
        related_id: '123',
        extension: '',
        filename: '',
        size: 1024,
        mime_type: 'image/jpeg',
        transfer_method: TransferMethod.local_file,
        type: 'document',
        url: 'https://example.com/file',
        upload_file_id: '123',
        remote_url: '',
      }]

      const result = getProcessedFilesFromResponse(files)
      expect(result[0].supportFileType).toBe('document')
    })

    it('should handle files with missing MIME type', () => {
      const files = [{
        related_id: '123',
        extension: '.jpg',
        filename: 'image.jpg',
        size: 1024,
        mime_type: '',
        transfer_method: TransferMethod.local_file,
        type: 'document',
        url: 'https://example.com/image.jpg',
        upload_file_id: '123',
        remote_url: '',
      }]

      const result = getProcessedFilesFromResponse(files)
      expect(result[0].supportFileType).toBe('document')
    })

    it('should handle files with unknown extensions', () => {
      const files = [{
        related_id: '123',
        extension: '.unknown',
        filename: 'file.unknown',
        size: 1024,
        mime_type: 'application/unknown',
        transfer_method: TransferMethod.local_file,
        type: 'document',
        url: 'https://example.com/file.unknown',
        upload_file_id: '123',
        remote_url: '',
      }]

      const result = getProcessedFilesFromResponse(files)
      expect(result[0].supportFileType).toBe('document')
    })

    it('should handle multiple different file types correctly', () => {
      const files = [
        {
          related_id: '1',
          extension: '.jpg',
          filename: 'correct-image.jpg',
          mime_type: 'image/jpeg',
          type: 'image',
          size: 1024,
          transfer_method: TransferMethod.local_file,
          url: 'https://example.com/correct-image.jpg',
          upload_file_id: '1',
          remote_url: '',
        },
        {
          related_id: '2',
          extension: '.png',
          filename: 'misclassified-image.png',
          mime_type: 'image/png',
          type: 'document',
          size: 2048,
          transfer_method: TransferMethod.local_file,
          url: 'https://example.com/misclassified-image.png',
          upload_file_id: '2',
          remote_url: '',
        },
        {
          related_id: '3',
          extension: '.pdf',
          filename: 'conflicted.pdf',
          mime_type: 'image/jpeg',
          type: 'document',
          size: 3072,
          transfer_method: TransferMethod.local_file,
          url: 'https://example.com/conflicted.pdf',
          upload_file_id: '3',
          remote_url: '',
        },
      ]

      const result = getProcessedFilesFromResponse(files)

      expect(result[0].supportFileType).toBe('image') // correct, no change
      expect(result[1].supportFileType).toBe('image') // corrected from document to image
      expect(result[2].supportFileType).toBe('document') // conflict, no change
    })
  })

  describe('getFileNameFromUrl', () => {

@ -70,10 +70,13 @@ export const getFileExtension = (fileName: string, fileMimetype: string, isRemot
    }
  }
  if (!extension) {
    if (extensions.size > 0)
      extension = extensions.values().next().value.toLowerCase()
    else
    if (extensions.size > 0) {
      const firstExtension = extensions.values().next().value
      extension = firstExtension ? firstExtension.toLowerCase() : ''
    }
    else {
      extension = extensionInFileName
    }
  }

  if (isRemote)

@ -145,6 +148,19 @@ export const getProcessedFiles = (files: FileEntity[]) => {

export const getProcessedFilesFromResponse = (files: FileResponse[]) => {
  return files.map((fileItem) => {
    let supportFileType = fileItem.type

    if (fileItem.filename && fileItem.mime_type) {
      const detectedTypeFromFileName = getSupportFileType(fileItem.filename, '')
      const detectedTypeFromMime = getSupportFileType('', fileItem.mime_type)

      if (detectedTypeFromFileName
        && detectedTypeFromMime
        && detectedTypeFromFileName === detectedTypeFromMime
        && detectedTypeFromFileName !== fileItem.type)
        supportFileType = detectedTypeFromFileName
    }

    return {
      id: fileItem.related_id,
      name: fileItem.filename,

@ -152,7 +168,7 @@ export const getProcessedFilesFromResponse = (files: FileResponse[]) => {
      type: fileItem.mime_type,
      progress: 100,
      transferMethod: fileItem.transfer_method,
      supportFileType: fileItem.type,
      supportFileType,
      uploadedId: fileItem.upload_file_id || fileItem.related_id,
      url: fileItem.url || fileItem.remote_url,
    }
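Taken together, the tests earlier in this section and the implementation just above pin down one predicate: the stored `type` is overridden only when the filename-derived and MIME-derived detections both exist, agree with each other, and contradict the stored value. A standalone distillation (hypothetical helper; `getSupportFileType` is the real detector it abstracts over):

```ts
type SupportType = 'image' | 'video' | 'audio' | 'document' | ''

const resolveSupportFileType = (
  stored: SupportType,
  fromFileName: SupportType,
  fromMime: SupportType,
): SupportType => {
  // Correct only when both signals exist, agree, and contradict the stored type.
  if (fromFileName && fromMime && fromFileName === fromMime && fromFileName !== stored)
    return fromFileName
  return stored // missing or conflicting signals: keep the stored type
}

resolveSupportFileType('document', 'image', 'image')    // 'image'    (corrected)
resolveSupportFileType('document', 'document', 'image') // 'document' (conflict, kept)
```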
@ -81,7 +81,6 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
  const echartsRef = useRef<any>(null)
  const contentRef = useRef<string>('')
  const processedRef = useRef<boolean>(false) // Track if content was successfully processed
  const instanceIdRef = useRef<string>(`chart-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`) // Unique ID for logging
  const isInitialRenderRef = useRef<boolean>(true) // Track if this is initial render
  const chartInstanceRef = useRef<any>(null) // Direct reference to ECharts instance
  const resizeTimerRef = useRef<NodeJS.Timeout | null>(null) // For debounce handling

@ -7,6 +7,7 @@ import TimePicker from '@/app/components/base/date-and-time-picker/time-picker'
import Checkbox from '@/app/components/base/checkbox'
import Select from '@/app/components/base/select'
import { useChatContext } from '@/app/components/base/chat/chat/context'
import { formatDateForOutput } from '@/app/components/base/date-and-time-picker/utils/dayjs'

enum DATA_FORMAT {
  TEXT = 'text',

@ -51,8 +52,20 @@ const MarkdownForm = ({ node }: any) => {
  const getFormValues = (children: any) => {
    const values: { [key: string]: any } = {}
    children.forEach((child: any) => {
      if ([SUPPORTED_TAGS.INPUT, SUPPORTED_TAGS.TEXTAREA].includes(child.tagName))
        values[child.properties.name] = formValues[child.properties.name]
      if ([SUPPORTED_TAGS.INPUT, SUPPORTED_TAGS.TEXTAREA].includes(child.tagName)) {
        let value = formValues[child.properties.name]

        if (child.tagName === SUPPORTED_TAGS.INPUT
          && (child.properties.type === SUPPORTED_TYPES.DATE || child.properties.type === SUPPORTED_TYPES.DATETIME)) {
          if (value && typeof value.format === 'function') {
            // Format date output consistently
            const includeTime = child.properties.type === SUPPORTED_TYPES.DATETIME
            value = formatDateForOutput(value, includeTime)
          }
        }

        values[child.properties.name] = value
      }
    })
    return values
  }

@ -1,4 +1,4 @@
import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import React, { useCallback, useEffect, useRef, useState } from 'react'
import mermaid, { type MermaidConfig } from 'mermaid'
import { useTranslation } from 'react-i18next'
import { ExclamationTriangleIcon } from '@heroicons/react/24/outline'

@ -122,14 +122,6 @@ const Flowchart = React.forwardRef((props: {
  const renderTimeoutRef = useRef<NodeJS.Timeout>()
  const [errMsg, setErrMsg] = useState('')
  const [imagePreviewUrl, setImagePreviewUrl] = useState('')
  const [isCodeComplete, setIsCodeComplete] = useState(false)
  const codeCompletionCheckRef = useRef<NodeJS.Timeout>()
  const prevCodeRef = useRef<string>()

  // Create cache key from code, style and theme
  const cacheKey = useMemo(() => {
    return `${props.PrimitiveCode}-${look}-${currentTheme}`
  }, [props.PrimitiveCode, look, currentTheme])

  /**
   * Renders Mermaid chart

@ -537,11 +529,9 @@ const Flowchart = React.forwardRef((props: {
      {isLoading && !svgString && (
        <div className='px-[26px] py-4'>
          <LoadingAnim type='text'/>
          {!isCodeComplete && (
            <div className="mt-2 text-sm text-gray-500">
              {t('common.wait_for_completion', 'Waiting for diagram code to complete...')}
            </div>
          )}
        </div>
      )}

@ -0,0 +1,61 @@
'use client'
import { Menu, MenuButton, MenuItem, MenuItems, Transition } from '@headlessui/react'
import { Fragment } from 'react'
import { GlobeAltIcon } from '@heroicons/react/24/outline'

type ISelectProps = {
  items: Array<{ value: string; name: string }>
  value?: string
  className?: string
  onChange?: (value: string) => void
}

export default function LocaleSigninSelect({
  items,
  value,
  onChange,
}: ISelectProps) {
  const item = items.filter(item => item.value === value)[0]

  return (
    <div className="w-56 text-right">
      <Menu as="div" className="relative inline-block text-left">
        <div>
          <MenuButton className="h-[44px]justify-center inline-flex w-full items-center rounded-lg border border-components-button-secondary-border px-[10px] py-[6px] text-[13px] font-medium text-text-primary hover:bg-state-base-hover">
            <GlobeAltIcon className="mr-1 h-5 w-5" aria-hidden="true" />
            {item?.name}
          </MenuButton>
        </div>
        <Transition
          as={Fragment}
          enter="transition ease-out duration-100"
          enterFrom="transform opacity-0 scale-95"
          enterTo="transform opacity-100 scale-100"
          leave="transition ease-in duration-75"
          leaveFrom="transform opacity-100 scale-100"
          leaveTo="transform opacity-0 scale-95"
        >
          <MenuItems className="absolute right-0 z-10 mt-2 w-[200px] origin-top-right divide-y divide-divider-regular rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur shadow-lg focus:outline-none">
            <div className="px-1 py-1 ">
              {items.map((item) => {
                return <MenuItem key={item.value}>
                  <button
                    className={'group flex w-full items-center rounded-lg px-3 py-2 text-sm text-text-secondary data-[active]:bg-state-base-hover'}
                    onClick={(evt) => {
                      evt.preventDefault()
                      onChange && onChange(item.value)
                    }}
                  >
                    {item.name}
                  </button>
                </MenuItem>
              })}

            </div>

          </MenuItems>
        </Transition>
      </Menu>
    </div>
  )
}

@ -25,8 +25,8 @@ const TabSliderNew: FC<TabSliderProps> = ({
          key={option.value}
          onClick={() => onChange(option.value)}
          className={cn(
            'mr-1 flex h-[32px] cursor-pointer items-center rounded-lg border-[0.5px] border-transparent px-3 py-[7px] text-[13px] font-medium leading-[18px] text-text-tertiary hover:bg-components-main-nav-nav-button-bg-active',
            value === option.value && 'border-components-main-nav-nav-button-border bg-components-main-nav-nav-button-bg-active text-components-main-nav-nav-button-text-active shadow-xs',
            'mr-1 flex h-[32px] cursor-pointer items-center rounded-lg border-[0.5px] border-transparent px-3 py-[7px] text-[13px] font-medium leading-[18px] text-text-tertiary hover:bg-state-base-hover',
            value === option.value && 'border-components-main-nav-nav-button-border bg-state-base-hover text-components-main-nav-nav-button-text-active shadow-xs',
          )}
        >
          {option.icon}

@ -1,4 +1,4 @@
import React, { useState } from 'react'
import React from 'react'
import type { FC } from 'react'
import { useTranslation } from 'react-i18next'
import { RiBookOpenLine } from '@remixicon/react'

@ -28,10 +28,8 @@ const Form: FC<FormProps> = React.memo(({
}) => {
  const { t, i18n } = useTranslation()
  const docLink = useDocLink()
  const [changeKey, setChangeKey] = useState('')

  const handleFormChange = (key: string, val: string) => {
    setChangeKey(key)
    if (key === 'name') {
      onChange({ ...value, [key]: val })
    }

@ -28,8 +28,8 @@ const RenameDatasetModal = ({ show, dataset, onSuccess, onClose }: RenameDataset
  const [loading, setLoading] = useState(false)
  const [name, setName] = useState<string>(dataset.name)
  const [description, setDescription] = useState<string>(dataset.description)
  const [externalKnowledgeId, setExternalKnowledgeId] = useState<string>(dataset.external_knowledge_info.external_knowledge_id)
  const [externalKnowledgeApiId, setExternalKnowledgeApiId] = useState<string>(dataset.external_knowledge_info.external_knowledge_api_id)
  const externalKnowledgeId = dataset.external_knowledge_info.external_knowledge_id
  const externalKnowledgeApiId = dataset.external_knowledge_info.external_knowledge_api_id

  const onConfirm: MouseEventHandler = async () => {
    if (!name.trim()) {

@ -23,7 +23,9 @@ const SecretKeyGenerateModal = ({
  const { t } = useTranslation()
  return (
    <Modal isShow={isShow} onClose={onClose} title={`${t('appApi.apiKeyModal.apiSecretKey')}`} className={`px-8 ${className}`}>
      <XMarkIcon className={`absolute h-6 w-6 cursor-pointer text-text-tertiary ${s.close}`} onClick={onClose} />
      <div className="-mr-2 -mt-6 mb-4 flex justify-end">
        <XMarkIcon className="h-6 w-6 cursor-pointer text-text-tertiary" onClick={onClose} />
      </div>
      <p className='mt-1 text-[13px] font-normal leading-5 text-text-tertiary'>{t('appApi.apiKeyModal.generateTips')}</p>
      <div className='my-4'>
        <InputCopy className='w-full' value={newKey?.token} />

@ -84,7 +84,9 @@ const SecretKeyModal = ({

  return (
    <Modal isShow={isShow} onClose={onClose} title={`${t('appApi.apiKeyModal.apiSecretKey')}`} className={`${s.customModal} flex flex-col px-8`}>
      <XMarkIcon className={`absolute h-6 w-6 cursor-pointer text-text-tertiary ${s.close}`} onClick={onClose} />
      <div className="-mr-2 -mt-6 mb-4 flex justify-end">
        <XMarkIcon className="h-6 w-6 cursor-pointer text-text-tertiary" onClick={onClose} />
      </div>
      <p className='mt-1 shrink-0 text-[13px] font-normal leading-5 text-text-tertiary'>{t('appApi.apiKeyModal.apiSecretKeyTips')}</p>
      {!apiKeysList && <div className='mt-4'><Loading /></div>}
      {

@ -392,6 +392,86 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
</Row>
---

<Heading
  url='/files/:file_id/preview'
  method='GET'
  title='ファイルプレビュー'
  name='#file-preview'
/>
<Row>
  <Col>
    アップロードされたファイルをプレビューまたはダウンロードします。このエンドポイントを使用すると、以前にファイルアップロード API でアップロードされたファイルにアクセスできます。

    <i>ファイルは、リクエストしているアプリケーションのメッセージ範囲内にある場合のみアクセス可能です。</i>

    ### パスパラメータ
    - `file_id` (string) 必須
      プレビューするファイルの一意識別子。ファイルアップロード API レスポンスから取得します。

    ### クエリパラメータ
    - `as_attachment` (boolean) オプション
      ファイルを添付ファイルとして強制ダウンロードするかどうか。デフォルトは `false`(ブラウザでプレビュー)。

    ### レスポンス
    ブラウザ表示またはダウンロード用の適切なヘッダー付きでファイル内容を返します。
    - `Content-Type` ファイル MIME タイプに基づいて設定
    - `Content-Length` ファイルサイズ(バイト、利用可能な場合)
    - `Content-Disposition` `as_attachment=true` の場合は "attachment" に設定
    - `Cache-Control` パフォーマンス向上のためのキャッシュヘッダー
    - `Accept-Ranges` 音声/動画ファイルの場合は "bytes" に設定

    ### エラー
    - 400, `invalid_param`, パラメータ入力異常
    - 403, `file_access_denied`, ファイルアクセス拒否またはファイルが現在のアプリケーションに属していません
    - 404, `file_not_found`, ファイルが見つからないか削除されています
    - 500, サーバー内部エラー

  </Col>
  <Col sticky>
    ### リクエスト例
    <CodeGroup title="Request" tag="GET" label="/files/:file_id/preview" targetCode={`curl -X GET '${props.appDetail.api_base_url}/files/72fa9618-8f89-4a37-9b33-7e1178a24a67/preview' \\\n--header 'Authorization: Bearer {api_key}'`}>

    ```bash {{ title: 'cURL - ブラウザプレビュー' }}
    curl -X GET '${props.appDetail.api_base_url}/files/72fa9618-8f89-4a37-9b33-7e1178a24a67/preview' \
    --header 'Authorization: Bearer {api_key}'
    ```

    </CodeGroup>

    ### 添付ファイルとしてダウンロード
    <CodeGroup title="Download Request" tag="GET" label="/files/:file_id/preview?as_attachment=true" targetCode={`curl -X GET '${props.appDetail.api_base_url}/files/72fa9618-8f89-4a37-9b33-7e1178a24a67/preview?as_attachment=true' \\\n--header 'Authorization: Bearer {api_key}' \\\n--output downloaded_file.png`}>

    ```bash {{ title: 'cURL' }}
    curl -X GET '${props.appDetail.api_base_url}/files/72fa9618-8f89-4a37-9b33-7e1178a24a67/preview?as_attachment=true' \
    --header 'Authorization: Bearer {api_key}' \
    --output downloaded_file.png
    ```

    </CodeGroup>

    ### レスポンスヘッダー例
    <CodeGroup title="Response Headers">
    ```http {{ title: 'ヘッダー - 画像プレビュー' }}
    Content-Type: image/png
    Content-Length: 1024
    Cache-Control: public, max-age=3600
    ```
    </CodeGroup>

    ### ダウンロードレスポンスヘッダー
    <CodeGroup title="Download Response Headers">
    ```http {{ title: 'ヘッダー - ファイルダウンロード' }}
    Content-Type: image/png
    Content-Length: 1024
    Content-Disposition: attachment; filename*=UTF-8''example.png
    Cache-Control: public, max-age=3600
    ```
    </CodeGroup>
  </Col>
</Row>

---

<Heading
  url='/chat-messages/:task_id/stop'
  method='POST'

@ -1422,86 +1502,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
</Row>
---

<Heading
  url='/files/:file_id/preview'
  method='GET'
  title='ファイルプレビュー'
  name='#file-preview'
/>
<Row>
  <Col>
    アップロードされたファイルをプレビューまたはダウンロードします。このエンドポイントを使用すると、以前にファイルアップロード API でアップロードされたファイルにアクセスできます。

    <i>ファイルは、リクエストしているアプリケーションのメッセージ範囲内にある場合のみアクセス可能です。</i>

    ### パスパラメータ
    - `file_id` (string) 必須
      プレビューするファイルの一意識別子。ファイルアップロード API レスポンスから取得します。

    ### クエリパラメータ
    - `as_attachment` (boolean) オプション
      ファイルを添付ファイルとして強制ダウンロードするかどうか。デフォルトは `false`(ブラウザでプレビュー)。

    ### レスポンス
    ブラウザ表示またはダウンロード用の適切なヘッダー付きでファイル内容を返します。
    - `Content-Type` ファイル MIME タイプに基づいて設定
    - `Content-Length` ファイルサイズ(バイト、利用可能な場合)
    - `Content-Disposition` `as_attachment=true` の場合は "attachment" に設定
    - `Cache-Control` パフォーマンス向上のためのキャッシュヘッダー
    - `Accept-Ranges` 音声/動画ファイルの場合は "bytes" に設定

    ### エラー
    - 400, `invalid_param`, パラメータ入力異常
    - 403, `file_access_denied`, ファイルアクセス拒否またはファイルが現在のアプリケーションに属していません
    - 404, `file_not_found`, ファイルが見つからないか削除されています
    - 500, サーバー内部エラー

  </Col>
  <Col sticky>
    ### リクエスト例
    <CodeGroup title="Request" tag="GET" label="/files/:file_id/preview" targetCode={`curl -X GET '${props.appDetail.api_base_url}/files/72fa9618-8f89-4a37-9b33-7e1178a24a67/preview' \\\n--header 'Authorization: Bearer {api_key}'`}>

    ```bash {{ title: 'cURL - ブラウザプレビュー' }}
    curl -X GET '${props.appDetail.api_base_url}/files/72fa9618-8f89-4a37-9b33-7e1178a24a67/preview' \
    --header 'Authorization: Bearer {api_key}'
    ```

    </CodeGroup>

    ### 添付ファイルとしてダウンロード
    <CodeGroup title="Download Request" tag="GET" label="/files/:file_id/preview?as_attachment=true" targetCode={`curl -X GET '${props.appDetail.api_base_url}/files/72fa9618-8f89-4a37-9b33-7e1178a24a67/preview?as_attachment=true' \\\n--header 'Authorization: Bearer {api_key}' \\\n--output downloaded_file.png`}>

    ```bash {{ title: 'cURL' }}
    curl -X GET '${props.appDetail.api_base_url}/files/72fa9618-8f89-4a37-9b33-7e1178a24a67/preview?as_attachment=true' \
    --header 'Authorization: Bearer {api_key}' \
    --output downloaded_file.png
    ```

    </CodeGroup>

    ### レスポンスヘッダー例
    <CodeGroup title="Response Headers">
    ```http {{ title: 'ヘッダー - 画像プレビュー' }}
    Content-Type: image/png
    Content-Length: 1024
    Cache-Control: public, max-age=3600
    ```
    </CodeGroup>

    ### ダウンロードレスポンスヘッダー
    <CodeGroup title="Download Response Headers">
    ```http {{ title: 'ヘッダー - ファイルダウンロード' }}
    Content-Type: image/png
    Content-Length: 1024
    Content-Disposition: attachment; filename*=UTF-8''example.png
    Cache-Control: public, max-age=3600
    ```
    </CodeGroup>
  </Col>
</Row>

---

<Heading
  url='/meta'
  method='GET'

@ -51,7 +51,6 @@ const Apps = ({
    handleSearch()
  }

  const [currentType, setCurrentType] = useState<string>('')
  const [currCategory, setCurrCategory] = useTabSearchParams({
    defaultTab: allCategoriesEn,
    disableSearchParams: false,

@ -74,28 +73,7 @@ const Apps = ({
  },
  )

  const filteredList = useMemo(() => {
    if (currCategory === allCategoriesEn) {
      if (!currentType)
        return allList
      else if (currentType === 'chatbot')
        return allList.filter(item => (item.app.mode === 'chat' || item.app.mode === 'advanced-chat'))
      else if (currentType === 'agent')
        return allList.filter(item => (item.app.mode === 'agent-chat'))
      else
        return allList.filter(item => (item.app.mode === 'workflow'))
    }
    else {
      if (!currentType)
        return allList.filter(item => item.category === currCategory)
      else if (currentType === 'chatbot')
        return allList.filter(item => (item.app.mode === 'chat' || item.app.mode === 'advanced-chat') && item.category === currCategory)
      else if (currentType === 'agent')
        return allList.filter(item => (item.app.mode === 'agent-chat') && item.category === currCategory)
      else
        return allList.filter(item => (item.app.mode === 'workflow') && item.category === currCategory)
    }
  }, [currentType, currCategory, allCategoriesEn, allList])
  const filteredList = allList.filter(item => currCategory === allCategoriesEn || item.category === currCategory)

  const searchFilteredList = useMemo(() => {
    if (!searchKeywords || !filteredList || filteredList.length === 0)

@ -49,7 +49,6 @@ const SideBar: FC<IExploreSideBarProps> = ({
  const segments = useSelectedLayoutSegments()
  const lastSegment = segments.slice(-1)[0]
  const isDiscoverySelected = lastSegment === 'apps'
  const isChatSelected = lastSegment === 'chat'
  const { installedApps, setInstalledApps, setIsFetchingInstalledApps } = useContext(ExploreContext)
  const { isFetching: isFetchingInstalledApps, data: ret, refetch: fetchInstalledAppList } = useGetInstalledApps()
  const { mutateAsync: uninstallApp } = useUninstallApp()

@ -1,11 +1,10 @@
'use client'
import React, { useCallback, useEffect, useRef, useState } from 'react'
import React, { useCallback, useEffect, useState } from 'react'
import { t } from 'i18next'
import copy from 'copy-to-clipboard'
import s from './index.module.css'
import type { SuccessInvitationResult } from '.'
import Tooltip from '@/app/components/base/tooltip'
import { randomString } from '@/utils'

type IInvitationLinkProps = {
  value: SuccessInvitationResult

@ -15,7 +14,6 @@ const InvitationLink = ({
  value,
}: IInvitationLinkProps) => {
  const [isCopied, setIsCopied] = useState(false)
  const selector = useRef(`invite-link-${randomString(4)}`)

  const copyHandle = useCallback(() => {
    // No prefix is needed here because the backend has already processed it

@ -1,7 +1,6 @@
import type { FC } from 'react'
import { useEffect, useRef, useState } from 'react'
import type { ModelParameterRule } from '../declarations'
import { useLanguage } from '../hooks'
import { isNullOrUndefined } from '../utils'
import cn from '@/utils/classnames'
import Switch from '@/app/components/base/switch'

@ -10,7 +9,6 @@ import Slider from '@/app/components/base/slider'
import Radio from '@/app/components/base/radio'
import { SimpleSelect } from '@/app/components/base/select'
import TagInput from '@/app/components/base/tag-input'
import { useTranslation } from 'react-i18next'

export type ParameterValue = number | string | string[] | boolean | undefined

@ -28,8 +26,6 @@ const ParameterItem: FC<ParameterItemProps> = ({
  onSwitch,
  isInWorkflow,
}) => {
  const { t } = useTranslation()
  const language = useLanguage()
  const [localValue, setLocalValue] = useState(value)
  const numberInputRef = useRef<HTMLInputElement>(null)

@ -13,7 +13,7 @@ const HeaderWrapper = ({
  children,
}: HeaderWrapperProps) => {
  const pathname = usePathname()
  const isBordered = ['/apps', '/datasets', '/datasets/create', '/tools'].includes(pathname)
  const isBordered = ['/apps', '/datasets/create', '/tools'].includes(pathname)
  // Check if the current path is a workflow canvas & fullscreen
  const inWorkflowCanvas = pathname.endsWith('/workflow')
  const workflowCanvasMaximize = localStorage.getItem('workflow-canvas-maximize') === 'true'

@ -30,7 +30,7 @@ const TagsFilter = ({
  const { t } = useMixedTranslation(locale)
  const [open, setOpen] = useState(false)
  const [searchText, setSearchText] = useState('')
  const { tags: options, tagsMap } = useTags(t)
  const { tags: options } = useTags(t)
  const filteredOptions = options.filter(option => option.label.toLowerCase().includes(searchText.toLowerCase()))
  const handleCheck = (id: string) => {
    if (tags.includes(id))

@ -38,7 +38,6 @@ const TagsFilter = ({
    else
      onTagsChange([...tags, id])
  }
  const selectedTagsLength = tags.length

  return (
    <PortalToFollowElem

@ -45,7 +45,7 @@ export const getPluginDetailLinkInMarketplace = (plugin: Plugin) => {
}

export const getMarketplacePluginsByCollectionId = async (collectionId: string, query?: CollectionsAndPluginsSearchParams) => {
  let plugins = [] as Plugin[]
  let plugins: Plugin[]

  try {
    const url = `${MARKETPLACE_API_PREFIX}/collections/${collectionId}/plugins`

@ -50,9 +50,9 @@ const PluginSettingModal: FC<Props> = ({
      isShow
      onClose={onHide}
      closable
      className='w-[480px] !p-0'
      className='w-[620px] max-w-[620px] !p-0'
    >
      <div className='shadows-shadow-xl flex w-[480px] flex-col items-start rounded-2xl border border-components-panel-border bg-components-panel-bg'>
      <div className='shadows-shadow-xl flex w-full flex-col items-start rounded-2xl border border-components-panel-border bg-components-panel-bg'>
        <div className='flex items-start gap-2 self-stretch pb-3 pl-6 pr-14 pt-6'>
          <span className='title-2xl-semi-bold self-stretch text-text-primary'>{t(`${i18nPrefix}.title`)}</span>
        </div>

@ -151,10 +151,9 @@ const TextGeneration: FC<IMainProps> = ({
  const pendingTaskList = allTaskList.filter(task => task.status === TaskStatus.pending)
  const noPendingTask = pendingTaskList.length === 0
  const showTaskList = allTaskList.filter(task => task.status !== TaskStatus.pending)
  const [currGroupNum, doSetCurrGroupNum] = useState(0)
  const currGroupNumRef = useRef(0)

  const setCurrGroupNum = (num: number) => {
    doSetCurrGroupNum(num)
    currGroupNumRef.current = num
  }
  const getCurrGroupNum = () => {

@ -164,10 +163,8 @@ const TextGeneration: FC<IMainProps> = ({
  const allFailedTaskList = allTaskList.filter(task => task.status === TaskStatus.failed)
  const allTasksFinished = allTaskList.every(task => task.status === TaskStatus.completed)
  const allTasksRun = allTaskList.every(task => [TaskStatus.completed, TaskStatus.failed].includes(task.status))
  const [batchCompletionRes, doSetBatchCompletionRes] = useState<Record<string, string>>({})
  const batchCompletionResRef = useRef<Record<string, string>>({})
  const setBatchCompletionRes = (res: Record<string, string>) => {
    doSetBatchCompletionRes(res)
    batchCompletionResRef.current = res
  }
  const getBatchCompletionRes = () => batchCompletionResRef.current
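The `doSetCurrGroupNum`/`currGroupNumRef` and `doSetBatchCompletionRes`/`batchCompletionResRef` pairs above mirror state into a ref so long-lived async callbacks (such as streaming handlers) read the latest value rather than a stale closure. A generic sketch of the same pattern (hypothetical hook, not from the diff):

```ts
import { useRef, useState } from 'react'

// State drives re-renders; the ref gives async callbacks a stale-proof read path.
function useStateWithRef<T>(initial: T) {
  const [value, setValue] = useState(initial)
  const ref = useRef(initial)
  const set = (next: T) => {
    setValue(next)
    ref.current = next
  }
  const get = () => ref.current // always current, even inside an old closure
  return [value, set, get] as const
}
```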
@ -23,7 +23,7 @@ const useStickyScroll = ({
      return
    const { height: wrapHeight, top: wrapTop } = wrapDom.getBoundingClientRect()
    const { top: nextToStickyTop } = stickyDOM.getBoundingClientRect()
    let scrollPositionNew = ScrollPosition.belowTheWrap
    let scrollPositionNew: ScrollPosition

    if (nextToStickyTop - wrapTop >= wrapHeight)
      scrollPositionNew = ScrollPosition.belowTheWrap

@ -444,7 +444,7 @@ export const useFetchToolsData = () => {
        workflowTools: workflowTools || [],
      })
    }
    if(type === 'mcp') {
    if (type === 'mcp') {
      const mcpTools = await fetchAllMCPTools()

      workflowStore.setState({

@ -500,18 +500,17 @@ export const useToolIcon = (data: Node['data']) => {
  const mcpTools = useStore(s => s.mcpTools)

  const toolIcon = useMemo(() => {
    if(!data)
    if (!data)
      return ''
    if (data.type === BlockEnum.Tool) {
      let targetTools = buildInTools
      let targetTools = workflowTools
      if (data.provider_type === CollectionType.builtIn)
        targetTools = buildInTools
      else if (data.provider_type === CollectionType.custom)
        targetTools = customTools
      else if (data.provider_type === CollectionType.mcp)
        targetTools = mcpTools
      else
        targetTools = workflowTools

      return targetTools.find(toolWithProvider => canFindTool(toolWithProvider.id, data.provider_id))?.icon
    }
  }, [data, buildInTools, customTools, mcpTools, workflowTools])

@ -181,7 +181,7 @@ const FormItem: FC<Props> = ({
            value={singleFileValue}
            onChange={handleSingleFileChange}
            fileConfig={{
              allowed_file_types: inStepRun
              allowed_file_types: inStepRun && (!payload.allowed_file_types || payload.allowed_file_types.length === 0)
                ? [
                  SupportUploadFileTypes.image,
                  SupportUploadFileTypes.document,

@ -189,7 +189,7 @@ const FormItem: FC<Props> = ({
                  SupportUploadFileTypes.video,
                ]
                : payload.allowed_file_types,
              allowed_file_extensions: inStepRun
              allowed_file_extensions: inStepRun && (!payload.allowed_file_extensions || payload.allowed_file_extensions.length === 0)
                ? [
                  ...FILE_EXTS[SupportUploadFileTypes.image],
                  ...FILE_EXTS[SupportUploadFileTypes.document],

@ -208,7 +208,7 @@ const FormItem: FC<Props> = ({
            value={value}
            onChange={files => onChange(files)}
            fileConfig={{
              allowed_file_types: (inStepRun || isIteratorItemFile)
              allowed_file_types: (inStepRun || isIteratorItemFile) && (!payload.allowed_file_types || payload.allowed_file_types.length === 0)
                ? [
                  SupportUploadFileTypes.image,
                  SupportUploadFileTypes.document,

@ -216,7 +216,7 @@ const FormItem: FC<Props> = ({
                  SupportUploadFileTypes.video,
                ]
                : payload.allowed_file_types,
              allowed_file_extensions: (inStepRun || isIteratorItemFile)
              allowed_file_extensions: (inStepRun || isIteratorItemFile) && (!payload.allowed_file_extensions || payload.allowed_file_extensions.length === 0)
                ? [
                  ...FILE_EXTS[SupportUploadFileTypes.image],
                  ...FILE_EXTS[SupportUploadFileTypes.document],

@ -11,7 +11,6 @@ import {
import {
  useNodeDataUpdate,
  useNodesInteractions,
  useNodesSyncDraft,
} from '../../../hooks'
import { type Node, NodeRunningStatus } from '../../../types'
import { canRunBySingle } from '../../../utils'

@ -30,7 +29,6 @@ const NodeControl: FC<NodeControlProps> = ({
  const [open, setOpen] = useState(false)
  const { handleNodeDataUpdate } = useNodeDataUpdate()
  const { handleNodeSelect } = useNodesInteractions()
  const { handleSyncWorkflowDraft } = useNodesSyncDraft()
  const isSingleRunning = data._singleRunningStatus === NodeRunningStatus.Running
  const handleOpenChange = useCallback((newOpen: boolean) => {
    setOpen(newOpen)

@ -198,7 +198,6 @@ const BasePanel: FC<BasePanelProps> = ({
    isShowSingleRun,
    hideSingleRun,
    runningStatus,
    handleStop,
    runInputData,
    runInputDataRef,
    runResult,

@ -36,6 +36,7 @@ const useConfig = (id: string, payload: ListFilterNodeType) => {
  const { inputs, setInputs } = useNodeCrud<ListFilterNodeType>(id, payload)

  const { getCurrentVariableType } = useWorkflowVariables()

  const getType = useCallback((variable?: ValueSelector) => {
    const varType = getCurrentVariableType({
      parentNode: isInIteration ? iterationNode : loopNode,

@ -44,7 +45,7 @@ const useConfig = (id: string, payload: ListFilterNodeType) => {
      isChatMode,
      isConstant: false,
    })
    let itemVarType = VarType.string
    let itemVarType = varType
    switch (varType) {
      case VarType.arrayNumber:
        itemVarType = VarType.number

@ -58,8 +59,6 @@ const useConfig = (id: string, payload: ListFilterNodeType) => {
      case VarType.arrayObject:
        itemVarType = VarType.object
        break
      default:
        itemVarType = varType
    }
    return { varType, itemVarType }
  }, [availableNodes, getCurrentVariableType, inputs.variable, isChatMode, isInIteration, iterationNode, loopNode])

@ -16,15 +16,20 @@ import {
  PortalToFollowElemContent,
  PortalToFollowElemTrigger,
} from '@/app/components/base/portal-to-follow-elem'
import { getNodesBounds, useReactFlow } from 'reactflow'
import ImagePreview from '@/app/components/base/image-uploader/image-preview'

const ExportImage: FC = () => {
  const { t } = useTranslation()
  const { getNodesReadOnly } = useNodesReadOnly()
  const reactFlow = useReactFlow()

  const appDetail = useAppStore(s => s.appDetail)
  const [open, setOpen] = useState(false)
  const [previewUrl, setPreviewUrl] = useState('')
  const [previewTitle, setPreviewTitle] = useState('')

  const handleExportImage = useCallback(async (type: 'png' | 'jpeg' | 'svg') => {
  const handleExportImage = useCallback(async (type: 'png' | 'jpeg' | 'svg', currentWorkflow = false) => {
    if (!appDetail)
      return

@ -44,31 +49,123 @@ const ExportImage: FC = () => {
    }

    let dataUrl
    switch (type) {
      case 'png':
        dataUrl = await toPng(flowElement, { filter })
        break
      case 'jpeg':
        dataUrl = await toJpeg(flowElement, { filter })
        break
      case 'svg':
        dataUrl = await toSvg(flowElement, { filter })
        break
      default:
        dataUrl = await toPng(flowElement, { filter })
    let filename = `${appDetail.name}`

    if (currentWorkflow) {
      // Get all nodes and their bounds
      const nodes = reactFlow.getNodes()
      const nodesBounds = getNodesBounds(nodes)

      // Save current viewport
      const currentViewport = reactFlow.getViewport()

      // Calculate the required zoom to fit all nodes
      const viewportWidth = window.innerWidth
      const viewportHeight = window.innerHeight
      const zoom = Math.min(
        viewportWidth / (nodesBounds.width + 100),
        viewportHeight / (nodesBounds.height + 100),
        1,
      )

      // Calculate center position
      const centerX = nodesBounds.x + nodesBounds.width / 2
      const centerY = nodesBounds.y + nodesBounds.height / 2

      // Set viewport to show all nodes
      reactFlow.setViewport({
        x: viewportWidth / 2 - centerX * zoom,
        y: viewportHeight / 2 - centerY * zoom,
        zoom,
      })

      // Wait for the transition to complete
      await new Promise(resolve => setTimeout(resolve, 300))

      // Calculate actual content size with padding
      const padding = 50 // More padding for better visualization
      const contentWidth = nodesBounds.width + padding * 2
      const contentHeight = nodesBounds.height + padding * 2

      // Export with higher quality for whole workflow
      const exportOptions = {
        filter,
        backgroundColor: '#1a1a1a', // Dark background to match previous style
        pixelRatio: 2, // Higher resolution for better zoom
        width: contentWidth,
        height: contentHeight,
        style: {
          width: `${contentWidth}px`,
          height: `${contentHeight}px`,
          transform: `translate(${padding - nodesBounds.x}px, ${padding - nodesBounds.y}px) scale(${zoom})`,
        },
      }

      switch (type) {
        case 'png':
          dataUrl = await toPng(flowElement, exportOptions)
          break
        case 'jpeg':
          dataUrl = await toJpeg(flowElement, exportOptions)
          break
        case 'svg':
          dataUrl = await toSvg(flowElement, { filter })
          break
        default:
          dataUrl = await toPng(flowElement, exportOptions)
      }

      filename += '-whole-workflow'

      // Restore original viewport after a delay
      setTimeout(() => {
        reactFlow.setViewport(currentViewport)
      }, 500)
    }
    else {
      // Current viewport export (existing functionality)
      switch (type) {
        case 'png':
          dataUrl = await toPng(flowElement, { filter })
          break
        case 'jpeg':
          dataUrl = await toJpeg(flowElement, { filter })
          break
        case 'svg':
          dataUrl = await toSvg(flowElement, { filter })
          break
        default:
          dataUrl = await toPng(flowElement, { filter })
      }
    }

    const link = document.createElement('a')
    link.href = dataUrl
    link.download = `${appDetail.name}.${type}`
    document.body.appendChild(link)
    link.click()
    document.body.removeChild(link)
    if (currentWorkflow) {
      // For whole workflow, show preview first
      setPreviewUrl(dataUrl)
      setPreviewTitle(`${filename}.${type}`)

      // Also auto-download
      const link = document.createElement('a')
      link.href = dataUrl
      link.download = `${filename}.${type}`
      document.body.appendChild(link)
      link.click()
      document.body.removeChild(link)
    }
    else {
      // For current view, just download
      const link = document.createElement('a')
      link.href = dataUrl
      link.download = `${filename}.${type}`
      document.body.appendChild(link)
      link.click()
      document.body.removeChild(link)
    }
  }
  catch (error) {
    console.error('Export image failed:', error)
  }
  }, [getNodesReadOnly, appDetail])
  }, [getNodesReadOnly, appDetail, reactFlow])

  const handleTrigger = useCallback(() => {
    if (getNodesReadOnly())

@ -78,53 +175,90 @@ const ExportImage: FC = () => {
  }, [getNodesReadOnly])

  return (
    <PortalToFollowElem
      open={open}
      onOpenChange={setOpen}
      placement="top-start"
      offset={{
        mainAxis: 4,
        crossAxis: -8,
      }}
    >
      <PortalToFollowElemTrigger>
        <TipPopup title={t('workflow.common.exportImage')}>
          <div
            className={cn(
              'flex h-8 w-8 cursor-pointer items-center justify-center rounded-lg hover:bg-state-base-hover hover:text-text-secondary',
              `${getNodesReadOnly() && 'cursor-not-allowed text-text-disabled hover:bg-transparent hover:text-text-disabled'}`,
            )}
            onClick={handleTrigger}
          >
            <RiExportLine className='h-4 w-4' />
          </div>
        </TipPopup>
      </PortalToFollowElemTrigger>
      <PortalToFollowElemContent className='z-10'>
        <div className='min-w-[120px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur text-text-secondary shadow-lg'>
          <div className='p-1'>
    <>
      <PortalToFollowElem
        open={open}
        onOpenChange={setOpen}
        placement="top-start"
        offset={{
          mainAxis: 4,
          crossAxis: -8,
        }}
      >
        <PortalToFollowElemTrigger>
          <TipPopup title={t('workflow.common.exportImage')}>
            <div
              className='system-md-regular flex h-8 cursor-pointer items-center rounded-lg px-2 hover:bg-state-base-hover'
              onClick={() => handleExportImage('png')}
              className={cn(
                'flex h-8 w-8 cursor-pointer items-center justify-center rounded-lg hover:bg-state-base-hover hover:text-text-secondary',
                `${getNodesReadOnly() && 'cursor-not-allowed text-text-disabled hover:bg-transparent hover:text-text-disabled'}`,
              )}
              onClick={handleTrigger}
            >
              {t('workflow.common.exportPNG')}
              <RiExportLine className='h-4 w-4' />
            </div>
            <div
              className='system-md-regular flex h-8 cursor-pointer items-center rounded-lg px-2 hover:bg-state-base-hover'
              onClick={() => handleExportImage('jpeg')}
            >
              {t('workflow.common.exportJPEG')}
            </div>
            <div
              className='system-md-regular flex h-8 cursor-pointer items-center rounded-lg px-2 hover:bg-state-base-hover'
              onClick={() => handleExportImage('svg')}
            >
              {t('workflow.common.exportSVG')}
          </TipPopup>
        </PortalToFollowElemTrigger>
        <PortalToFollowElemContent className='z-10'>
          <div className='min-w-[180px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur text-text-secondary shadow-lg'>
            <div className='p-1'>
              <div className='px-2 py-1 text-xs font-medium text-text-tertiary'>
                {t('workflow.common.currentView')}
              </div>
              <div
                className='system-md-regular flex h-8 cursor-pointer items-center rounded-lg px-2 hover:bg-state-base-hover'
                onClick={() => handleExportImage('png')}
              >
                {t('workflow.common.exportPNG')}
              </div>
              <div
                className='system-md-regular flex h-8 cursor-pointer items-center rounded-lg px-2 hover:bg-state-base-hover'
                onClick={() => handleExportImage('jpeg')}
              >
                {t('workflow.common.exportJPEG')}
              </div>
              <div
                className='system-md-regular flex h-8 cursor-pointer items-center rounded-lg px-2 hover:bg-state-base-hover'
                onClick={() => handleExportImage('svg')}
              >
                {t('workflow.common.exportSVG')}
              </div>

              <div className='border-border-divider mx-2 my-1 border-t' />

              <div className='px-2 py-1 text-xs font-medium text-text-tertiary'>
                {t('workflow.common.currentWorkflow')}
              </div>
              <div
                className='system-md-regular flex h-8 cursor-pointer items-center rounded-lg px-2 hover:bg-state-base-hover'
                onClick={() => handleExportImage('png', true)}
              >
                {t('workflow.common.exportPNG')}
              </div>
              <div
                className='system-md-regular flex h-8 cursor-pointer items-center rounded-lg px-2 hover:bg-state-base-hover'
                onClick={() => handleExportImage('jpeg', true)}
              >
                {t('workflow.common.exportJPEG')}
              </div>
              <div
                className='system-md-regular flex h-8 cursor-pointer items-center rounded-lg px-2 hover:bg-state-base-hover'
                onClick={() => handleExportImage('svg', true)}
              >
                {t('workflow.common.exportSVG')}
              </div>
            </div>
          </div>
        </div>
        </PortalToFollowElemContent>
      </PortalToFollowElem>
      </PortalToFollowElemContent>
      </PortalToFollowElem>

      {previewUrl && (
        <ImagePreview
          url={previewUrl}
          title={previewTitle}
          onCancel={() => setPreviewUrl('')}
        />
      )}
    </>
  )
}

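The whole-workflow branch above is, at its core, a fit-to-bounds viewport computation: cap the zoom so the padded node bounds fit the window (never above 1), then translate so the bounds' centre lands at the window centre. A self-contained sketch of just that math (assumed shapes; `Rect` stands in for reactflow's `getNodesBounds()` result):

```ts
type Rect = { x: number; y: number; width: number; height: number }
type Viewport = { x: number; y: number; zoom: number }

const fitViewportToBounds = (
  bounds: Rect,
  viewportWidth: number,
  viewportHeight: number,
  slack = 100, // matches the +100 allowance in the zoom calculation above
): Viewport => {
  // Zoom out just enough for the padded bounds to fit; never zoom in past 100%.
  const zoom = Math.min(
    viewportWidth / (bounds.width + slack),
    viewportHeight / (bounds.height + slack),
    1,
  )
  // Translate so the bounds' centre coincides with the window centre.
  return {
    x: viewportWidth / 2 - (bounds.x + bounds.width / 2) * zoom,
    y: viewportHeight / 2 - (bounds.y + bounds.height / 2) * zoom,
    zoom,
  }
}

// e.g. fitViewportToBounds({ x: 0, y: 0, width: 3000, height: 1500 }, 1920, 1080)
```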
@ -260,7 +260,30 @@ const SelectionContextmenu = () => {

    // Get all selected nodes
    const selectedNodeIds = selectedNodes.map(node => node.id)
    const nodesToAlign = nodes.filter(node => selectedNodeIds.includes(node.id))

    // Find container nodes and their children
    // Container nodes (like Iteration and Loop) have child nodes that should not be aligned independently
    // when the container is selected. This prevents child nodes from being moved outside their containers.
    const childNodeIds = new Set<string>()

    nodes.forEach((node) => {
      // Check if this is a container node (Iteration or Loop)
      if (node.data._children && node.data._children.length > 0) {
        // If container node is selected, add its children to the exclusion set
        if (selectedNodeIds.includes(node.id)) {
          // Add all its children to the childNodeIds set
          node.data._children.forEach((child: { nodeId: string; nodeType: string }) => {
            childNodeIds.add(child.nodeId)
          })
        }
      }
    })

    // Filter out child nodes from the alignment operation
    // Only align nodes that are selected AND are not children of container nodes
    // This ensures container nodes can be aligned while their children stay in the same relative position
    const nodesToAlign = nodes.filter(node =>
      selectedNodeIds.includes(node.id) && !childNodeIds.has(node.id))

    if (nodesToAlign.length <= 1) {
      handleSelectionContextmenuCancel()
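The filtering above amounts to a set difference over the selection: collect the children of every selected container, then keep only selected nodes outside that set. A minimal distillation (hypothetical node shape, not the component's real types):

```ts
type NodeLite = { id: string; childIds?: string[] }

const nodesEligibleForAlignment = (nodes: NodeLite[], selectedIds: Set<string>) => {
  const excluded = new Set<string>()
  for (const node of nodes) {
    // Children of a selected container move with it, so exclude them.
    if (selectedIds.has(node.id))
      node.childIds?.forEach(childId => excluded.add(childId))
  }
  return nodes.filter(node => selectedIds.has(node.id) && !excluded.has(node.id))
}
```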
@ -1,7 +1,7 @@
|
|||
'use client'
|
||||
import React from 'react'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import Select from '@/app/components/base/select/locale'
|
||||
import LocaleSigninSelect from '@/app/components/base/select/locale-signin'
|
||||
import Divider from '@/app/components/base/divider'
|
||||
import { languages } from '@/i18n-config/language'
|
||||
import type { Locale } from '@/i18n-config'
|
||||
|
|
@ -33,7 +33,7 @@ const Header = () => {
|
|||
/>
|
||||
: <DifyLogo size='large' />}
|
||||
<div className='flex items-center gap-1'>
|
||||
<Select
|
||||
<LocaleSigninSelect
|
||||
value={locale}
|
||||
items={languages.filter(item => item.supported)}
|
||||
onChange={(value) => {
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,17 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_131_1011)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M12.0003 0.5C9.15149 0.501478 6.39613 1.51046 4.22687 3.34652C2.05761 5.18259 0.615903 7.72601 0.159545 10.522C-0.296814 13.318 0.261927 16.1842 1.73587 18.6082C3.20981 21.0321 5.50284 22.8558 8.20493 23.753C8.80105 23.8636 9.0256 23.4941 9.0256 23.18C9.0256 22.8658 9.01367 21.955 9.0097 20.9592C5.6714 21.6804 4.96599 19.5505 4.96599 19.5505C4.42152 18.1674 3.63464 17.8039 3.63464 17.8039C2.54571 17.065 3.71611 17.0788 3.71611 17.0788C4.92227 17.1637 5.55616 18.3097 5.55616 18.3097C6.62521 20.1333 8.36389 19.6058 9.04745 19.2976C9.15475 18.5251 9.46673 17.9995 9.8105 17.7012C7.14383 17.4008 4.34204 16.3774 4.34204 11.8054C4.32551 10.6197 4.76802 9.47305 5.57801 8.60268C5.45481 8.30236 5.04348 7.08923 5.69524 5.44143C5.69524 5.44143 6.7027 5.12135 8.9958 6.66444C10.9627 6.12962 13.0379 6.12962 15.0047 6.66444C17.2958 5.12135 18.3013 5.44143 18.3013 5.44143C18.9551 7.08528 18.5437 8.29841 18.4205 8.60268C19.2331 9.47319 19.6765 10.6218 19.6585 11.8094C19.6585 16.3912 16.8507 17.4008 14.1801 17.6952C14.6093 18.0667 14.9928 18.7918 14.9928 19.9061C14.9928 21.5026 14.9789 22.7868 14.9789 23.18C14.9789 23.4981 15.1955 23.8695 15.8035 23.753C18.5059 22.8557 20.7992 21.0317 22.2731 18.6073C23.747 16.183 24.3055 13.3163 23.8486 10.5201C23.3917 7.7238 21.9493 5.18035 19.7793 3.34461C17.6093 1.50886 14.8533 0.500541 12.0042 0.5H12.0003Z" fill="#ffffff"/>
<path d="M4.54444 17.6321C4.5186 17.6914 4.42322 17.7092 4.34573 17.6677C4.26823 17.6262 4.21061 17.5491 4.23843 17.4879C4.26625 17.4266 4.35964 17.4108 4.43714 17.4523C4.51463 17.4938 4.57424 17.5729 4.54444 17.6321Z" fill="#ffffff"/>
<path d="M5.03123 18.1714C4.99008 18.192 4.943 18.1978 4.89805 18.1877C4.8531 18.1776 4.81308 18.1523 4.78483 18.1161C4.70734 18.0331 4.69143 17.9185 4.75104 17.8671C4.81066 17.8157 4.91797 17.8395 4.99546 17.9224C5.07296 18.0054 5.09084 18.12 5.03123 18.1714Z" fill="#ffffff"/>
<path d="M5.50425 18.857C5.43072 18.9084 5.30553 18.857 5.23598 18.7543C5.21675 18.7359 5.20146 18.7138 5.19101 18.6893C5.18056 18.6649 5.17517 18.6386 5.17517 18.612C5.17517 18.5855 5.18056 18.5592 5.19101 18.5347C5.20146 18.5103 5.21675 18.4882 5.23598 18.4698C5.3095 18.4204 5.4347 18.4698 5.50425 18.5705C5.57379 18.6713 5.57578 18.8057 5.50425 18.857V18.857Z" fill="#ffffff"/>
<path d="M6.14612 19.5207C6.08054 19.5939 5.94741 19.5741 5.83812 19.4753C5.72883 19.3765 5.70299 19.2422 5.76857 19.171C5.83414 19.0999 5.96727 19.1197 6.08054 19.2165C6.1938 19.3133 6.21566 19.4496 6.14612 19.5207V19.5207Z" fill="#ffffff"/>
<path d="M7.04617 19.9081C7.01637 20.001 6.88124 20.0425 6.74612 20.003C6.611 19.9635 6.52158 19.8528 6.54741 19.758C6.57325 19.6631 6.71036 19.6197 6.84747 19.6631C6.98457 19.7066 7.07201 19.8113 7.04617 19.9081Z" fill="#ffffff"/>
<path d="M8.02783 19.9752C8.02783 20.072 7.91656 20.155 7.77349 20.1569C7.63042 20.1589 7.51318 20.0799 7.51318 19.9831C7.51318 19.8863 7.62445 19.8033 7.76752 19.8013C7.91059 19.7993 8.02783 19.8764 8.02783 19.9752Z" fill="#ffffff"/>
<path d="M8.9419 19.8232C8.95978 19.92 8.86042 20.0207 8.71735 20.0445C8.57428 20.0682 8.4491 20.0109 8.43121 19.916C8.41333 19.8212 8.51666 19.7185 8.65576 19.6928C8.79485 19.6671 8.92401 19.7264 8.9419 19.8232Z" fill="#ffffff"/>
</g>
<defs>
<clipPath id="clip0_131_1011">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>

After: Size 3.4 KiB
@@ -144,11 +144,10 @@ const NormalForm = () => {
        </div>

        {showORLine && <div className="relative mt-6">
          <div className="absolute inset-0 flex items-center" aria-hidden="true">
            <div className='h-px w-full bg-gradient-to-r from-background-gradient-mask-transparent via-divider-regular to-background-gradient-mask-transparent'></div>
          </div>
          <div className="relative flex justify-center">
            <span className="system-xs-medium-uppercase px-2 text-text-tertiary">{t('login.or')}</span>
          <div className="flex items-center">
            <div className="h-px flex-1 bg-gradient-to-r from-background-gradient-mask-transparent to-divider-regular"></div>
            <span className="system-xs-medium-uppercase px-3 text-text-tertiary">{t('login.or')}</span>
            <div className="h-px flex-1 bg-gradient-to-l from-background-gradient-mask-transparent to-divider-regular"></div>
          </div>
        </div>}
        {
@@ -2,6 +2,10 @@
  background: center/contain url('./assets/github.svg') no-repeat;
}

html[data-theme="dark"] .githubIcon {
  background: center/contain url('./assets/github-dark.svg') no-repeat;
}

.googleIcon {
  background: center/contain url('./assets/google.svg') no-repeat;
}
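The white-filled SVG added earlier is evidently the `github-dark.svg` variant this rule points at. Nothing in the component changes between themes; a theme switcher only has to set the attribute the selector matches. A minimal DOM sketch, where the function name is hypothetical:

```typescript
// Flip the attribute that html[data-theme="dark"] selectors key off.
function applyTheme(theme: 'light' | 'dark'): void {
  document.documentElement.setAttribute('data-theme', theme)
}
```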
@@ -1,7 +1,7 @@
'use client'
import { useSearchParams } from 'next/navigation'
import OneMoreStep from './oneMoreStep'
import NormalForm from './normalForm'
import OneMoreStep from './one-more-step'
import NormalForm from './normal-form'

const SignIn = () => {
  const searchParams = useSearchParams()
@@ -163,7 +163,7 @@ export default combine(
    'sonarjs/single-char-in-character-classes': 'off',
    'sonarjs/anchor-precedence': 'warn',
    'sonarjs/updated-loop-counter': 'off',
    'sonarjs/no-dead-store': 'warn',
    'sonarjs/no-dead-store': 'error',
    'sonarjs/no-duplicated-branches': 'warn',
    'sonarjs/max-lines': 'warn', // max 1000 lines
    'sonarjs/no-variable-usage-before-declaration': 'error',
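The one substantive change here is `sonarjs/no-dead-store` being promoted from 'warn' to 'error'. As a reminder of what the rule flags, a minimal offending example; the rule typically ignores trivial initializers such as 0 or null, so the dead value below is a computed one:

```typescript
function greetingFor(hour: number): string {
  let greeting = hour < 12 ? 'good morning' : 'hello' // dead store: overwritten before any read
  greeting = 'hello'
  return greeting
}
```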
@@ -212,7 +212,9 @@
// Add command line argument support
const isDryRun = process.argv.includes('--dry-run')
const targetFile = process.argv.find(arg => arg.startsWith('--file='))?.split('=')[1]
const targetFiles = process.argv
  .filter(arg => arg.startsWith('--file='))
  .map(arg => arg.split('=')[1])
const targetLang = process.argv.find(arg => arg.startsWith('--lang='))?.split('=')[1]

// Rate limiting helper
@@ -230,8 +232,8 @@ async function main() {
    .map(file => file.replace(/\.ts$/, ''))
  // Removed app-debug exclusion, now only skip specific problematic keys

  // Filter by target file if specified
  const filesToProcess = targetFile ? files.filter(f => f === targetFile) : files
  // Filter by target files if specified
  const filesToProcess = targetFiles.length > 0 ? files.filter(f => targetFiles.includes(f)) : files
  const languagesToProcess = targetLang ? [targetLang] : Object.keys(languageKeyMap)

  console.log(`📁 Files to process: ${filesToProcess.join(', ')}`)
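Taken together, the two hunks above move the script from a single `--file=` flag to any number of repeated ones, with an empty list meaning "process every file". A self-contained sketch of the resulting behaviour; the file names and the argv literal are placeholders, since the real script reads process.argv and derives the list from web/i18n/en-US:

```typescript
// Stand-in for process.argv.slice(2) from e.g.: pnpm run auto-gen-i18n --file=app --file=workflow --lang=de
const argv = ['--file=app', '--file=workflow', '--lang=de']
const allFiles = ['app', 'common', 'login', 'workflow'] // stand-in for the en-US file list

const targetFiles = argv.filter(a => a.startsWith('--file=')).map(a => a.split('=')[1])
const targetLang = argv.find(a => a.startsWith('--lang='))?.split('=')[1]

// No --file flags means "process everything", mirroring the diff above.
const filesToProcess = targetFiles.length > 0 ? allFiles.filter(f => targetFiles.includes(f)) : allFiles
console.log(filesToProcess, targetLang) // ['app', 'workflow'] 'de'
```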
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'Wähle Zeit',
  },
  defaultPlaceholder: 'Wähle eine Zeit...',
  dateFormats: {
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    output: 'YYYY-MM-DD',
    display: 'MMMM D, YYYY',
    input: 'YYYY-MM-DD',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    needEndNode: 'Der Endknoten muss hinzugefügt werden.',
    needAnswerNode: 'Der Antwortknoten muss hinzugefügt werden.',
    tagBound: 'Anzahl der Apps, die dieses Tag verwenden',
    currentWorkflow: 'Aktueller Arbeitsablauf',
    currentView: 'Aktuelle Ansicht',
  },
  env: {
    envPanelTitle: 'Umgebungsvariablen',
@@ -32,6 +32,14 @@ const translation = {
    pickTime: 'Pick Time',
  },
  defaultPlaceholder: 'Pick a time...',
  // Date format configurations
  dateFormats: {
    display: 'MMMM D, YYYY',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    input: 'YYYY-MM-DD',
    output: 'YYYY-MM-DD',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
  },
}

export default translation
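These `dateFormats` entries separate what the user sees (`display`, `displayWithTime`) from what the code parses and emits (`input`, `output`, `outputWithTime`), so the format tokens themselves must stay valid for the date library even in translated files. A hypothetical consumption sketch, assuming dayjs and an i18next-style `t()`; the `time.` key prefix and the helper name are guesses, not the actual code:

```typescript
import dayjs from 'dayjs'

// Format a date for display using the locale's configured pattern.
function formatForDisplay(date: Date, t: (key: string) => string, withTime = false): string {
  const pattern = withTime ? t('time.dateFormats.displayWithTime') : t('time.dateFormats.display')
  return dayjs(date).format(pattern)
}
```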
@@ -74,6 +74,8 @@ const translation = {
    exportPNG: 'Export as PNG',
    exportJPEG: 'Export as JPEG',
    exportSVG: 'Export as SVG',
    currentView: 'Current View',
    currentWorkflow: 'Current Workflow',
    model: 'Model',
    workflowAsTool: 'Workflow as Tool',
    configureRequired: 'Configure Required',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'Elegir hora',
  },
  defaultPlaceholder: 'Elige una hora...',
  dateFormats: {
    input: 'YYYY-MM-DD',
    output: 'YYYY-MM-DD',
    display: 'MMMM D, YYYY',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    needEndNode: 'Se debe agregar el nodo Final',
    addBlock: 'Agregar nodo',
    tagBound: 'Número de aplicaciones que utilizan esta etiqueta',
    currentView: 'Vista actual',
    currentWorkflow: 'Flujo de trabajo actual',
  },
  env: {
    envPanelTitle: 'Variables de Entorno',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'زمان انتخاب کنید',
  },
  defaultPlaceholder: 'زمانی را انتخاب کنید...',
  dateFormats: {
    output: 'YYYY-MM-DD',
    input: 'YYYY-MM-DD',
    display: 'MMMM D, YYYY',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    needAnswerNode: 'باید گره پاسخ اضافه شود',
    addBlock: 'نود اضافه کنید',
    tagBound: 'تعداد برنامه‌هایی که از این برچسب استفاده می‌کنند',
    currentView: 'نمای فعلی',
    currentWorkflow: 'گردش کار فعلی',
  },
  env: {
    envPanelTitle: 'متغیرهای محیطی',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'Choisir le temps',
  },
  defaultPlaceholder: 'Choisissez un moment...',
  dateFormats: {
    display: 'MMMM D, YYYY',
    output: 'YYYY-MM-DD',
    input: 'YYYY-MM-DD',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    needAnswerNode: 'Le nœud de réponse doit être ajouté.',
    addBlock: 'Ajouter un nœud',
    tagBound: 'Nombre d\'applications utilisant cette étiquette',
    currentView: 'Vue actuelle',
    currentWorkflow: 'Flux de travail actuel',
  },
  env: {
    envPanelTitle: 'Variables d\'Environnement',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'समय चुनें',
  },
  defaultPlaceholder: 'एक समय चुनें...',
  dateFormats: {
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
    input: 'YYYY-MM-DD',
    output: 'YYYY-MM-DD',
    display: 'MMMM D, YYYY',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
  },
}

export default translation
@@ -117,6 +117,8 @@ const translation = {
    addBlock: 'नोड जोड़ें',
    needEndNode: 'अंत नोड जोड़ा जाना चाहिए',
    tagBound: 'इस टैग का उपयोग करने वाले ऐप्स की संख्या',
    currentView: 'वर्तमान दृश्य',
    currentWorkflow: 'वर्तमान कार्यप्रवाह',
  },
  env: {
    envPanelTitle: 'पर्यावरण चर',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'Scegli Tempo',
  },
  defaultPlaceholder: 'Scegli un orario...',
  dateFormats: {
    display: 'MMMM D, YYYY',
    input: 'YYYY-MM-DD',
    output: 'YYYY-MM-DD',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
  },
}

export default translation
@@ -118,6 +118,8 @@ const translation = {
    addBlock: 'Aggiungi nodo',
    needAnswerNode: 'Deve essere aggiunto il nodo di risposta',
    tagBound: 'Numero di app che utilizzano questo tag',
    currentWorkflow: 'Flusso di lavoro corrente',
    currentView: 'Vista corrente',
  },
  env: {
    envPanelTitle: 'Variabili d\'Ambiente',
@@ -32,6 +32,14 @@ const translation = {
    pickTime: 'ピックタイム',
  },
  defaultPlaceholder: '時間を選んでください...',
  // Date format configurations
  dateFormats: {
    display: 'YYYY年MM月DD日',
    displayWithTime: 'YYYY年MM月DD日 HH:mm',
    input: 'YYYY-MM-DD',
    output: 'YYYY-MM-DD',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
  },
}

export default translation
@@ -74,6 +74,8 @@ const translation = {
    exportPNG: 'PNG で出力',
    exportJPEG: 'JPEG で出力',
    exportSVG: 'SVG で出力',
    currentView: '現在のビュー',
    currentWorkflow: '現在のワークフロー',
    model: 'モデル',
    workflowAsTool: 'ワークフローをツールとして公開する',
    configureRequired: '設定が必要',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: '시간 선택',
  },
  defaultPlaceholder: '시간을 선택하세요...',
  dateFormats: {
    input: 'YYYY-MM-DD',
    display: 'MMMM D, YYYY',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    output: 'YYYY-MM-DD',
  },
}

export default translation
@@ -118,6 +118,8 @@ const translation = {
    needAnswerNode: '답변 노드를 추가해야 합니다.',
    needEndNode: '종단 노드를 추가해야 합니다.',
    tagBound: '이 태그를 사용하는 앱 수',
    currentView: '현재 보기',
    currentWorkflow: '현재 워크플로',
  },
  env: {
    envPanelTitle: '환경 변수',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'Wybierz czas',
  },
  defaultPlaceholder: 'Wybierz czas...',
  dateFormats: {
    output: 'YYYY-MM-DD',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    display: 'MMMM D, YYYY',
    input: 'YYYY-MM-DD',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    needEndNode: 'Należy dodać węzeł końcowy',
    needAnswerNode: 'Węzeł odpowiedzi musi zostać dodany',
    tagBound: 'Liczba aplikacji korzystających z tego tagu',
    currentWorkflow: 'Bieżący przepływ pracy',
    currentView: 'Bieżący widok',
  },
  env: {
    envPanelTitle: 'Zmienne Środowiskowe',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'Escolha o Horário',
  },
  defaultPlaceholder: 'Escolha um horário...',
  dateFormats: {
    input: 'YYYY-MM-DD',
    output: 'YYYY-MM-DD',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    display: 'MMMM D, YYYY',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    needEndNode: 'O nó de Fim deve ser adicionado',
    needAnswerNode: 'O nó de resposta deve ser adicionado',
    tagBound: 'Número de aplicativos usando esta tag',
    currentView: 'Visualização atual',
    currentWorkflow: 'Fluxo de trabalho atual',
  },
  env: {
    envPanelTitle: 'Variáveis de Ambiente',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'Alegeți timpul',
  },
  defaultPlaceholder: 'Alege o oră...',
  dateFormats: {
    display: 'MMMM D, YYYY',
    input: 'YYYY-MM-DD',
    output: 'YYYY-MM-DD',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    needAnswerNode: 'Nodul de răspuns trebuie adăugat',
    needEndNode: 'Nodul de sfârșit trebuie adăugat',
    tagBound: 'Numărul de aplicații care folosesc acest tag',
    currentView: 'Vizualizare curentă',
    currentWorkflow: 'Flux de lucru curent',
  },
  env: {
    envPanelTitle: 'Variabile de Mediu',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'Выберите время',
  },
  defaultPlaceholder: 'Выберите время...',
  dateFormats: {
    display: 'MMMM D, YYYY',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    output: 'YYYY-MM-DD',
    input: 'YYYY-MM-DD',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    needAnswerNode: 'Необходимо добавить узел ответа',
    needEndNode: 'Узел конца должен быть добавлен',
    tagBound: 'Количество приложений, использующих этот тег',
    currentView: 'Текущий вид',
    currentWorkflow: 'Текущий рабочий процесс',
  },
  env: {
    envPanelTitle: 'Переменные среды',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'Izberi čas',
  },
  defaultPlaceholder: 'Izberi čas...',
  dateFormats: {
    display: 'MMMM D, YYYY',
    input: 'YYYY-MM-DD',
    output: 'YYYY-MM-DD',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    configure: 'Konfiguriraj',
    inRunMode: 'V načinu izvajanja',
    tagBound: 'Število aplikacij, ki uporabljajo to oznako',
    currentView: 'Trenutni pogled',
    currentWorkflow: 'Trenutni potek dela',
  },
  env: {
    modal: {
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'เลือกเวลา',
  },
  defaultPlaceholder: 'เลือกเวลา...',
  dateFormats: {
    input: 'YYYY-MM-DD',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
    display: 'MMMM D, YYYY',
    output: 'YYYY-MM-DD',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    addBlock: 'เพิ่มโหนด',
    needEndNode: 'ต้องเพิ่มโหนดจบ',
    tagBound: 'จำนวนแอปพลิเคชันที่ใช้แท็กนี้',
    currentWorkflow: 'เวิร์กโฟลว์ปัจจุบัน',
    currentView: 'มุมมองปัจจุบัน',
  },
  env: {
    envPanelTitle: 'ตัวแปรสภาพแวดล้อม',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'Zamanı Seç',
  },
  defaultPlaceholder: 'Bir zaman seç...',
  dateFormats: {
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    output: 'YYYY-MM-DD',
    display: 'MMMM D, YYYY',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
    input: 'YYYY-MM-DD',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    needAnswerNode: 'Cevap düğümü eklenmelidir.',
    needEndNode: 'Son düğüm eklenmelidir',
    tagBound: 'Bu etiketi kullanan uygulama sayısı',
    currentView: 'Geçerli Görünüm',
    currentWorkflow: 'Mevcut İş Akışı',
  },
  env: {
    envPanelTitle: 'Çevre Değişkenleri',
@@ -32,6 +32,12 @@ const translation = {
    pickTime: 'Виберіть час',
  },
  defaultPlaceholder: 'Виберіть час...',
  dateFormats: {
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    output: 'YYYY-MM-DD',
    display: 'MMMM D, YYYY',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    needEndNode: 'Необхідно додати кінцевий вузол',
    needAnswerNode: 'Вузол Відповіді повинен бути доданий',
    tagBound: 'Кількість додатків, що використовують цей тег',
    currentView: 'Поточний вигляд',
    currentWorkflow: 'Поточний робочий процес',
  },
  env: {
    envPanelTitle: 'Змінні середовища',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: 'Chọn Thời Gian',
  },
  defaultPlaceholder: 'Chọn một thời gian...',
  dateFormats: {
    input: 'YYYY-MM-DD',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    display: 'MMMM D, YYYY',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
    output: 'YYYY-MM-DD',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    addBlock: 'Thêm Node',
    needEndNode: 'Nút Kết thúc phải được thêm vào',
    tagBound: 'Số lượng ứng dụng sử dụng thẻ này',
    currentWorkflow: 'Quy trình làm việc hiện tại',
    currentView: 'Chế độ xem hiện tại',
  },
  env: {
    envPanelTitle: 'Biến Môi Trường',
@@ -32,6 +32,14 @@ const translation = {
    pickTime: '选择时间',
  },
  defaultPlaceholder: '请选择时间...',
  // Date format configurations
  dateFormats: {
    display: 'YYYY年MM月DD日',
    displayWithTime: 'YYYY年MM月DD日 HH:mm',
    input: 'YYYY-MM-DD',
    output: 'YYYY-MM-DD',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
  },
}

export default translation
@@ -73,6 +73,8 @@ const translation = {
    exportPNG: '导出为 PNG',
    exportJPEG: '导出为 JPEG',
    exportSVG: '导出为 SVG',
    currentView: '当前视图',
    currentWorkflow: '整个工作流',
    model: '模型',
    workflowAsTool: '发布为工具',
    configureRequired: '需要进行配置',
@@ -32,6 +32,13 @@ const translation = {
    pickTime: '選擇時間',
  },
  defaultPlaceholder: '選擇一個時間...',
  dateFormats: {
    display: 'MMMM D, YYYY',
    output: 'YYYY-MM-DD',
    displayWithTime: 'MMMM D, YYYY hh:mm A',
    input: 'YYYY-MM-DD',
    outputWithTime: 'YYYY-MM-DDTHH:mm:ss.SSSZ',
  },
}

export default translation
@@ -114,6 +114,8 @@ const translation = {
    exportImage: '匯出圖像',
    exportJPEG: '匯出為 JPEG',
    tagBound: '使用此標籤的應用程式數量',
    currentView: '當前檢視',
    currentWorkflow: '當前工作流程',
  },
  env: {
    envPanelTitle: '環境變數',