mirror of https://github.com/langgenius/dify.git

commit 74d938a8d2
Merge branch 'feat/add-knowledge-pipeline-template-command' into deploy/rag-dev
@@ -8,6 +8,7 @@ on:
      - "deploy/enterprise"
      - "build/**"
      - "release/e-*"
      - "hotfix/**"
    tags:
      - "*"
api/commands.py (152 changed lines)
@@ -10,6 +10,7 @@ from flask import current_app
from pydantic import TypeAdapter
from sqlalchemy import select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import sessionmaker

from configs import dify_config
from constants.languages import languages
@@ -61,31 +62,30 @@ def reset_password(email, new_password, password_confirm):
if str(new_password).strip() != str(password_confirm).strip():
click.echo(click.style("Passwords do not match.", fg="red"))
return
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
account = session.query(Account).where(Account.email == email).one_or_none()

account = db.session.query(Account).where(Account.email == email).one_or_none()
if not account:
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return

if not account:
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return
try:
valid_password(new_password)
except:
click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red"))
return

try:
valid_password(new_password)
except:
click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red"))
return
# generate password salt
salt = secrets.token_bytes(16)
base64_salt = base64.b64encode(salt).decode()

# generate password salt
salt = secrets.token_bytes(16)
base64_salt = base64.b64encode(salt).decode()

# encrypt password with salt
password_hashed = hash_password(new_password, salt)
base64_password_hashed = base64.b64encode(password_hashed).decode()
account.password = base64_password_hashed
account.password_salt = base64_salt
db.session.commit()
AccountService.reset_login_error_rate_limit(email)
click.echo(click.style("Password reset successfully.", fg="green"))
# encrypt password with salt
password_hashed = hash_password(new_password, salt)
base64_password_hashed = base64.b64encode(password_hashed).decode()
account.password = base64_password_hashed
account.password_salt = base64_salt
AccountService.reset_login_error_rate_limit(email)
click.echo(click.style("Password reset successfully.", fg="green"))


@click.command("reset-email", help="Reset the account email.")
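Note: the hunks above replace direct use of the long-lived db.session with a short-lived session opened via sessionmaker(...).begin(), which commits on success and rolls back on error, so the explicit db.session.commit() disappears. A minimal sketch of the pattern (not part of the commit), assuming the db engine and the Account model imported at the top of api/commands.py; the function name is illustrative:

    from sqlalchemy.orm import sessionmaker

    def set_account_email(email: str, new_email: str) -> bool:
        # begin() yields a session inside a transaction that commits on normal
        # exit and rolls back if the block raises, so no explicit commit is needed.
        with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
            account = session.query(Account).where(Account.email == email).one_or_none()
            if account is None:
                return False
            account.email = new_email  # flushed and committed by the context manager
        return True
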
@@ -100,22 +100,21 @@ def reset_email(email, new_email, email_confirm):
if str(new_email).strip() != str(email_confirm).strip():
click.echo(click.style("New emails do not match.", fg="red"))
return
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
account = session.query(Account).where(Account.email == email).one_or_none()

account = db.session.query(Account).where(Account.email == email).one_or_none()
if not account:
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return

if not account:
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return
try:
email_validate(new_email)
except:
click.echo(click.style(f"Invalid email: {new_email}", fg="red"))
return

try:
email_validate(new_email)
except:
click.echo(click.style(f"Invalid email: {new_email}", fg="red"))
return

account.email = new_email
db.session.commit()
click.echo(click.style("Email updated successfully.", fg="green"))
account.email = new_email
click.echo(click.style("Email updated successfully.", fg="green"))


@click.command(
@@ -139,25 +138,24 @@ def reset_encrypt_key_pair():
if dify_config.EDITION != "SELF_HOSTED":
click.echo(click.style("This command is only for SELF_HOSTED installations.", fg="red"))
return
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
tenants = session.query(Tenant).all()
for tenant in tenants:
if not tenant:
click.echo(click.style("No workspaces found. Run /install first.", fg="red"))
return

tenants = db.session.query(Tenant).all()
for tenant in tenants:
if not tenant:
click.echo(click.style("No workspaces found. Run /install first.", fg="red"))
return
tenant.encrypt_public_key = generate_key_pair(tenant.id)

tenant.encrypt_public_key = generate_key_pair(tenant.id)
session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete()

db.session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
db.session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete()
db.session.commit()

click.echo(
click.style(
f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
fg="green",
click.echo(
click.style(
f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
fg="green",
)
)
)


@click.command("vdb-migrate", help="Migrate vector db.")
@@ -182,14 +180,15 @@ def migrate_annotation_vector_database():
try:
# get apps info
per_page = 50
apps = (
db.session.query(App)
.where(App.status == "normal")
.order_by(App.created_at.desc())
.limit(per_page)
.offset((page - 1) * per_page)
.all()
)
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
apps = (
session.query(App)
.where(App.status == "normal")
.order_by(App.created_at.desc())
.limit(per_page)
.offset((page - 1) * per_page)
.all()
)
if not apps:
break
except SQLAlchemyError:
@@ -203,26 +202,27 @@ def migrate_annotation_vector_database():
)
try:
click.echo(f"Creating app annotation index: {app.id}")
app_annotation_setting = (
db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first()
)
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
app_annotation_setting = (
session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first()
)

if not app_annotation_setting:
skipped_count = skipped_count + 1
click.echo(f"App annotation setting disabled: {app.id}")
continue
# get dataset_collection_binding info
dataset_collection_binding = (
db.session.query(DatasetCollectionBinding)
.where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
.first()
)
if not dataset_collection_binding:
click.echo(f"App annotation collection binding not found: {app.id}")
continue
annotations = db.session.scalars(
select(MessageAnnotation).where(MessageAnnotation.app_id == app.id)
).all()
if not app_annotation_setting:
skipped_count = skipped_count + 1
click.echo(f"App annotation setting disabled: {app.id}")
continue
# get dataset_collection_binding info
dataset_collection_binding = (
session.query(DatasetCollectionBinding)
.where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
.first()
)
if not dataset_collection_binding:
click.echo(f"App annotation collection binding not found: {app.id}")
continue
annotations = session.scalars(
select(MessageAnnotation).where(MessageAnnotation.app_id == app.id)
).all()
dataset = Dataset(
id=app.id,
tenant_id=app.tenant_id,
@@ -18,3 +18,18 @@ class EnterpriseFeatureConfig(BaseSettings):
        description="Allow customization of the enterprise logo.",
        default=False,
    )

    UPLOAD_KNOWLEDGE_PIPELINE_TEMPLATE_TOKEN: str = Field(
        description="Token for uploading knowledge pipeline template.",
        default="",
    )

    KNOWLEDGE_PIPELINE_TEMPLATE_COPYRIGHT: str = Field(
        description="Knowledge pipeline template copyright.",
        default="Copyright 2023 Dify",
    )

    KNOWLEDGE_PIPELINE_TEMPLATE_PRIVACY_POLICY: str = Field(
        description="Knowledge pipeline template privacy policy.",
        default="https://dify.ai",
    )
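Note: these are pydantic-settings fields, so each one can be overridden through an environment variable of the same name. A standalone sketch (not part of the commit) mirroring the new fields; the class name is illustrative:

    from pydantic import Field
    from pydantic_settings import BaseSettings

    class KnowledgePipelineTemplateConfig(BaseSettings):
        # An empty default means template upload stays disabled until a token is set.
        UPLOAD_KNOWLEDGE_PIPELINE_TEMPLATE_TOKEN: str = Field(default="")
        KNOWLEDGE_PIPELINE_TEMPLATE_COPYRIGHT: str = Field(default="Copyright 2023 Dify")
        KNOWLEDGE_PIPELINE_TEMPLATE_PRIVACY_POLICY: str = Field(default="https://dify.ai")

    # Values are read from the process environment at construction time, e.g. after
    # `export UPLOAD_KNOWLEDGE_PIPELINE_TEMPLATE_TOKEN=my-secret-token`:
    config = KnowledgePipelineTemplateConfig()
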
@@ -40,8 +40,12 @@ class OceanBaseVectorConfig(BaseSettings):

    OCEANBASE_FULLTEXT_PARSER: str | None = Field(
        description=(
            "Fulltext parser to use for text indexing. Options: 'japanese_ftparser' (Japanese), "
            "'thai_ftparser' (Thai), 'ik' (Chinese). Default is 'ik'"
            "Fulltext parser to use for text indexing. "
            "Built-in options: 'ngram' (N-gram tokenizer for English/numbers), "
            "'beng' (Basic English tokenizer), 'space' (Space-based tokenizer), "
            "'ngram2' (Improved N-gram tokenizer), 'ik' (Chinese tokenizer). "
            "External plugins (require installation): 'japanese_ftparser' (Japanese tokenizer), "
            "'thai_ftparser' (Thai tokenizer). Default is 'ik'"
        ),
        default="ik",
    )
@@ -1,6 +1,7 @@
from datetime import datetime

import pytz  # pip install pytz
import sqlalchemy as sa
from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
from flask_restx.inputs import int_range

@@ -70,7 +71,7 @@ class CompletionConversationApi(Resource):
        parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
        args = parser.parse_args()

        query = db.select(Conversation).where(
        query = sa.select(Conversation).where(
            Conversation.app_id == app_model.id, Conversation.mode == "completion", Conversation.is_deleted.is_(False)
        )

@@ -236,7 +237,7 @@ class ChatConversationApi(Resource):
            .subquery()
        )

        query = db.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.is_deleted.is_(False))
        query = sa.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.is_deleted.is_(False))

        if args["keyword"]:
            keyword_filter = f"%{args['keyword']}%"
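Note: a recurring change across this commit moves query construction from the Flask-SQLAlchemy db.* proxies to SQLAlchemy imported as sa. Both spell the same constructs; a minimal sketch of the equivalence (not part of the commit), reusing the Conversation model, app_model, and db session from the file above:

    import sqlalchemy as sa

    # Before: db.select(...) relies on flask_sqlalchemy re-exporting SQLAlchemy
    # names on the extension object. After: sa.select(...) takes the construct
    # straight from SQLAlchemy, so query-building code no longer depends on db.
    stmt = sa.select(Conversation).where(
        Conversation.app_id == app_model.id,
        Conversation.is_deleted.is_(False),
    )
    rows = db.session.scalars(stmt).all()  # execution still goes through the session
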
@@ -4,6 +4,7 @@ from argparse import ArgumentTypeError
from collections.abc import Sequence
from typing import Literal, cast

import sqlalchemy as sa
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, marshal_with, reqparse

@@ -211,13 +212,13 @@ class DatasetDocumentListApi(Resource):

        if sort == "hit_count":
            sub_query = (
                db.select(DocumentSegment.document_id, db.func.sum(DocumentSegment.hit_count).label("total_hit_count"))
                sa.select(DocumentSegment.document_id, sa.func.sum(DocumentSegment.hit_count).label("total_hit_count"))
                .group_by(DocumentSegment.document_id)
                .subquery()
            )

            query = query.outerjoin(sub_query, sub_query.c.document_id == Document.id).order_by(
                sort_logic(db.func.coalesce(sub_query.c.total_hit_count, 0)),
                sort_logic(sa.func.coalesce(sub_query.c.total_hit_count, 0)),
                sort_logic(Document.position),
            )
        elif sort == "created_at":
@@ -14,7 +14,10 @@ from controllers.console.wraps import (
from extensions.ext_database import db
from libs.login import login_required
from models.dataset import PipelineCustomizedTemplate
from services.entities.knowledge_entities.rag_pipeline_entities import PipelineTemplateInfoEntity
from services.entities.knowledge_entities.rag_pipeline_entities import (
    PipelineBuiltInTemplateEntity,
    PipelineTemplateInfoEntity,
)
from services.rag_pipeline.rag_pipeline import RagPipelineService

logger = logging.getLogger(__name__)

@@ -26,12 +29,6 @@ def _validate_name(name):
    return name


def _validate_description_length(description):
    if len(description) > 400:
        raise ValueError("Description cannot exceed 400 characters.")
    return description


class PipelineTemplateListApi(Resource):
    @setup_required
    @login_required

@@ -146,6 +143,186 @@ class PublishCustomizedPipelineTemplateApi(Resource):
        return {"result": "success"}


class PipelineTemplateInstallApi(Resource):
    """API endpoint for installing built-in pipeline templates"""

    def post(self):
        """
        Install a built-in pipeline template

        Args:
            template_id: The template ID from URL parameter

        Returns:
            Success response or error with appropriate HTTP status
        """
        try:
            # Extract and validate Bearer token
            auth_token = self._extract_bearer_token()

            # Parse and validate request parameters
            template_args = self._parse_template_args()

            # Process uploaded template file
            file_content = self._process_template_file()

            # Create template entity
            pipeline_built_in_template_entity = PipelineBuiltInTemplateEntity(**template_args)

            # Install the template
            rag_pipeline_service = RagPipelineService()
            rag_pipeline_service.install_built_in_pipeline_template(
                pipeline_built_in_template_entity, file_content, auth_token
            )

            return {"result": "success", "message": "Template installed successfully"}, 200

        except ValueError as e:
            logger.exception("Validation error in template installation")
            return {"error": str(e)}, 400
        except Exception as e:
            logger.exception("Unexpected error in template installation")
            return {"error": "An unexpected error occurred during template installation"}, 500

    def _extract_bearer_token(self) -> str:
        """
        Extract and validate Bearer token from Authorization header

        Returns:
            The extracted token string

        Raises:
            ValueError: If token is missing or invalid
        """
        auth_header = request.headers.get("Authorization", "").strip()

        if not auth_header:
            raise ValueError("Authorization header is required")

        if not auth_header.startswith("Bearer "):
            raise ValueError("Authorization header must start with 'Bearer '")

        token_parts = auth_header.split(" ", 1)
        if len(token_parts) != 2:
            raise ValueError("Invalid Authorization header format")

        auth_token = token_parts[1].strip()
        if not auth_token:
            raise ValueError("Bearer token cannot be empty")

        return auth_token

    def _parse_template_args(self) -> dict:
        """
        Parse and validate template arguments from form data

        Args:
            template_id: The template ID from URL

        Returns:
            Dictionary of validated template arguments
        """
        # Use reqparse for consistent parameter parsing
        parser = reqparse.RequestParser()

        parser.add_argument(
            "template_id",
            type=str,
            location="form",
            required=False,
            help="Template ID for updating existing template"
        )
        parser.add_argument(
            "language",
            type=str,
            location="form",
            required=True,
            default="en-US",
            choices=["en-US", "zh-CN", "ja-JP"],
            help="Template language code"
        )
        parser.add_argument(
            "name",
            type=str,
            location="form",
            required=True,
            default="New Pipeline Template",
            help="Template name (1-200 characters)"
        )
        parser.add_argument(
            "description",
            type=str,
            location="form",
            required=False,
            default="",
            help="Template description (max 1000 characters)"
        )

        args = parser.parse_args()

        # Additional validation
        if args.get("name"):
            args["name"] = self._validate_name(args["name"])

        if args.get("description") and len(args["description"]) > 1000:
            raise ValueError("Description must not exceed 1000 characters")

        # Filter out None values
        return {k: v for k, v in args.items() if v is not None}

    def _validate_name(self, name: str) -> str:
        """
        Validate template name

        Args:
            name: Template name to validate

        Returns:
            Validated and trimmed name

        Raises:
            ValueError: If name is invalid
        """
        name = name.strip()
        if not name or len(name) < 1 or len(name) > 200:
            raise ValueError("Template name must be between 1 and 200 characters")
        return name

    def _process_template_file(self) -> str:
        """
        Process and validate uploaded template file

        Returns:
            File content as string

        Raises:
            ValueError: If file is missing or invalid
        """
        if "file" not in request.files:
            raise ValueError("Template file is required")

        file = request.files["file"]

        # Validate file
        if not file or not file.filename:
            raise ValueError("No file selected")

        filename = file.filename.strip()
        if not filename:
            raise ValueError("File name cannot be empty")

        # Check file extension
        if not filename.lower().endswith(".pipeline"):
            raise ValueError("Template file must be a pipeline file (.pipeline)")

        try:
            file_content = file.read().decode("utf-8")
        except UnicodeDecodeError:
            raise ValueError("Template file must be valid UTF-8 text")

        return file_content


api.add_resource(
    PipelineTemplateListApi,
    "/rag/pipeline/templates",
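Note: a minimal client-side sketch (not part of the commit) of exercising the new install endpoint, assuming the requests library; the host and the exact route for PipelineTemplateInstallApi are illustrative, since its api.add_resource registration is not shown in this diff:

    import requests

    API_BASE = "http://localhost:5001"  # assumed host; adjust to your deployment
    TOKEN = "my-secret-token"  # must match UPLOAD_KNOWLEDGE_PIPELINE_TEMPLATE_TOKEN

    with open("my_template.pipeline", "rb") as fh:
        resp = requests.post(
            f"{API_BASE}/rag/pipeline/templates/install",  # hypothetical route
            headers={"Authorization": f"Bearer {TOKEN}"},
            data={  # form fields consumed by _parse_template_args
                "language": "en-US",
                "name": "New Pipeline Template",
                "description": "Demo template",
            },
            files={"file": ("my_template.pipeline", fh)},  # checked by _process_template_file
        )
    print(resp.status_code, resp.json())
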
@@ -123,7 +123,7 @@ class OceanBaseVector(BaseVector):
        # Get parser from config or use default ik parser
        parser_name = dify_config.OCEANBASE_FULLTEXT_PARSER or "ik"

        allowed_parsers = ["ik", "japanese_ftparser", "thai_ftparser"]
        allowed_parsers = ["ngram", "beng", "space", "ngram2", "ik", "japanese_ftparser", "thai_ftparser"]
        if parser_name not in allowed_parsers:
            raise ValueError(
                f"Invalid OceanBase full-text parser: {parser_name}. "
@@ -1,9 +1,11 @@
import contextvars
import logging
from collections.abc import Generator, Mapping, Sequence
from concurrent.futures import Future, ThreadPoolExecutor, as_completed
from datetime import UTC, datetime
from typing import TYPE_CHECKING, Any, NewType, cast

from flask import Flask, current_app
from typing_extensions import TypeIs

from core.variables import IntegerVariable, NoneSegment

@@ -35,6 +37,7 @@ from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig
from core.workflow.nodes.base.node import Node
from core.workflow.nodes.iteration.entities import ErrorHandleMode, IterationNodeData
from libs.datetime_utils import naive_utc_now
from libs.flask_utils import preserve_flask_contexts

from .exc import (
    InvalidIteratorValueError,

@@ -239,6 +242,8 @@ class IterationNode(Node):
                    self._execute_single_iteration_parallel,
                    index=index,
                    item=item,
                    flask_app=current_app._get_current_object(),  # type: ignore
                    context_vars=contextvars.copy_context(),
                )
                future_to_index[future] = index

@@ -281,26 +286,29 @@
self,
index: int,
item: object,
flask_app: Flask,
context_vars: contextvars.Context,
) -> tuple[datetime, list[GraphNodeEventBase], object | None, int]:
"""Execute a single iteration in parallel mode and return results."""
iter_start_at = datetime.now(UTC).replace(tzinfo=None)
events: list[GraphNodeEventBase] = []
outputs_temp: list[object] = []
with preserve_flask_contexts(flask_app=flask_app, context_vars=context_vars):
iter_start_at = datetime.now(UTC).replace(tzinfo=None)
events: list[GraphNodeEventBase] = []
outputs_temp: list[object] = []

graph_engine = self._create_graph_engine(index, item)
graph_engine = self._create_graph_engine(index, item)

# Collect events instead of yielding them directly
for event in self._run_single_iter(
variable_pool=graph_engine.graph_runtime_state.variable_pool,
outputs=outputs_temp,
graph_engine=graph_engine,
):
events.append(event)
# Collect events instead of yielding them directly
for event in self._run_single_iter(
variable_pool=graph_engine.graph_runtime_state.variable_pool,
outputs=outputs_temp,
graph_engine=graph_engine,
):
events.append(event)

# Get the output value from the temporary outputs list
output_value = outputs_temp[0] if outputs_temp else None
# Get the output value from the temporary outputs list
output_value = outputs_temp[0] if outputs_temp else None

return iter_start_at, events, output_value, graph_engine.graph_runtime_state.total_tokens
return iter_start_at, events, output_value, graph_engine.graph_runtime_state.total_tokens

def _handle_iteration_success(
self,
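Note: the parallel path above hands each worker the real Flask app object plus a freshly copied contextvars.Context, then re-enters both inside the thread via preserve_flask_contexts. A minimal sketch of the same propagation pattern (not part of the commit) using only the standard library and Flask; do_work and run_parallel are illustrative stand-ins for the per-iteration body:

    import contextvars
    from concurrent.futures import ThreadPoolExecutor

    from flask import Flask, current_app

    def _worker(flask_app: Flask, ctx: contextvars.Context, item: object) -> object:
        # Re-enter the app context and the snapshotted context variables in the
        # worker thread, mirroring what preserve_flask_contexts provides.
        with flask_app.app_context():
            return ctx.run(do_work, item)

    def run_parallel(items: list[object]) -> list[object]:
        flask_app = current_app._get_current_object()  # unwrap the proxy, as above
        with ThreadPoolExecutor() as pool:
            # Copy the context once per task: a single Context object cannot be
            # entered by two threads at the same time.
            futures = [
                pool.submit(_worker, flask_app, contextvars.copy_context(), item)
                for item in items
            ]
            return [f.result() for f in futures]
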
@@ -0,0 +1,37 @@
"""remove-builtin-template-user

Revision ID: bf0bcbf45396
Revises: 68519ad5cd18
Create Date: 2025-09-25 16:50:32.245503

"""
from alembic import op
import models as models
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'bf0bcbf45396'
down_revision = '68519ad5cd18'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
        batch_op.drop_column('updated_by')
        batch_op.drop_column('created_by')

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
        batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False))
        batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True))

    # ### end Alembic commands ###
@@ -910,7 +910,7 @@ class AppDatasetJoin(Base):
    id = mapped_column(StringUUID, primary_key=True, nullable=False, server_default=sa.text("uuid_generate_v4()"))
    app_id = mapped_column(StringUUID, nullable=False)
    dataset_id = mapped_column(StringUUID, nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=db.func.current_timestamp())
    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp())

    @property
    def app(self):

@@ -931,7 +931,7 @@ class DatasetQuery(Base):
    source_app_id = mapped_column(StringUUID, nullable=True)
    created_by_role = mapped_column(String, nullable=False)
    created_by = mapped_column(StringUUID, nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=db.func.current_timestamp())
    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp())


class DatasetKeywordTable(Base):

@@ -1239,15 +1239,6 @@ class PipelineBuiltInTemplate(Base):  # type: ignore[name-defined]
    language = db.Column(db.String(255), nullable=False)
    created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
    updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
    created_by = db.Column(StringUUID, nullable=False)
    updated_by = db.Column(StringUUID, nullable=True)

    @property
    def created_user_name(self):
        account = db.session.query(Account).where(Account.id == self.created_by).first()
        if account:
            return account.name
        return ""


class PipelineCustomizedTemplate(Base):  # type: ignore[name-defined]

@@ -1731,7 +1731,7 @@ class MessageChain(Base):
    type: Mapped[str] = mapped_column(String(255), nullable=False)
    input = mapped_column(sa.Text, nullable=True)
    output = mapped_column(sa.Text, nullable=True)
    created_at = mapped_column(sa.DateTime, nullable=False, server_default=db.func.current_timestamp())
    created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp())


class MessageAgentThought(Base):

@@ -1769,7 +1769,7 @@ class MessageAgentThought(Base):
    latency: Mapped[float | None] = mapped_column(sa.Float, nullable=True)
    created_by_role = mapped_column(String, nullable=False)
    created_by = mapped_column(StringUUID, nullable=False)
    created_at = mapped_column(sa.DateTime, nullable=False, server_default=db.func.current_timestamp())
    created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp())

    @property
    def files(self) -> list[Any]:

@@ -1872,7 +1872,7 @@ class DatasetRetrieverResource(Base):
    index_node_hash = mapped_column(sa.Text, nullable=True)
    retriever_from = mapped_column(sa.Text, nullable=False)
    created_by = mapped_column(StringUUID, nullable=False)
    created_at = mapped_column(sa.DateTime, nullable=False, server_default=db.func.current_timestamp())
    created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp())


class Tag(Base):
@@ -2,6 +2,7 @@ import json
import logging
from typing import TypedDict, cast

import sqlalchemy as sa
from flask_sqlalchemy.pagination import Pagination

from configs import dify_config

@@ -65,7 +66,7 @@ class AppService:
            return None

        app_models = db.paginate(
            db.select(App).where(*filters).order_by(App.created_at.desc()),
            sa.select(App).where(*filters).order_by(App.created_at.desc()),
            page=args["page"],
            per_page=args["limit"],
            error_out=False,
@@ -115,12 +115,12 @@ class DatasetService:
        # Check if permitted_dataset_ids is not empty to avoid WHERE false condition
        if permitted_dataset_ids and len(permitted_dataset_ids) > 0:
            query = query.where(
                db.or_(
                sa.or_(
                    Dataset.permission == DatasetPermissionEnum.ALL_TEAM,
                    db.and_(
                    sa.and_(
                        Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id
                    ),
                    db.and_(
                    sa.and_(
                        Dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM,
                        Dataset.id.in_(permitted_dataset_ids),
                    ),

@@ -128,9 +128,9 @@ class DatasetService:
            )
        else:
            query = query.where(
                db.or_(
                sa.or_(
                    Dataset.permission == DatasetPermissionEnum.ALL_TEAM,
                    db.and_(
                    sa.and_(
                        Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id
                    ),
                )

@@ -1879,7 +1879,7 @@ class DocumentService:
        # for notion_info in notion_info_list:
        # workspace_id = notion_info.workspace_id
        # data_source_binding = DataSourceOauthBinding.query.filter(
        # db.and_(
        # sa.and_(
        # DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
        # DataSourceOauthBinding.provider == "notion",
        # DataSourceOauthBinding.disabled == False,
@@ -128,3 +128,10 @@ class KnowledgeConfiguration(BaseModel):
        if v is None:
            return ""
        return v


class PipelineBuiltInTemplateEntity(BaseModel):
    template_id: str | None = None
    name: str
    description: str
    language: str
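Note: PipelineBuiltInTemplateEntity is a plain pydantic model, so the controller can build it directly from the parsed form fields. A minimal sketch (not part of the commit), assuming the dict shape produced by _parse_template_args above:

    args = {
        "language": "en-US",
        "name": "New Pipeline Template",
        "description": "",
        # "template_id" omitted: defaults to None, which means "create new"
    }
    entity = PipelineBuiltInTemplateEntity(**args)
    assert entity.template_id is None
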
@@ -471,7 +471,7 @@ class PluginMigration:
        total_failed_tenant = 0
        while True:
            # paginate
            tenants = db.paginate(db.select(Tenant).order_by(Tenant.created_at.desc()), page=page, per_page=100)
            tenants = db.paginate(sa.select(Tenant).order_by(Tenant.created_at.desc()), page=page, per_page=100)
            if tenants.items is None or len(tenants.items) == 0:
                break

@@ -74,5 +74,4 @@ class DatabasePipelineTemplateRetrieval(PipelineTemplateRetrievalBase):
            "chunk_structure": pipeline_template.chunk_structure,
            "export_data": pipeline_template.yaml_content,
            "graph": graph_data,
            "created_by": pipeline_template.created_user_name,
        }
@@ -8,6 +8,7 @@ from datetime import UTC, datetime
from typing import Any, Union, cast
from uuid import uuid4

import yaml
from flask_login import current_user
from sqlalchemy import func, or_, select
from sqlalchemy.orm import Session, sessionmaker

@@ -60,6 +61,7 @@ from models.dataset import (  # type: ignore
    Document,
    DocumentPipelineExecutionLog,
    Pipeline,
    PipelineBuiltInTemplate,
    PipelineCustomizedTemplate,
    PipelineRecommendedPlugin,
)

@@ -76,6 +78,7 @@ from repositories.factory import DifyAPIRepositoryFactory
from services.datasource_provider_service import DatasourceProviderService
from services.entities.knowledge_entities.rag_pipeline_entities import (
    KnowledgeConfiguration,
    PipelineBuiltInTemplateEntity,
    PipelineTemplateInfoEntity,
)
from services.errors.app import WorkflowHashNotEqualError
@@ -1454,3 +1457,140 @@ class RagPipelineService:
        if not pipeline:
            raise ValueError("Pipeline not found")
        return pipeline

    def install_built_in_pipeline_template(
        self, args: PipelineBuiltInTemplateEntity, file_content: str, auth_token: str
    ) -> None:
        """
        Install built-in pipeline template

        Args:
            args: Pipeline built-in template entity with template metadata
            file_content: YAML content of the pipeline template
            auth_token: Authentication token for authorization

        Raises:
            ValueError: If validation fails or template processing errors occur
        """
        # Validate authentication
        self._validate_auth_token(auth_token)

        # Parse and validate template content
        pipeline_template_dsl = self._parse_template_content(file_content)

        # Extract template metadata
        icon = self._extract_icon_metadata(pipeline_template_dsl)
        chunk_structure = self._extract_chunk_structure(pipeline_template_dsl)

        # Prepare template data
        template_data = {
            "name": args.name,
            "description": args.description,
            "chunk_structure": chunk_structure,
            "icon": icon,
            "language": args.language,
            "yaml_content": file_content,
        }

        # Use transaction for database operations
        try:
            if args.template_id:
                self._update_existing_template(args.template_id, template_data)
            else:
                self._create_new_template(template_data)
            db.session.commit()
        except Exception as e:
            db.session.rollback()
            raise ValueError(f"Failed to install pipeline template: {str(e)}")

    def _validate_auth_token(self, auth_token: str) -> None:
        """Validate the authentication token"""
        config_auth_token = dify_config.UPLOAD_KNOWLEDGE_PIPELINE_TEMPLATE_TOKEN
        if not config_auth_token:
            raise ValueError("Auth token configuration is required")
        if config_auth_token != auth_token:
            raise ValueError("Auth token is incorrect")

    def _parse_template_content(self, file_content: str) -> dict:
        """Parse and validate YAML template content"""
        try:
            pipeline_template_dsl = yaml.safe_load(file_content)
        except yaml.YAMLError as e:
            raise ValueError(f"Invalid YAML content: {str(e)}")

        if not pipeline_template_dsl:
            raise ValueError("Pipeline template DSL is required")

        return pipeline_template_dsl

    def _extract_icon_metadata(self, pipeline_template_dsl: dict) -> dict:
        """Extract icon metadata from template DSL"""
        rag_pipeline_info = pipeline_template_dsl.get("rag_pipeline", {})

        return {
            "icon": rag_pipeline_info.get("icon", "📙"),
            "icon_type": rag_pipeline_info.get("icon_type", "emoji"),
            "icon_background": rag_pipeline_info.get("icon_background", "#FFEAD5"),
            "icon_url": rag_pipeline_info.get("icon_url"),
        }

    def _extract_chunk_structure(self, pipeline_template_dsl: dict) -> str:
        """Extract chunk structure from template DSL"""
        nodes = pipeline_template_dsl.get("workflow", {}).get("graph", {}).get("nodes", [])

        # Use generator expression for efficiency
        chunk_structure = next(
            (
                node.get("data", {}).get("chunk_structure")
                for node in nodes
                if node.get("data", {}).get("type") == NodeType.KNOWLEDGE_INDEX.value
            ),
            None
        )

        if not chunk_structure:
            raise ValueError("Chunk structure is required in template")

        return chunk_structure

    def _update_existing_template(self, template_id: str, template_data: dict) -> None:
        """Update an existing pipeline template"""
        pipeline_built_in_template = (
            db.session.query(PipelineBuiltInTemplate)
            .filter(PipelineBuiltInTemplate.id == template_id)
            .first()
        )

        if not pipeline_built_in_template:
            raise ValueError(f"Pipeline built-in template not found: {template_id}")

        # Update template fields
        for key, value in template_data.items():
            setattr(pipeline_built_in_template, key, value)

        db.session.add(pipeline_built_in_template)

    def _create_new_template(self, template_data: dict) -> None:
        """Create a new pipeline template"""
        # Get the next available position
        position = self._get_next_position(template_data["language"])

        # Add additional fields for new template
        template_data.update({
            "position": position,
            "install_count": 0,
            "copyright": dify_config.KNOWLEDGE_PIPELINE_TEMPLATE_COPYRIGHT,
            "privacy_policy": dify_config.KNOWLEDGE_PIPELINE_TEMPLATE_PRIVACY_POLICY,
        })

        new_template = PipelineBuiltInTemplate(**template_data)
        db.session.add(new_template)

    def _get_next_position(self, language: str) -> int:
        """Get the next available position for a template in the specified language"""
        max_position = (
            db.session.query(func.max(PipelineBuiltInTemplate.position))
            .filter(PipelineBuiltInTemplate.language == language)
            .scalar()
        )
        return (max_position or 0) + 1
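Note: the three _extract helpers above walk a dict produced by yaml.safe_load. A minimal sketch (not part of the commit) of the DSL shape they expect, written as the equivalent Python dict; the node type string and the chunk_structure value are illustrative stand-ins for NodeType.KNOWLEDGE_INDEX.value and a real chunk structure:

    pipeline_template_dsl = {
        "rag_pipeline": {
            # read by _extract_icon_metadata; every key has a default
            "icon": "📙",
            "icon_type": "emoji",
            "icon_background": "#FFEAD5",
            "icon_url": None,
        },
        "workflow": {
            "graph": {
                "nodes": [
                    {
                        # _extract_chunk_structure scans nodes for the knowledge
                        # index node and takes its chunk_structure
                        "data": {
                            "type": "knowledge-index",  # assumed literal of NodeType.KNOWLEDGE_INDEX
                            "chunk_structure": "text_model",  # illustrative value
                        },
                    },
                ],
            },
        },
    }
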
@@ -1,5 +1,6 @@
import uuid

import sqlalchemy as sa
from flask_login import current_user
from sqlalchemy import func, select
from werkzeug.exceptions import NotFound

@@ -18,7 +19,7 @@ class TagService:
            .where(Tag.type == tag_type, Tag.tenant_id == current_tenant_id)
        )
        if keyword:
            query = query.where(db.and_(Tag.name.ilike(f"%{keyword}%")))
            query = query.where(sa.and_(Tag.name.ilike(f"%{keyword}%")))
        query = query.group_by(Tag.id, Tag.type, Tag.name, Tag.created_at)
        results: list = query.order_by(Tag.created_at.desc()).all()
        return results
@@ -262,6 +262,14 @@ class VariableTruncator:
            target_length = self._array_element_limit

        for i, item in enumerate(value):
            # Dirty fix:
            # The output of `Start` node may contain list of `File` elements,
            # causing `AssertionError` while invoking `_truncate_json_primitives`.
            #
            # This check ensures that `list[File]` are handled separately
            if isinstance(item, File):
                truncated_value.append(item)
                continue
            if i >= target_length:
                return _PartResult(truncated_value, used_size, True)
            if i > 0:
@@ -2,6 +2,7 @@ import logging
import time

import click
import sqlalchemy as sa
from celery import shared_task
from sqlalchemy import select


@@ -51,7 +52,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
    data_source_binding = (
        db.session.query(DataSourceOauthBinding)
        .where(
            db.and_(
            sa.and_(
                DataSourceOauthBinding.tenant_id == document.tenant_id,
                DataSourceOauthBinding.provider == "notion",
                DataSourceOauthBinding.disabled == False,
@@ -588,3 +588,11 @@ class TestIntegrationScenarios:
        if isinstance(result.result, ObjectSegment):
            result_size = truncator.calculate_json_size(result.result.value)
            assert result_size <= original_size

    def test_file_and_array_file_variable_mapping(self, file):
        truncator = VariableTruncator(string_length_limit=30, array_element_limit=3, max_size_bytes=300)

        mapping = {"array_file": [file]}
        truncated_mapping, truncated = truncator.truncate_variable_mapping(mapping)
        assert truncated is False
        assert truncated_mapping == mapping
@@ -655,6 +655,8 @@ LINDORM_USING_UGC=True
LINDORM_QUERY_TIMEOUT=1

# OceanBase Vector configuration, only available when VECTOR_STORE is `oceanbase`
# Built-in fulltext parsers are `ngram`, `beng`, `space`, `ngram2`, `ik`
# External fulltext parsers (require plugin installation) are `japanese_ftparser`, `thai_ftparser`
OCEANBASE_VECTOR_HOST=oceanbase
OCEANBASE_VECTOR_PORT=2881
OCEANBASE_VECTOR_USER=root@test
@@ -35,7 +35,7 @@ import { useStore as useAppStore } from '@/app/components/app/store'
import { useAppContext } from '@/context/app-context'
import useTimestamp from '@/hooks/use-timestamp'
import Tooltip from '@/app/components/base/tooltip'
import { CopyIcon } from '@/app/components/base/copy-icon'
import CopyIcon from '@/app/components/base/copy-icon'
import { buildChatItemTree, getThreadMessages } from '@/app/components/base/chat/utils'
import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils'
import cn from '@/utils/classnames'
@@ -15,7 +15,7 @@ type Props = {

const prefixEmbedded = 'appOverview.overview.appInfo.embedded'

export const CopyIcon = ({ content }: Props) => {
const CopyIcon = ({ content }: Props) => {
  const { t } = useTranslation()
  const [isCopied, setIsCopied] = useState<boolean>(false)

@@ -63,7 +63,7 @@ const useThinkTimer = (children: any) => {
  return { elapsedTime, isComplete }
}

export const ThinkBlock = ({ children, ...props }: any) => {
const ThinkBlock = ({ children, ...props }: React.ComponentProps<'details'>) => {
  const { elapsedTime, isComplete } = useThinkTimer(children)
  const displayContent = removeEndThink(children)
  const { t } = useTranslation()
@@ -53,7 +53,7 @@ const SearchInput: FC<SearchInputProps> = ({
        }}
        onCompositionEnd={(e) => {
          isComposing.current = false
          onChange(e.data)
          onChange(e.currentTarget.value)
        }}
        onFocus={() => setFocus(true)}
        onBlur={() => setFocus(false)}
@@ -3,7 +3,7 @@ import { SVG } from '@svgdotjs/svg.js'
import DOMPurify from 'dompurify'
import ImagePreview from '@/app/components/base/image-uploader/image-preview'

export const SVGRenderer = ({ content }: { content: string }) => {
const SVGRenderer = ({ content }: { content: string }) => {
  const svgRef = useRef<HTMLDivElement>(null)
  const [imagePreview, setImagePreview] = useState('')
  const [windowSize, setWindowSize] = useState({
@@ -1,9 +1,18 @@
import { usePipelineTemplateList } from '@/service/use-pipeline'
import TemplateCard from './template-card'
import CreateCard from './create-card'
import { useI18N } from '@/context/i18n'
import { useMemo } from 'react'
import { LanguagesSupported } from '@/i18n-config/language'

const BuiltInPipelineList = () => {
  const { data: pipelineList, isLoading } = usePipelineTemplateList({ type: 'built-in' })
  const { locale } = useI18N()
  const language = useMemo(() => {
    if (['zh-Hans', 'ja-JP'].includes(locale))
      return locale
    return LanguagesSupported[0]
  }, [locale])
  const { data: pipelineList, isLoading } = usePipelineTemplateList({ type: 'built-in', language })
  const list = pipelineList?.pipeline_templates || []

  return (
@@ -146,7 +146,6 @@ const PluginItem: FC<Props> = ({
        {/* Organization & Name */}
        <div className='flex grow items-center overflow-hidden'>
          <OrgInfo
            className='mt-0.5'
            orgName={orgName}
            packageName={name}
            packageNameClassName='w-auto max-w-[150px]'

@@ -154,8 +153,8 @@ const PluginItem: FC<Props> = ({
          {category === PluginType.extension && (
            <>
              <div className='system-xs-regular mx-2 text-text-quaternary'>·</div>
              <div className='system-xs-regular flex space-x-1 overflow-hidden text-text-tertiary'>
                <RiLoginCircleLine className='h-4 w-4 shrink-0' />
              <div className='system-xs-regular flex items-center gap-x-1 overflow-hidden text-text-tertiary'>
                <RiLoginCircleLine className='size-3 shrink-0' />
                <span
                  className='truncate'
                  title={t('plugin.endpointsEnabled', { num: endpoints_active })}

@@ -184,7 +183,7 @@ const PluginItem: FC<Props> = ({
          && <>
            <a href={getMarketplaceUrl(`/plugins/${author}/${name}`, { theme })} target='_blank' className='flex items-center gap-0.5'>
              <div className='system-2xs-medium-uppercase text-text-tertiary'>{t('plugin.from')} <span className='text-text-secondary'>marketplace</span></div>
              <RiArrowRightUpLine className='h-3 w-3 text-text-tertiary' />
              <RiArrowRightUpLine className='h-3 w-3 text-text-secondary' />
            </a>
          </>
        }
@@ -244,9 +244,8 @@ const ProviderDetail = ({
            <div className="flex h-5 items-center">
              <Title title={collection.label[language]} />
            </div>
            <div className='mb-1 flex h-4 items-center justify-between'>
            <div className='mb-1 mt-0.5 flex h-4 items-center justify-between'>
              <OrgInfo
                className="mt-0.5"
                packageNameClassName='w-auto'
                orgName={collection.author}
                packageName={collection.name}
@@ -30,7 +30,7 @@ enum GeneratorView {
  result = 'result',
}

export const JsonSchemaGenerator: FC<JsonSchemaGeneratorProps> = ({
const JsonSchemaGenerator: FC<JsonSchemaGeneratorProps> = ({
  onApply,
  crossAxisOffset,
}) => {
@@ -1,12 +1,18 @@
@import "preflight.css";
@tailwind base;
@tailwind components;


@import '../../themes/light.css';
@import '../../themes/dark.css';
@import "../../themes/manual-light.css";
@import "../../themes/manual-dark.css";

@import "../components/base/button/index.css";
@import "../components/base/action-button/index.css";
@import "../components/base/modal/index.css";

@tailwind base;
@tailwind components;

html {
  color-scheme: light;
}

@@ -680,10 +686,6 @@ button:focus-within {
  display: none;
}

@import "../components/base/button/index.css";
@import "../components/base/action-button/index.css";
@import "../components/base/modal/index.css";

@tailwind utilities;

@layer utilities {
@@ -17,6 +17,7 @@ export enum DatasourceType {

export type PipelineTemplateListParams = {
  type: 'built-in' | 'customized'
  language?: string
}

export type PipelineTemplate = {
@@ -91,12 +91,10 @@ const remoteImageURLs = [hasSetWebPrefix ? new URL(`${process.env.NEXT_PUBLIC_WE
/** @type {import('next').NextConfig} */
const nextConfig = {
  basePath: process.env.NEXT_PUBLIC_BASE_PATH || '',
  webpack: (config, { dev, isServer }) => {
    if (dev) {
      config.plugins.push(codeInspectorPlugin({ bundler: 'webpack' }))
    }

    return config
  turbopack: {
    rules: codeInspectorPlugin({
      bundler: 'turbopack'
    })
  },
  productionBrowserSourceMaps: false, // enable browser source map generation during the production build
  // Configure pageExtensions to include md and mdx

@@ -112,6 +110,10 @@ const nextConfig = {
    })),
  },
  experimental: {
    optimizePackageImports: [
      '@remixicon/react',
      '@heroicons/react'
    ],
  },
  // fix all before production. Now it slow the develop speed.
  eslint: {
@@ -19,7 +19,7 @@
    "and_qq >= 14.9"
  ],
  "scripts": {
    "dev": "cross-env NODE_OPTIONS='--inspect' next dev",
    "dev": "cross-env NODE_OPTIONS='--inspect' next dev --turbopack",
    "build": "next build",
    "build:docker": "next build && node scripts/optimize-standalone.js",
    "start": "cp -r .next/static .next/standalone/.next/static && cp -r public .next/standalone/public && cross-env PORT=$npm_config_port HOSTNAME=$npm_config_host node .next/standalone/server.js",

@@ -203,7 +203,7 @@
    "autoprefixer": "^10.4.20",
    "babel-loader": "^10.0.0",
    "bing-translate-api": "^4.0.2",
    "code-inspector-plugin": "^0.18.1",
    "code-inspector-plugin": "1.2.9",
    "cross-env": "^7.0.3",
    "eslint": "^9.35.0",
    "eslint-config-next": "15.5.0",
@@ -519,8 +519,8 @@ importers:
        specifier: ^4.0.2
        version: 4.1.0
      code-inspector-plugin:
        specifier: ^0.18.1
        version: 0.18.3
        specifier: 1.2.9
        version: 1.2.9
      cross-env:
        specifier: ^7.0.3
        version: 7.0.3

@@ -1372,6 +1372,24 @@ packages:
  '@clack/prompts@0.11.0':
    resolution: {integrity: sha512-pMN5FcrEw9hUkZA4f+zLlzivQSeQf5dRGJjSUbvVYDLvpKCdQx5OaknvKzgbtXOizhP+SJJJjqEbOe55uKKfAw==}

  '@code-inspector/core@1.2.9':
    resolution: {integrity: sha512-A1w+G73HlTB6S8X6sA6tT+ziWHTAcTyH+7FZ1Sgd3ZLXF/E/jT+hgRbKposjXMwxcbodRc6hBG6UyiV+VxwE6Q==}

  '@code-inspector/esbuild@1.2.9':
    resolution: {integrity: sha512-DuyfxGupV43CN8YElIqynAniBtE86i037+3OVJYrm3jlJscXzbV98/kOzvu+VJQQvElcDgpgD6C/aGmPvFEiUg==}

  '@code-inspector/mako@1.2.9':
    resolution: {integrity: sha512-8N+MHdr64AnthLB4v+YGe8/9bgog3BnkxIW/fqX5iVS0X06mF7X1pxfZOD2bABVtv1tW25lRtNs5AgvYJs0vpg==}

  '@code-inspector/turbopack@1.2.9':
    resolution: {integrity: sha512-UVOUbqU6rpi5eOkrFamKrdeSWb0/OFFJQBaxbgs1RK5V5f4/iVwC5KjO2wkjv8cOGU4EppLfBVSBI1ysOo8S5A==}

  '@code-inspector/vite@1.2.9':
    resolution: {integrity: sha512-saIokJ3o3SdrHEgTEg1fbbowbKfh7J4mYtu0i1mVfah1b1UfdCF/iFHTEJ6SADMiY47TeNZTg0TQWTlU1AWPww==}

  '@code-inspector/webpack@1.2.9':
    resolution: {integrity: sha512-9YEykVrOIc0zMV7pyTyZhCprjScjn6gPPmxb4/OQXKCrP2fAm+NB188rg0s95e4sM7U3qRUpPA4NUH5F7Ogo+g==}

  '@cspotcode/source-map-support@0.8.1':
    resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==}
    engines: {node: '>=12'}

@@ -4425,11 +4443,8 @@ packages:
    resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==}
    engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'}

  code-inspector-core@0.18.3:
    resolution: {integrity: sha512-60pT2cPoguMTUYdN1MMpjoPUnuF0ud/u7M2y+Vqit/bniLEit9dySEWAVxLU/Ukc5ILrDeLKEttc6fCMl9RUrA==}

  code-inspector-plugin@0.18.3:
    resolution: {integrity: sha512-d9oJXZUsnvfTaQDwFmDNA2F+AR/TXIxWg1rr8KGcEskltR2prbZsfuu1z70EAn4khpx0smfi/PvIIwNJQ7FAMw==}
  code-inspector-plugin@1.2.9:
    resolution: {integrity: sha512-PGp/AQ03vaajimG9rn5+eQHGifrym5CSNLCViPtwzot7FM3MqEkGNqcvimH0FVuv3wDOcP5KvETAUSLf1BE3HA==}

  collapse-white-space@2.1.0:
    resolution: {integrity: sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==}

@@ -5055,9 +5070,6 @@ packages:
  esast-util-from-js@2.0.1:
    resolution: {integrity: sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==}

  esbuild-code-inspector-plugin@0.18.3:
    resolution: {integrity: sha512-FaPt5eFMtW1oXMWqAcqfAJByNagP1V/R9dwDDLQO29JmryMF35+frskTqy+G53whmTaVi19+TCrFqhNbMZH5ZQ==}

  esbuild-register@3.6.0:
    resolution: {integrity: sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==}
    peerDependencies:

@@ -6413,8 +6425,8 @@ packages:
    resolution: {integrity: sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA==}
    engines: {node: '>=0.10'}

  launch-ide@1.0.1:
    resolution: {integrity: sha512-U7qBxSNk774PxWq4XbmRe0ThiIstPoa4sMH/OGSYxrFVvg8x3biXcF1fsH6wasDpEmEXMdINUrQhBdwsSgKyMg==}
  launch-ide@1.2.0:
    resolution: {integrity: sha512-7nXSPQOt3b2JT52Ge8jp4miFcY+nrUEZxNLWBzrEfjmByDTb9b5ytqMSwGhsNwY6Cntwop+6n7rWIFN0+S8PTw==}

  layout-base@1.0.2:
    resolution: {integrity: sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==}

@@ -8693,9 +8705,6 @@ packages:
  vfile@6.0.3:
    resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==}

  vite-code-inspector-plugin@0.18.3:
    resolution: {integrity: sha512-178H73vbDUHE+JpvfAfioUHlUr7qXCYIEa2YNXtzenFQGOjtae59P1jjcxGfa6pPHEnOoaitb13K+0qxwhi/WA==}

  vm-browserify@1.1.2:
    resolution: {integrity: sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==}

@@ -8754,9 +8763,6 @@ packages:
    engines: {node: '>= 10.13.0'}
    hasBin: true

  webpack-code-inspector-plugin@0.18.3:
    resolution: {integrity: sha512-3782rsJhBnRiw0IpR6EqnyGDQoiSq0CcGeLJ52rZXlszYCe8igXtcujq7OhI0byaivWQ1LW7sXKyMEoVpBhq0w==}

  webpack-dev-middleware@6.1.3:
    resolution: {integrity: sha512-A4ChP0Qj8oGociTs6UdlRUGANIGrCDL3y+pmQMc+dSsraXHCatFpmMey4mYELA+juqwUqwQsUgJJISXl1KWmiw==}
    engines: {node: '>= 14.15.0'}

@@ -9993,6 +9999,48 @@ snapshots:
      picocolors: 1.1.1
      sisteransi: 1.0.5

  '@code-inspector/core@1.2.9':
    dependencies:
      '@vue/compiler-dom': 3.5.17
      chalk: 4.1.2
      dotenv: 16.6.1
      launch-ide: 1.2.0
      portfinder: 1.0.37
    transitivePeerDependencies:
      - supports-color

  '@code-inspector/esbuild@1.2.9':
    dependencies:
      '@code-inspector/core': 1.2.9
    transitivePeerDependencies:
      - supports-color

  '@code-inspector/mako@1.2.9':
    dependencies:
      '@code-inspector/core': 1.2.9
    transitivePeerDependencies:
      - supports-color

  '@code-inspector/turbopack@1.2.9':
    dependencies:
      '@code-inspector/core': 1.2.9
      '@code-inspector/webpack': 1.2.9
    transitivePeerDependencies:
      - supports-color

  '@code-inspector/vite@1.2.9':
    dependencies:
      '@code-inspector/core': 1.2.9
      chalk: 4.1.1
    transitivePeerDependencies:
      - supports-color

  '@code-inspector/webpack@1.2.9':
    dependencies:
      '@code-inspector/core': 1.2.9
    transitivePeerDependencies:
      - supports-color

  '@cspotcode/source-map-support@0.8.1':
    dependencies:
      '@jridgewell/trace-mapping': 0.3.9

@@ -12799,7 +12847,7 @@ snapshots:

  '@vue/compiler-core@3.5.17':
    dependencies:
      '@babel/parser': 7.28.0
      '@babel/parser': 7.28.4
      '@vue/shared': 3.5.17
      entities: 4.5.0
      estree-walker: 2.0.2

@@ -13503,24 +13551,15 @@ snapshots:

  co@4.6.0: {}

  code-inspector-core@0.18.3:
  code-inspector-plugin@1.2.9:
    dependencies:
      '@vue/compiler-dom': 3.5.17
      '@code-inspector/core': 1.2.9
      '@code-inspector/esbuild': 1.2.9
      '@code-inspector/mako': 1.2.9
      '@code-inspector/turbopack': 1.2.9
      '@code-inspector/vite': 1.2.9
      '@code-inspector/webpack': 1.2.9
      chalk: 4.1.1
      dotenv: 16.6.1
      launch-ide: 1.0.1
      portfinder: 1.0.37
    transitivePeerDependencies:
      - supports-color

  code-inspector-plugin@0.18.3:
    dependencies:
      chalk: 4.1.1
      code-inspector-core: 0.18.3
      dotenv: 16.6.1
      esbuild-code-inspector-plugin: 0.18.3
      vite-code-inspector-plugin: 0.18.3
      webpack-code-inspector-plugin: 0.18.3
    transitivePeerDependencies:
      - supports-color

@@ -14160,12 +14199,6 @@ snapshots:
      esast-util-from-estree: 2.0.0
      vfile-message: 4.0.2

  esbuild-code-inspector-plugin@0.18.3:
    dependencies:
      code-inspector-core: 0.18.3
    transitivePeerDependencies:
      - supports-color

  esbuild-register@3.6.0(esbuild@0.25.0):
    dependencies:
      debug: 4.4.1

@@ -16020,7 +16053,7 @@ snapshots:
    dependencies:
      language-subtag-registry: 0.3.23

  launch-ide@1.0.1:
  launch-ide@1.2.0:
    dependencies:
      chalk: 4.1.2
      dotenv: 16.6.1

@@ -18779,12 +18812,6 @@ snapshots:
      '@types/unist': 3.0.3
      vfile-message: 4.0.2

  vite-code-inspector-plugin@0.18.3:
    dependencies:
      code-inspector-core: 0.18.3
    transitivePeerDependencies:
      - supports-color

  vm-browserify@1.1.2: {}

  void-elements@3.1.0: {}

@@ -18855,12 +18882,6 @@ snapshots:
      - bufferutil
      - utf-8-validate

  webpack-code-inspector-plugin@0.18.3:
    dependencies:
      code-inspector-core: 0.18.3
    transitivePeerDependencies:
      - supports-color

  webpack-dev-middleware@6.1.3(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)):
    dependencies:
      colorette: 2.0.20
@@ -40,8 +40,9 @@ const NAME_SPACE = 'pipeline'

export const PipelineTemplateListQueryKeyPrefix = [NAME_SPACE, 'template-list']
export const usePipelineTemplateList = (params: PipelineTemplateListParams) => {
  const { type, language } = params
  return useQuery<PipelineTemplateListResponse>({
    queryKey: [...PipelineTemplateListQueryKeyPrefix, params.type],
    queryKey: [...PipelineTemplateListQueryKeyPrefix, type, language],
    queryFn: () => {
      return get<PipelineTemplateListResponse>('/rag/pipeline/templates', { params })
    },

@@ -55,7 +56,7 @@ export const useInvalidCustomizedTemplateList = () => {
export const usePipelineTemplateById = (params: PipelineTemplateByIdRequest, enabled: boolean) => {
  const { template_id, type } = params
  return useQuery<PipelineTemplateByIdResponse>({
    queryKey: [NAME_SPACE, 'template', template_id],
    queryKey: [NAME_SPACE, 'template', type, template_id],
    queryFn: () => {
      return get<PipelineTemplateByIdResponse>(`/rag/pipeline/templates/${template_id}`, {
        params: {

@@ -64,6 +65,7 @@ export const usePipelineTemplateById = (params: PipelineTemplateByIdRequest, ena
      })
    },
    enabled,
    staleTime: 0,
  })
}

@@ -26,6 +26,9 @@
    "paths": {
      "@/*": [
        "./*"
      ],
      "~@/*": [
        "./*"
      ]
    }
  },