mirror of https://github.com/langgenius/dify.git

commit 6853a699e1
Merge branch 'main' into feat/trigger

@@ -8,6 +8,7 @@ on:
       - "deploy/enterprise"
       - "build/**"
       - "release/e-*"
+      - "hotfix/**"
     tags:
       - "*"

@@ -408,6 +408,9 @@ SSRF_DEFAULT_TIME_OUT=5
 SSRF_DEFAULT_CONNECT_TIME_OUT=5
 SSRF_DEFAULT_READ_TIME_OUT=5
 SSRF_DEFAULT_WRITE_TIME_OUT=5
+SSRF_POOL_MAX_CONNECTIONS=100
+SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
+SSRF_POOL_KEEPALIVE_EXPIRY=5.0
 
 BATCH_UPLOAD_LIMIT=10
 KEYWORD_DATA_SOURCE_TYPE=database

@@ -418,6 +421,10 @@ WORKFLOW_FILE_UPLOAD_LIMIT=10
 # CODE EXECUTION CONFIGURATION
 CODE_EXECUTION_ENDPOINT=http://127.0.0.1:8194
 CODE_EXECUTION_API_KEY=dify-sandbox
+CODE_EXECUTION_SSL_VERIFY=True
+CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
+CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
+CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
 CODE_MAX_NUMBER=9223372036854775807
 CODE_MIN_NUMBER=-9223372036854775808
 CODE_MAX_STRING_LENGTH=80000

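These pool settings feed straight into an httpx connection pool, as the code changes later in this diff show. A minimal sketch of the mapping (not part of the diff; the values mirror the defaults above):

    import httpx

    limits = httpx.Limits(
        max_connections=100,           # *_POOL_MAX_CONNECTIONS
        max_keepalive_connections=20,  # *_POOL_MAX_KEEPALIVE_CONNECTIONS
        keepalive_expiry=5.0,          # *_POOL_KEEPALIVE_EXPIRY, in seconds
    )
    client = httpx.Client(limits=limits)
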
api/commands.py

@@ -10,6 +10,7 @@ from flask import current_app
 from pydantic import TypeAdapter
 from sqlalchemy import select
 from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy.orm import sessionmaker
 
 from configs import dify_config
 from constants.languages import languages

@@ -61,31 +62,30 @@ def reset_password(email, new_password, password_confirm):
     if str(new_password).strip() != str(password_confirm).strip():
         click.echo(click.style("Passwords do not match.", fg="red"))
         return
-
-    account = db.session.query(Account).where(Account.email == email).one_or_none()
-
-    if not account:
-        click.echo(click.style(f"Account not found for email: {email}", fg="red"))
-        return
-
-    try:
-        valid_password(new_password)
-    except:
-        click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red"))
-        return
-
-    # generate password salt
-    salt = secrets.token_bytes(16)
-    base64_salt = base64.b64encode(salt).decode()
-
-    # encrypt password with salt
-    password_hashed = hash_password(new_password, salt)
-    base64_password_hashed = base64.b64encode(password_hashed).decode()
-    account.password = base64_password_hashed
-    account.password_salt = base64_salt
-    db.session.commit()
-    AccountService.reset_login_error_rate_limit(email)
-    click.echo(click.style("Password reset successfully.", fg="green"))
+    with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
+        account = session.query(Account).where(Account.email == email).one_or_none()
+
+        if not account:
+            click.echo(click.style(f"Account not found for email: {email}", fg="red"))
+            return
+
+        try:
+            valid_password(new_password)
+        except:
+            click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red"))
+            return
+
+        # generate password salt
+        salt = secrets.token_bytes(16)
+        base64_salt = base64.b64encode(salt).decode()
+
+        # encrypt password with salt
+        password_hashed = hash_password(new_password, salt)
+        base64_password_hashed = base64.b64encode(password_hashed).decode()
+        account.password = base64_password_hashed
+        account.password_salt = base64_salt
+        AccountService.reset_login_error_rate_limit(email)
+        click.echo(click.style("Password reset successfully.", fg="green"))
 
 
 @click.command("reset-email", help="Reset the account email.")

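The refactor above leans on SQLAlchemy's transaction-scoped session context. A minimal standalone sketch of the pattern (assumes SQLAlchemy 2.x; the table is illustrative, not dify code):

    from sqlalchemy import create_engine, text
    from sqlalchemy.orm import sessionmaker

    engine = create_engine("sqlite:///:memory:")

    with sessionmaker(engine, expire_on_commit=False).begin() as session:
        session.execute(text("CREATE TABLE t (id INTEGER PRIMARY KEY)"))
        session.execute(text("INSERT INTO t (id) VALUES (1)"))
    # leaving the block commits; an exception inside it rolls back,
    # which is why the explicit db.session.commit() calls are dropped.
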
@@ -100,22 +100,21 @@ def reset_email(email, new_email, email_confirm):
     if str(new_email).strip() != str(email_confirm).strip():
         click.echo(click.style("New emails do not match.", fg="red"))
         return
-
-    account = db.session.query(Account).where(Account.email == email).one_or_none()
-
-    if not account:
-        click.echo(click.style(f"Account not found for email: {email}", fg="red"))
-        return
-
-    try:
-        email_validate(new_email)
-    except:
-        click.echo(click.style(f"Invalid email: {new_email}", fg="red"))
-        return
-
-    account.email = new_email
-    db.session.commit()
-    click.echo(click.style("Email updated successfully.", fg="green"))
+    with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
+        account = session.query(Account).where(Account.email == email).one_or_none()
+
+        if not account:
+            click.echo(click.style(f"Account not found for email: {email}", fg="red"))
+            return
+
+        try:
+            email_validate(new_email)
+        except:
+            click.echo(click.style(f"Invalid email: {new_email}", fg="red"))
+            return
+
+        account.email = new_email
+        click.echo(click.style("Email updated successfully.", fg="green"))
 
 
 @click.command(

@@ -139,25 +138,24 @@ def reset_encrypt_key_pair():
     if dify_config.EDITION != "SELF_HOSTED":
         click.echo(click.style("This command is only for SELF_HOSTED installations.", fg="red"))
         return
-
-    tenants = db.session.query(Tenant).all()
-    for tenant in tenants:
-        if not tenant:
-            click.echo(click.style("No workspaces found. Run /install first.", fg="red"))
-            return
-
-        tenant.encrypt_public_key = generate_key_pair(tenant.id)
-
-        db.session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
-        db.session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete()
-        db.session.commit()
-
-        click.echo(
-            click.style(
-                f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
-                fg="green",
-            )
-        )
+    with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
+        tenants = session.query(Tenant).all()
+        for tenant in tenants:
+            if not tenant:
+                click.echo(click.style("No workspaces found. Run /install first.", fg="red"))
+                return
+
+            tenant.encrypt_public_key = generate_key_pair(tenant.id)
+
+            session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
+            session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete()
+
+            click.echo(
+                click.style(
+                    f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
+                    fg="green",
+                )
+            )
 
 
 @click.command("vdb-migrate", help="Migrate vector db.")

@@ -182,14 +180,15 @@ def migrate_annotation_vector_database():
         try:
             # get apps info
             per_page = 50
-            apps = (
-                db.session.query(App)
-                .where(App.status == "normal")
-                .order_by(App.created_at.desc())
-                .limit(per_page)
-                .offset((page - 1) * per_page)
-                .all()
-            )
+            with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
+                apps = (
+                    session.query(App)
+                    .where(App.status == "normal")
+                    .order_by(App.created_at.desc())
+                    .limit(per_page)
+                    .offset((page - 1) * per_page)
+                    .all()
+                )
             if not apps:
                 break
         except SQLAlchemyError:

@@ -203,26 +202,27 @@ def migrate_annotation_vector_database():
             )
             try:
                 click.echo(f"Creating app annotation index: {app.id}")
-                app_annotation_setting = (
-                    db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first()
-                )
-
-                if not app_annotation_setting:
-                    skipped_count = skipped_count + 1
-                    click.echo(f"App annotation setting disabled: {app.id}")
-                    continue
-                # get dataset_collection_binding info
-                dataset_collection_binding = (
-                    db.session.query(DatasetCollectionBinding)
-                    .where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
-                    .first()
-                )
-                if not dataset_collection_binding:
-                    click.echo(f"App annotation collection binding not found: {app.id}")
-                    continue
-                annotations = db.session.scalars(
-                    select(MessageAnnotation).where(MessageAnnotation.app_id == app.id)
-                ).all()
+                with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
+                    app_annotation_setting = (
+                        session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first()
+                    )
+
+                    if not app_annotation_setting:
+                        skipped_count = skipped_count + 1
+                        click.echo(f"App annotation setting disabled: {app.id}")
+                        continue
+                    # get dataset_collection_binding info
+                    dataset_collection_binding = (
+                        session.query(DatasetCollectionBinding)
+                        .where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
+                        .first()
+                    )
+                    if not dataset_collection_binding:
+                        click.echo(f"App annotation collection binding not found: {app.id}")
+                        continue
+                    annotations = session.scalars(
+                        select(MessageAnnotation).where(MessageAnnotation.app_id == app.id)
+                    ).all()
                 dataset = Dataset(
                     id=app.id,
                     tenant_id=app.tenant_id,

@@ -113,6 +113,21 @@ class CodeExecutionSandboxConfig(BaseSettings):
         default=10.0,
     )
 
+    CODE_EXECUTION_POOL_MAX_CONNECTIONS: PositiveInt = Field(
+        description="Maximum number of concurrent connections for the code execution HTTP client",
+        default=100,
+    )
+
+    CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS: PositiveInt = Field(
+        description="Maximum number of persistent keep-alive connections for the code execution HTTP client",
+        default=20,
+    )
+
+    CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY: PositiveFloat | None = Field(
+        description="Keep-alive expiry in seconds for idle connections (set to None to disable)",
+        default=5.0,
+    )
+
     CODE_MAX_NUMBER: PositiveInt = Field(
         description="Maximum allowed numeric value in code execution",
         default=9223372036854775807,

@@ -153,6 +168,11 @@ class CodeExecutionSandboxConfig(BaseSettings):
         default=1000,
     )
 
+    CODE_EXECUTION_SSL_VERIFY: bool = Field(
+        description="Enable or disable SSL verification for code execution requests",
+        default=True,
+    )
+
 
 class TriggerConfig(BaseSettings):
     """

@@ -415,6 +435,21 @@ class HttpConfig(BaseSettings):
         default=5,
     )
 
+    SSRF_POOL_MAX_CONNECTIONS: PositiveInt = Field(
+        description="Maximum number of concurrent connections for the SSRF HTTP client",
+        default=100,
+    )
+
+    SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS: PositiveInt = Field(
+        description="Maximum number of persistent keep-alive connections for the SSRF HTTP client",
+        default=20,
+    )
+
+    SSRF_POOL_KEEPALIVE_EXPIRY: PositiveFloat | None = Field(
+        description="Keep-alive expiry in seconds for idle SSRF connections (set to None to disable)",
+        default=5.0,
+    )
+
     RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field(
         description="Enable handling of X-Forwarded-For, X-Forwarded-Proto, and X-Forwarded-Port headers"
         " when the app is behind a single trusted reverse proxy.",

@@ -40,8 +40,12 @@ class OceanBaseVectorConfig(BaseSettings):
 
     OCEANBASE_FULLTEXT_PARSER: str | None = Field(
         description=(
-            "Fulltext parser to use for text indexing. Options: 'japanese_ftparser' (Japanese), "
-            "'thai_ftparser' (Thai), 'ik' (Chinese). Default is 'ik'"
+            "Fulltext parser to use for text indexing. "
+            "Built-in options: 'ngram' (N-gram tokenizer for English/numbers), "
+            "'beng' (Basic English tokenizer), 'space' (Space-based tokenizer), "
+            "'ngram2' (Improved N-gram tokenizer), 'ik' (Chinese tokenizer). "
+            "External plugins (require installation): 'japanese_ftparser' (Japanese tokenizer), "
+            "'thai_ftparser' (Thai tokenizer). Default is 'ik'"
         ),
         default="ik",
     )

@@ -1,6 +1,7 @@
 from datetime import datetime
 
 import pytz  # pip install pytz
+import sqlalchemy as sa
 from flask_login import current_user
 from flask_restx import Resource, marshal_with, reqparse
 from flask_restx.inputs import int_range

@@ -70,7 +71,7 @@ class CompletionConversationApi(Resource):
         parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
         args = parser.parse_args()
 
-        query = db.select(Conversation).where(
+        query = sa.select(Conversation).where(
             Conversation.app_id == app_model.id, Conversation.mode == "completion", Conversation.is_deleted.is_(False)
         )

@@ -236,7 +237,7 @@ class ChatConversationApi(Resource):
             .subquery()
         )
 
-        query = db.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.is_deleted.is_(False))
+        query = sa.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.is_deleted.is_(False))
 
         if args["keyword"]:
             keyword_filter = f"%{args['keyword']}%"

@@ -4,6 +4,7 @@ from argparse import ArgumentTypeError
 from collections.abc import Sequence
 from typing import Literal, cast
 
+import sqlalchemy as sa
 from flask import request
 from flask_login import current_user
 from flask_restx import Resource, fields, marshal, marshal_with, reqparse

@@ -211,13 +212,13 @@ class DatasetDocumentListApi(Resource):
 
         if sort == "hit_count":
             sub_query = (
-                db.select(DocumentSegment.document_id, db.func.sum(DocumentSegment.hit_count).label("total_hit_count"))
+                sa.select(DocumentSegment.document_id, sa.func.sum(DocumentSegment.hit_count).label("total_hit_count"))
                 .group_by(DocumentSegment.document_id)
                 .subquery()
             )
 
             query = query.outerjoin(sub_query, sub_query.c.document_id == Document.id).order_by(
-                sort_logic(db.func.coalesce(sub_query.c.total_hit_count, 0)),
+                sort_logic(sa.func.coalesce(sub_query.c.total_hit_count, 0)),
                 sort_logic(Document.position),
             )
         elif sort == "created_at":

@@ -30,7 +30,6 @@ from extensions.ext_database import db
 from fields.document_fields import document_fields, document_status_fields
 from libs.login import current_user
 from models.dataset import Dataset, Document, DocumentSegment
-from models.model import EndUser
 from services.dataset_service import DatasetService, DocumentService
 from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig
 from services.file_service import FileService

@@ -311,8 +310,6 @@ class DocumentAddByFileApi(DatasetApiResource):
         if not file.filename:
             raise FilenameNotExistsError
 
-        if not isinstance(current_user, EndUser):
-            raise ValueError("Invalid user account")
         if not current_user:
             raise ValueError("current_user is required")
         upload_file = FileService(db.engine).upload_file(

@@ -406,9 +403,6 @@ class DocumentUpdateByFileApi(DatasetApiResource):
         if not current_user:
             raise ValueError("current_user is required")
 
-        if not isinstance(current_user, EndUser):
-            raise ValueError("Invalid user account")
-
         try:
             upload_file = FileService(db.engine).upload_file(
                 filename=file.filename,

@@ -4,7 +4,7 @@ from enum import StrEnum
 from threading import Lock
 from typing import Any
 
-from httpx import Timeout, post
+import httpx
 from pydantic import BaseModel
 from yarl import URL

@@ -13,9 +13,17 @@ from core.helper.code_executor.javascript.javascript_transformer import NodeJsTemplateTransformer
 from core.helper.code_executor.jinja2.jinja2_transformer import Jinja2TemplateTransformer
 from core.helper.code_executor.python3.python3_transformer import Python3TemplateTransformer
 from core.helper.code_executor.template_transformer import TemplateTransformer
+from core.helper.http_client_pooling import get_pooled_http_client
 
 logger = logging.getLogger(__name__)
 code_execution_endpoint_url = URL(str(dify_config.CODE_EXECUTION_ENDPOINT))
+CODE_EXECUTION_SSL_VERIFY = dify_config.CODE_EXECUTION_SSL_VERIFY
+_CODE_EXECUTOR_CLIENT_LIMITS = httpx.Limits(
+    max_connections=dify_config.CODE_EXECUTION_POOL_MAX_CONNECTIONS,
+    max_keepalive_connections=dify_config.CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS,
+    keepalive_expiry=dify_config.CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY,
+)
+_CODE_EXECUTOR_CLIENT_KEY = "code_executor:http_client"
 
 
 class CodeExecutionError(Exception):

@@ -38,6 +46,13 @@ class CodeLanguage(StrEnum):
     JAVASCRIPT = "javascript"
 
 
+def _build_code_executor_client() -> httpx.Client:
+    return httpx.Client(
+        verify=CODE_EXECUTION_SSL_VERIFY,
+        limits=_CODE_EXECUTOR_CLIENT_LIMITS,
+    )
+
+
 class CodeExecutor:
     dependencies_cache: dict[str, str] = {}
     dependencies_cache_lock = Lock()

@@ -76,17 +91,21 @@ class CodeExecutor:
             "enable_network": True,
         }
 
+        timeout = httpx.Timeout(
+            connect=dify_config.CODE_EXECUTION_CONNECT_TIMEOUT,
+            read=dify_config.CODE_EXECUTION_READ_TIMEOUT,
+            write=dify_config.CODE_EXECUTION_WRITE_TIMEOUT,
+            pool=None,
+        )
+
+        client = get_pooled_http_client(_CODE_EXECUTOR_CLIENT_KEY, _build_code_executor_client)
+
         try:
-            response = post(
+            response = client.post(
                 str(url),
                 json=data,
                 headers=headers,
-                timeout=Timeout(
-                    connect=dify_config.CODE_EXECUTION_CONNECT_TIMEOUT,
-                    read=dify_config.CODE_EXECUTION_READ_TIMEOUT,
-                    write=dify_config.CODE_EXECUTION_WRITE_TIMEOUT,
-                    pool=None,
-                ),
+                timeout=timeout,
             )
             if response.status_code == 503:
                 raise CodeExecutionError("Code execution service is unavailable")

@@ -106,8 +125,8 @@ class CodeExecutor:
 
         try:
             response_data = response.json()
-        except:
-            raise CodeExecutionError("Failed to parse response")
+        except Exception as e:
+            raise CodeExecutionError("Failed to parse response") from e
 
         if (code := response_data.get("code")) != 0:
             raise CodeExecutionError(f"Got error code: {code}. Got error msg: {response_data.get('message')}")

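Chaining with "from e" keeps the original JSON decoding failure attached to the wrapper error. A tiny standalone sketch of the effect (illustrative, not dify code):

    import json

    try:
        json.loads("not json")
    except Exception as e:
        # The traceback now shows the JSONDecodeError as the direct cause
        # of the wrapper raised below.
        raise RuntimeError("Failed to parse response") from e
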
@@ -0,0 +1,59 @@
+"""HTTP client pooling utilities."""
+
+from __future__ import annotations
+
+import atexit
+import threading
+from collections.abc import Callable
+
+import httpx
+
+ClientBuilder = Callable[[], httpx.Client]
+
+
+class HttpClientPoolFactory:
+    """Thread-safe factory that maintains reusable HTTP client instances."""
+
+    def __init__(self) -> None:
+        self._clients: dict[str, httpx.Client] = {}
+        self._lock = threading.Lock()
+
+    def get_or_create(self, key: str, builder: ClientBuilder) -> httpx.Client:
+        """Return a pooled client associated with ``key``, creating it on demand."""
+        client = self._clients.get(key)
+        if client is not None:
+            return client
+
+        with self._lock:
+            client = self._clients.get(key)
+            if client is None:
+                client = builder()
+                self._clients[key] = client
+            return client
+
+    def close_all(self) -> None:
+        """Close all pooled clients and clear the pool."""
+        with self._lock:
+            for client in self._clients.values():
+                client.close()
+            self._clients.clear()
+
+
+_factory = HttpClientPoolFactory()
+
+
+def get_pooled_http_client(key: str, builder: ClientBuilder) -> httpx.Client:
+    """Return a pooled client for the given ``key``, using ``builder`` when missing."""
+    return _factory.get_or_create(key, builder)
+
+
+def close_all_pooled_clients() -> None:
+    """Close every client created through the pooling factory."""
+    _factory.close_all()
+
+
+def _register_shutdown_hook() -> None:
+    atexit.register(close_all_pooled_clients)
+
+
+_register_shutdown_hook()

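A minimal sketch of how the pooled helper behaves (assumes only httpx is installed; the key and builder here are illustrative, not dify code):

    import httpx

    from core.helper.http_client_pooling import get_pooled_http_client

    def _build_example_client() -> httpx.Client:
        # The builder runs at most once per key; later lookups reuse the instance.
        return httpx.Client(limits=httpx.Limits(max_connections=10))

    client_a = get_pooled_http_client("example:client", _build_example_client)
    client_b = get_pooled_http_client("example:client", _build_example_client)
    assert client_a is client_b  # the same long-lived client is returned
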
@@ -23,7 +23,7 @@ def batch_fetch_plugin_manifests(plugin_ids: list[str]) -> Sequence[MarketplacePluginDeclaration]:
         return []
 
     url = str(marketplace_api_url / "api/v1/plugins/batch")
-    response = httpx.post(url, json={"plugin_ids": plugin_ids})
+    response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version})
     response.raise_for_status()
 
     return [MarketplacePluginDeclaration(**plugin) for plugin in response.json()["data"]["plugins"]]

@@ -36,7 +36,7 @@ def batch_fetch_plugin_manifests_ignore_deserialization_error(
         return []
 
     url = str(marketplace_api_url / "api/v1/plugins/batch")
-    response = httpx.post(url, json={"plugin_ids": plugin_ids})
+    response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version})
     response.raise_for_status()
     result: list[MarketplacePluginDeclaration] = []
     for plugin in response.json()["data"]["plugins"]:

@@ -8,27 +8,23 @@ import time
 import httpx
 
 from configs import dify_config
+from core.helper.http_client_pooling import get_pooled_http_client
 
 logger = logging.getLogger(__name__)
 
 SSRF_DEFAULT_MAX_RETRIES = dify_config.SSRF_DEFAULT_MAX_RETRIES
 
-http_request_node_ssl_verify = True  # Default value for http_request_node_ssl_verify is True
-try:
-    config_value = dify_config.HTTP_REQUEST_NODE_SSL_VERIFY
-    http_request_node_ssl_verify_lower = str(config_value).lower()
-    if http_request_node_ssl_verify_lower == "true":
-        http_request_node_ssl_verify = True
-    elif http_request_node_ssl_verify_lower == "false":
-        http_request_node_ssl_verify = False
-    else:
-        raise ValueError("Invalid value. HTTP_REQUEST_NODE_SSL_VERIFY should be 'True' or 'False'")
-except NameError:
-    http_request_node_ssl_verify = True
-
 BACKOFF_FACTOR = 0.5
 STATUS_FORCELIST = [429, 500, 502, 503, 504]
 
+_SSL_VERIFIED_POOL_KEY = "ssrf:verified"
+_SSL_UNVERIFIED_POOL_KEY = "ssrf:unverified"
+_SSRF_CLIENT_LIMITS = httpx.Limits(
+    max_connections=dify_config.SSRF_POOL_MAX_CONNECTIONS,
+    max_keepalive_connections=dify_config.SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS,
+    keepalive_expiry=dify_config.SSRF_POOL_KEEPALIVE_EXPIRY,
+)
+
 
 class MaxRetriesExceededError(ValueError):
     """Raised when the maximum number of retries is exceeded."""

@@ -36,6 +32,45 @@ class MaxRetriesExceededError(ValueError):
     pass
 
 
+def _create_proxy_mounts() -> dict[str, httpx.HTTPTransport]:
+    return {
+        "http://": httpx.HTTPTransport(
+            proxy=dify_config.SSRF_PROXY_HTTP_URL,
+        ),
+        "https://": httpx.HTTPTransport(
+            proxy=dify_config.SSRF_PROXY_HTTPS_URL,
+        ),
+    }
+
+
+def _build_ssrf_client(verify: bool) -> httpx.Client:
+    if dify_config.SSRF_PROXY_ALL_URL:
+        return httpx.Client(
+            proxy=dify_config.SSRF_PROXY_ALL_URL,
+            verify=verify,
+            limits=_SSRF_CLIENT_LIMITS,
+        )
+
+    if dify_config.SSRF_PROXY_HTTP_URL and dify_config.SSRF_PROXY_HTTPS_URL:
+        return httpx.Client(
+            mounts=_create_proxy_mounts(),
+            verify=verify,
+            limits=_SSRF_CLIENT_LIMITS,
+        )
+
+    return httpx.Client(verify=verify, limits=_SSRF_CLIENT_LIMITS)
+
+
+def _get_ssrf_client(ssl_verify_enabled: bool) -> httpx.Client:
+    if not isinstance(ssl_verify_enabled, bool):
+        raise ValueError("SSRF client verify flag must be a boolean")
+
+    return get_pooled_http_client(
+        _SSL_VERIFIED_POOL_KEY if ssl_verify_enabled else _SSL_UNVERIFIED_POOL_KEY,
+        lambda: _build_ssrf_client(verify=ssl_verify_enabled),
+    )
+
+
 def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
     if "allow_redirects" in kwargs:
         allow_redirects = kwargs.pop("allow_redirects")

@@ -50,33 +85,22 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
         write=dify_config.SSRF_DEFAULT_WRITE_TIME_OUT,
     )
 
-    if "ssl_verify" not in kwargs:
-        kwargs["ssl_verify"] = http_request_node_ssl_verify
-
-    ssl_verify = kwargs.pop("ssl_verify")
+    # prioritize per-call option, which can be switched on and off inside the HTTP node on the web UI
+    verify_option = kwargs.pop("ssl_verify", dify_config.HTTP_REQUEST_NODE_SSL_VERIFY)
+    client = _get_ssrf_client(verify_option)
 
     retries = 0
     while retries <= max_retries:
         try:
-            if dify_config.SSRF_PROXY_ALL_URL:
-                with httpx.Client(proxy=dify_config.SSRF_PROXY_ALL_URL, verify=ssl_verify) as client:
-                    response = client.request(method=method, url=url, **kwargs)
-            elif dify_config.SSRF_PROXY_HTTP_URL and dify_config.SSRF_PROXY_HTTPS_URL:
-                proxy_mounts = {
-                    "http://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTP_URL, verify=ssl_verify),
-                    "https://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTPS_URL, verify=ssl_verify),
-                }
-                with httpx.Client(mounts=proxy_mounts, verify=ssl_verify) as client:
-                    response = client.request(method=method, url=url, **kwargs)
-            else:
-                with httpx.Client(verify=ssl_verify) as client:
-                    response = client.request(method=method, url=url, **kwargs)
+            response = client.request(method=method, url=url, **kwargs)
 
             if response.status_code not in STATUS_FORCELIST:
                 return response
             else:
                 logger.warning(
-                    "Received status code %s for URL %s which is in the force list", response.status_code, url
+                    "Received status code %s for URL %s which is in the force list",
+                    response.status_code,
+                    url,
                 )
 
         except httpx.RequestError as e:

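The new ssl_verify handling is just kwargs.pop with a default: the per-call flag is removed from kwargs (so it is never forwarded to httpx) and the configured default applies when the caller says nothing. A standalone sketch (names here are illustrative):

    def pick_verify(default: bool, **kwargs) -> tuple[bool, dict]:
        verify = kwargs.pop("ssl_verify", default)
        return verify, kwargs

    verify, rest = pick_verify(True, ssl_verify=False, timeout=5)
    assert verify is False and rest == {"timeout": 5}
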
@@ -123,7 +123,7 @@ class OceanBaseVector(BaseVector):
         # Get parser from config or use default ik parser
         parser_name = dify_config.OCEANBASE_FULLTEXT_PARSER or "ik"
 
-        allowed_parsers = ["ik", "japanese_ftparser", "thai_ftparser"]
+        allowed_parsers = ["ngram", "beng", "space", "ngram2", "ik", "japanese_ftparser", "thai_ftparser"]
         if parser_name not in allowed_parsers:
             raise ValueError(
                 f"Invalid OceanBase full-text parser: {parser_name}. "

@@ -1,9 +1,11 @@
+import contextvars
 import logging
 from collections.abc import Generator, Mapping, Sequence
 from concurrent.futures import Future, ThreadPoolExecutor, as_completed
 from datetime import UTC, datetime
 from typing import TYPE_CHECKING, Any, NewType, cast
 
+from flask import Flask, current_app
 from typing_extensions import TypeIs
 
 from core.variables import IntegerVariable, NoneSegment

@@ -35,6 +37,7 @@ from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig
 from core.workflow.nodes.base.node import Node
 from core.workflow.nodes.iteration.entities import ErrorHandleMode, IterationNodeData
 from libs.datetime_utils import naive_utc_now
+from libs.flask_utils import preserve_flask_contexts
 
 from .exc import (
     InvalidIteratorValueError,

@@ -239,6 +242,8 @@ class IterationNode(Node):
                     self._execute_single_iteration_parallel,
                     index=index,
                     item=item,
+                    flask_app=current_app._get_current_object(),  # type: ignore
+                    context_vars=contextvars.copy_context(),
                 )
                 future_to_index[future] = index

@@ -281,26 +286,29 @@ class IterationNode(Node):
         self,
         index: int,
         item: object,
+        flask_app: Flask,
+        context_vars: contextvars.Context,
     ) -> tuple[datetime, list[GraphNodeEventBase], object | None, int]:
         """Execute a single iteration in parallel mode and return results."""
-        iter_start_at = datetime.now(UTC).replace(tzinfo=None)
-        events: list[GraphNodeEventBase] = []
-        outputs_temp: list[object] = []
-
-        graph_engine = self._create_graph_engine(index, item)
-
-        # Collect events instead of yielding them directly
-        for event in self._run_single_iter(
-            variable_pool=graph_engine.graph_runtime_state.variable_pool,
-            outputs=outputs_temp,
-            graph_engine=graph_engine,
-        ):
-            events.append(event)
-
-        # Get the output value from the temporary outputs list
-        output_value = outputs_temp[0] if outputs_temp else None
-
-        return iter_start_at, events, output_value, graph_engine.graph_runtime_state.total_tokens
+        with preserve_flask_contexts(flask_app=flask_app, context_vars=context_vars):
+            iter_start_at = datetime.now(UTC).replace(tzinfo=None)
+            events: list[GraphNodeEventBase] = []
+            outputs_temp: list[object] = []
+
+            graph_engine = self._create_graph_engine(index, item)
+
+            # Collect events instead of yielding them directly
+            for event in self._run_single_iter(
+                variable_pool=graph_engine.graph_runtime_state.variable_pool,
+                outputs=outputs_temp,
+                graph_engine=graph_engine,
+            ):
+                events.append(event)
+
+            # Get the output value from the temporary outputs list
+            output_value = outputs_temp[0] if outputs_temp else None
+
+            return iter_start_at, events, output_value, graph_engine.graph_runtime_state.total_tokens
 
     def _handle_iteration_success(
         self,

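Worker threads start with an empty contextvars Context, so values set on the request thread are invisible in the pool unless a copied Context is carried along, which is what passing contextvars.copy_context() into each iteration achieves. A standalone sketch of the mechanism (illustrative names, not dify code):

    import contextvars
    from concurrent.futures import ThreadPoolExecutor

    request_id: contextvars.ContextVar[str] = contextvars.ContextVar("request_id")

    def task() -> str:
        return request_id.get("<unset>")

    request_id.set("req-42")
    ctx = contextvars.copy_context()

    with ThreadPoolExecutor() as pool:
        assert pool.submit(task).result() == "<unset>"           # fresh context
        assert pool.submit(ctx.run, task).result() == "req-42"   # copied context
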
@@ -63,7 +63,7 @@ class RetrievalSetting(BaseModel):
     Retrieval Setting.
     """
 
-    search_method: Literal["semantic_search", "keyword_search", "fulltext_search", "hybrid_search"]
+    search_method: Literal["semantic_search", "keyword_search", "full_text_search", "hybrid_search"]
     top_k: int
     score_threshold: float | None = 0.5
     score_threshold_enabled: bool = False

@@ -8,6 +8,7 @@ from typing import Any
 import httpx
 from sqlalchemy import select
 from sqlalchemy.orm import Session
+from werkzeug.http import parse_options_header
 
 from constants import AUDIO_EXTENSIONS, DOCUMENT_EXTENSIONS, IMAGE_EXTENSIONS, VIDEO_EXTENSIONS
 from core.file import File, FileBelongsTo, FileTransferMethod, FileType, FileUploadConfig, helpers

@@ -247,6 +248,25 @@ def _build_from_remote_url(
     )
 
 
+def _extract_filename(url_path: str, content_disposition: str | None) -> str | None:
+    filename = None
+    # Try to extract from Content-Disposition header first
+    if content_disposition:
+        _, params = parse_options_header(content_disposition)
+        # RFC 5987 https://datatracker.ietf.org/doc/html/rfc5987: filename* takes precedence over filename
+        filename = params.get("filename*") or params.get("filename")
+    # Fallback to URL path if no filename from header
+    if not filename:
+        filename = os.path.basename(url_path)
+    return filename or None
+
+
+def _guess_mime_type(filename: str) -> str:
+    """Guess MIME type from filename, returning empty string if None."""
+    guessed_mime, _ = mimetypes.guess_type(filename)
+    return guessed_mime or ""
+
+
 def _get_remote_file_info(url: str):
     file_size = -1
     parsed_url = urllib.parse.urlparse(url)

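The new helper leans on werkzeug to parse the header. A quick sketch of what parse_options_header yields for an RFC 5987 value (current werkzeug decodes the extended form, so the defensive two-key lookup above works across versions):

    from werkzeug.http import parse_options_header

    value, params = parse_options_header("attachment; filename*=UTF-8''file%20name.jpg")
    # value == "attachment"
    filename = params.get("filename*") or params.get("filename")
    assert filename == "file name.jpg"
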
@@ -254,23 +274,26 @@ def _get_remote_file_info(url: str):
     filename = os.path.basename(url_path)
 
     # Initialize mime_type from filename as fallback
-    mime_type, _ = mimetypes.guess_type(filename)
-    if mime_type is None:
-        mime_type = ""
+    mime_type = _guess_mime_type(filename)
 
     resp = ssrf_proxy.head(url, follow_redirects=True)
     if resp.status_code == httpx.codes.OK:
-        if content_disposition := resp.headers.get("Content-Disposition"):
-            filename = str(content_disposition.split("filename=")[-1].strip('"'))
-            # Re-guess mime_type from updated filename
-            mime_type, _ = mimetypes.guess_type(filename)
-            if mime_type is None:
-                mime_type = ""
+        content_disposition = resp.headers.get("Content-Disposition")
+        extracted_filename = _extract_filename(url_path, content_disposition)
+        if extracted_filename:
+            filename = extracted_filename
+            mime_type = _guess_mime_type(filename)
         file_size = int(resp.headers.get("Content-Length", file_size))
         # Fallback to Content-Type header if mime_type is still empty
         if not mime_type:
             mime_type = resp.headers.get("Content-Type", "").split(";")[0].strip()
 
+    if not filename:
+        extension = mimetypes.guess_extension(mime_type) or ".bin"
+        filename = f"{uuid.uuid4().hex}{extension}"
+    if not mime_type:
+        mime_type = _guess_mime_type(filename)
+
     return mime_type, filename, file_size

@@ -910,7 +910,7 @@ class AppDatasetJoin(Base):
     id = mapped_column(StringUUID, primary_key=True, nullable=False, server_default=sa.text("uuid_generate_v4()"))
     app_id = mapped_column(StringUUID, nullable=False)
     dataset_id = mapped_column(StringUUID, nullable=False)
-    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=db.func.current_timestamp())
+    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp())
 
     @property
     def app(self):

@@ -931,7 +931,7 @@ class DatasetQuery(Base):
     source_app_id = mapped_column(StringUUID, nullable=True)
     created_by_role = mapped_column(String, nullable=False)
     created_by = mapped_column(StringUUID, nullable=False)
-    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=db.func.current_timestamp())
+    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp())
 
 
 class DatasetKeywordTable(Base):

@@ -1731,7 +1731,7 @@ class MessageChain(Base):
     type: Mapped[str] = mapped_column(String(255), nullable=False)
     input = mapped_column(sa.Text, nullable=True)
     output = mapped_column(sa.Text, nullable=True)
-    created_at = mapped_column(sa.DateTime, nullable=False, server_default=db.func.current_timestamp())
+    created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp())
 
 
 class MessageAgentThought(Base):

@@ -1769,7 +1769,7 @@ class MessageAgentThought(Base):
     latency: Mapped[float | None] = mapped_column(sa.Float, nullable=True)
     created_by_role = mapped_column(String, nullable=False)
     created_by = mapped_column(StringUUID, nullable=False)
-    created_at = mapped_column(sa.DateTime, nullable=False, server_default=db.func.current_timestamp())
+    created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp())
 
     @property
     def files(self) -> list[Any]:

@@ -1872,7 +1872,7 @@ class DatasetRetrieverResource(Base):
     index_node_hash = mapped_column(sa.Text, nullable=True)
     retriever_from = mapped_column(sa.Text, nullable=False)
     created_by = mapped_column(StringUUID, nullable=False)
-    created_at = mapped_column(sa.DateTime, nullable=False, server_default=db.func.current_timestamp())
+    created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp())
 
 
 class Tag(Base):

@@ -2,6 +2,7 @@ import json
 import logging
 from typing import TypedDict, cast
 
+import sqlalchemy as sa
 from flask_sqlalchemy.pagination import Pagination
 
 from configs import dify_config

@@ -65,7 +66,7 @@ class AppService:
             return None
 
         app_models = db.paginate(
-            db.select(App).where(*filters).order_by(App.created_at.desc()),
+            sa.select(App).where(*filters).order_by(App.created_at.desc()),
             page=args["page"],
             per_page=args["limit"],
             error_out=False,

@@ -115,12 +115,12 @@ class DatasetService:
             # Check if permitted_dataset_ids is not empty to avoid WHERE false condition
             if permitted_dataset_ids and len(permitted_dataset_ids) > 0:
                 query = query.where(
-                    db.or_(
+                    sa.or_(
                         Dataset.permission == DatasetPermissionEnum.ALL_TEAM,
-                        db.and_(
+                        sa.and_(
                             Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id
                         ),
-                        db.and_(
+                        sa.and_(
                             Dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM,
                             Dataset.id.in_(permitted_dataset_ids),
                         ),

@@ -128,9 +128,9 @@ class DatasetService:
                 )
             else:
                 query = query.where(
-                    db.or_(
+                    sa.or_(
                         Dataset.permission == DatasetPermissionEnum.ALL_TEAM,
-                        db.and_(
+                        sa.and_(
                             Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id
                         ),
                     )
                 )

@@ -1879,7 +1879,7 @@ class DocumentService:
         # for notion_info in notion_info_list:
         #     workspace_id = notion_info.workspace_id
         #     data_source_binding = DataSourceOauthBinding.query.filter(
-        #         db.and_(
+        #         sa.and_(
         #             DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
         #             DataSourceOauthBinding.provider == "notion",
         #             DataSourceOauthBinding.disabled == False,

@@ -83,7 +83,7 @@ class RetrievalSetting(BaseModel):
     Retrieval Setting.
     """
 
-    search_method: Literal["semantic_search", "fulltext_search", "keyword_search", "hybrid_search"]
+    search_method: Literal["semantic_search", "full_text_search", "keyword_search", "hybrid_search"]
     top_k: int
     score_threshold: float | None = 0.5
     score_threshold_enabled: bool = False

@@ -471,7 +471,7 @@ class PluginMigration:
         total_failed_tenant = 0
         while True:
             # paginate
-            tenants = db.paginate(db.select(Tenant).order_by(Tenant.created_at.desc()), page=page, per_page=100)
+            tenants = db.paginate(sa.select(Tenant).order_by(Tenant.created_at.desc()), page=page, per_page=100)
             if tenants.items is None or len(tenants.items) == 0:
                 break

@@ -1,5 +1,6 @@
 import uuid
 
+import sqlalchemy as sa
 from flask_login import current_user
 from sqlalchemy import func, select
 from werkzeug.exceptions import NotFound

@@ -18,7 +19,7 @@ class TagService:
             .where(Tag.type == tag_type, Tag.tenant_id == current_tenant_id)
         )
         if keyword:
-            query = query.where(db.and_(Tag.name.ilike(f"%{keyword}%")))
+            query = query.where(sa.and_(Tag.name.ilike(f"%{keyword}%")))
         query = query.group_by(Tag.id, Tag.type, Tag.name, Tag.created_at)
         results: list = query.order_by(Tag.created_at.desc()).all()
         return results

@@ -262,6 +262,14 @@ class VariableTruncator:
             target_length = self._array_element_limit
 
         for i, item in enumerate(value):
+            # Dirty fix:
+            # The output of `Start` node may contain list of `File` elements,
+            # causing `AssertionError` while invoking `_truncate_json_primitives`.
+            #
+            # This check ensures that `list[File]` are handled separately
+            if isinstance(item, File):
+                truncated_value.append(item)
+                continue
             if i >= target_length:
                 return _PartResult(truncated_value, used_size, True)
             if i > 0:

@@ -2,6 +2,7 @@ import logging
 import time
 
 import click
+import sqlalchemy as sa
 from celery import shared_task
 from sqlalchemy import select

@@ -51,7 +52,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
         data_source_binding = (
             db.session.query(DataSourceOauthBinding)
             .where(
-                db.and_(
+                sa.and_(
                     DataSourceOauthBinding.tenant_id == document.tenant_id,
                     DataSourceOauthBinding.provider == "notion",
                     DataSourceOauthBinding.disabled == False,

@@ -0,0 +1,115 @@
+import re
+
+import pytest
+
+from factories.file_factory import _get_remote_file_info
+
+
+class _FakeResponse:
+    def __init__(self, status_code: int, headers: dict[str, str]):
+        self.status_code = status_code
+        self.headers = headers
+
+
+def _mock_head(monkeypatch: pytest.MonkeyPatch, headers: dict[str, str], status_code: int = 200):
+    def _fake_head(url: str, follow_redirects: bool = True):
+        return _FakeResponse(status_code=status_code, headers=headers)
+
+    monkeypatch.setattr("factories.file_factory.ssrf_proxy.head", _fake_head)
+
+
+class TestGetRemoteFileInfo:
+    """Tests for _get_remote_file_info focusing on filename extraction rules."""
+
+    def test_inline_no_filename(self, monkeypatch: pytest.MonkeyPatch):
+        _mock_head(
+            monkeypatch,
+            {
+                "Content-Disposition": "inline",
+                "Content-Type": "application/pdf",
+                "Content-Length": "123",
+            },
+        )
+        mime_type, filename, size = _get_remote_file_info("http://example.com/some/path/file.pdf")
+        assert filename == "file.pdf"
+        assert mime_type == "application/pdf"
+        assert size == 123
+
+    def test_attachment_no_filename(self, monkeypatch: pytest.MonkeyPatch):
+        _mock_head(
+            monkeypatch,
+            {
+                "Content-Disposition": "attachment",
+                "Content-Type": "application/octet-stream",
+                "Content-Length": "456",
+            },
+        )
+        mime_type, filename, size = _get_remote_file_info("http://example.com/downloads/data.bin")
+        assert filename == "data.bin"
+        assert mime_type == "application/octet-stream"
+        assert size == 456
+
+    def test_attachment_quoted_space_filename(self, monkeypatch: pytest.MonkeyPatch):
+        _mock_head(
+            monkeypatch,
+            {
+                "Content-Disposition": 'attachment; filename="file name.jpg"',
+                "Content-Type": "image/jpeg",
+                "Content-Length": "789",
+            },
+        )
+        mime_type, filename, size = _get_remote_file_info("http://example.com/ignored")
+        assert filename == "file name.jpg"
+        assert mime_type == "image/jpeg"
+        assert size == 789
+
+    def test_attachment_filename_star_percent20(self, monkeypatch: pytest.MonkeyPatch):
+        _mock_head(
+            monkeypatch,
+            {
+                "Content-Disposition": "attachment; filename*=UTF-8''file%20name.jpg",
+                "Content-Type": "image/jpeg",
+            },
+        )
+        mime_type, filename, _ = _get_remote_file_info("http://example.com/ignored")
+        assert filename == "file name.jpg"
+        assert mime_type == "image/jpeg"
+
+    def test_attachment_filename_star_chinese(self, monkeypatch: pytest.MonkeyPatch):
+        _mock_head(
+            monkeypatch,
+            {
+                "Content-Disposition": "attachment; filename*=UTF-8''%E6%B5%8B%E8%AF%95%E6%96%87%E4%BB%B6.jpg",
+                "Content-Type": "image/jpeg",
+            },
+        )
+        mime_type, filename, _ = _get_remote_file_info("http://example.com/ignored")
+        assert filename == "测试文件.jpg"
+        assert mime_type == "image/jpeg"
+
+    def test_filename_from_url_when_no_header(self, monkeypatch: pytest.MonkeyPatch):
+        _mock_head(
+            monkeypatch,
+            {
+                # No Content-Disposition
+                "Content-Type": "text/plain",
+                "Content-Length": "12",
+            },
+        )
+        mime_type, filename, size = _get_remote_file_info("http://example.com/static/file.txt")
+        assert filename == "file.txt"
+        assert mime_type == "text/plain"
+        assert size == 12
+
+    def test_no_filename_in_url_or_header_generates_uuid_bin(self, monkeypatch: pytest.MonkeyPatch):
+        _mock_head(
+            monkeypatch,
+            {
+                "Content-Disposition": "inline",
+                "Content-Type": "application/octet-stream",
+            },
+        )
+        mime_type, filename, _ = _get_remote_file_info("http://example.com/test/")
+        # Should generate a random hex filename with .bin extension
+        assert re.match(r"^[0-9a-f]{32}\.bin$", filename) is not None
+        assert mime_type == "application/octet-stream"
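The `filename*` cases in these tests follow RFC 5987/6266 extended-parameter encoding. As a quick illustration of how such values decode (standard library only; this is not the factory's actual parsing code):

    from urllib.parse import unquote

    def decode_rfc5987(value: str) -> str:
        # value looks like: UTF-8''%E6%B5%8B...  -> charset ' language ' percent-encoded text
        charset, _, encoded = value.split("'", 2)
        return unquote(encoded, encoding=charset or "utf-8")

    print(decode_rfc5987("UTF-8''file%20name.jpg"))  # file name.jpg
    print(decode_rfc5987("UTF-8''%E6%B5%8B%E8%AF%95%E6%96%87%E4%BB%B6.jpg"))  # 测试文件.jpg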
@@ -588,3 +588,11 @@ class TestIntegrationScenarios:
         if isinstance(result.result, ObjectSegment):
             result_size = truncator.calculate_json_size(result.result.value)
             assert result_size <= original_size
+
+    def test_file_and_array_file_variable_mapping(self, file):
+        truncator = VariableTruncator(string_length_limit=30, array_element_limit=3, max_size_bytes=300)
+
+        mapping = {"array_file": [file]}
+        truncated_mapping, truncated = truncator.truncate_variable_mapping(mapping)
+        assert truncated is False
+        assert truncated_mapping == mapping
@@ -655,6 +655,8 @@ LINDORM_USING_UGC=True
 LINDORM_QUERY_TIMEOUT=1
 
 # OceanBase Vector configuration, only available when VECTOR_STORE is `oceanbase`
+# Built-in fulltext parsers are `ngram`, `beng`, `space`, `ngram2`, `ik`
+# External fulltext parsers (require plugin installation) are `japanese_ftparser`, `thai_ftparser`
 OCEANBASE_VECTOR_HOST=oceanbase
 OCEANBASE_VECTOR_PORT=2881
 OCEANBASE_VECTOR_USER=root@test
@@ -857,6 +859,10 @@ OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
 # The sandbox service endpoint.
 CODE_EXECUTION_ENDPOINT=http://sandbox:8194
 CODE_EXECUTION_API_KEY=dify-sandbox
+CODE_EXECUTION_SSL_VERIFY=True
+CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
+CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
+CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
 CODE_MAX_NUMBER=9223372036854775807
 CODE_MIN_NUMBER=-9223372036854775808
 CODE_MAX_DEPTH=5
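Note that `CODE_EXECUTION_SSL_VERIFY` arrives from the environment as the string "True"; a settings layer has to coerce it to a boolean before handing it to the HTTP client. A sketch of the usual coercion (illustrative helper, not Dify's actual config code; the project reads these values through its pydantic-based `dify_config`):

    import os

    def env_bool(name: str, default: bool) -> bool:
        raw = os.environ.get(name)
        if raw is None:
            return default
        return raw.strip().lower() in ("1", "true", "yes", "on")

    verify_ssl = env_bool("CODE_EXECUTION_SSL_VERIFY", True)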
@@ -1135,6 +1141,9 @@ SSRF_DEFAULT_TIME_OUT=5
 SSRF_DEFAULT_CONNECT_TIME_OUT=5
 SSRF_DEFAULT_READ_TIME_OUT=5
 SSRF_DEFAULT_WRITE_TIME_OUT=5
+SSRF_POOL_MAX_CONNECTIONS=100
+SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
+SSRF_POOL_KEEPALIVE_EXPIRY=5.0
 
 # ------------------------------
 # docker env var for specifying vector db type at startup
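The three new pool knobs line up with the parameters of an httpx connection pool (the `follow_redirects` keyword mocked in the tests above also points at httpx). A minimal sketch of how a client could be built from them together with the timeout variables in the same block; the client construction is illustrative, not the exact ssrf_proxy code:

    import os

    import httpx

    limits = httpx.Limits(
        max_connections=int(os.environ.get("SSRF_POOL_MAX_CONNECTIONS", "100")),
        max_keepalive_connections=int(os.environ.get("SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS", "20")),
        keepalive_expiry=float(os.environ.get("SSRF_POOL_KEEPALIVE_EXPIRY", "5.0")),
    )
    timeout = httpx.Timeout(
        float(os.environ.get("SSRF_DEFAULT_TIME_OUT", "5")),
        connect=float(os.environ.get("SSRF_DEFAULT_CONNECT_TIME_OUT", "5")),
        read=float(os.environ.get("SSRF_DEFAULT_READ_TIME_OUT", "5")),
        write=float(os.environ.get("SSRF_DEFAULT_WRITE_TIME_OUT", "5")),
    )
    # Keep-alive connections are reused until keepalive_expiry seconds of
    # idleness, capping connection churn under load.
    client = httpx.Client(limits=limits, timeout=timeout)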
@@ -382,6 +382,10 @@ x-shared-env: &shared-api-worker-env
 OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES: ${OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES:-5}
 CODE_EXECUTION_ENDPOINT: ${CODE_EXECUTION_ENDPOINT:-http://sandbox:8194}
 CODE_EXECUTION_API_KEY: ${CODE_EXECUTION_API_KEY:-dify-sandbox}
+CODE_EXECUTION_SSL_VERIFY: ${CODE_EXECUTION_SSL_VERIFY:-True}
+CODE_EXECUTION_POOL_MAX_CONNECTIONS: ${CODE_EXECUTION_POOL_MAX_CONNECTIONS:-100}
+CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS: ${CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS:-20}
+CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY: ${CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY:-5.0}
 CODE_MAX_NUMBER: ${CODE_MAX_NUMBER:-9223372036854775807}
 CODE_MIN_NUMBER: ${CODE_MIN_NUMBER:--9223372036854775808}
 CODE_MAX_DEPTH: ${CODE_MAX_DEPTH:-5}
@@ -498,6 +502,9 @@ x-shared-env: &shared-api-worker-env
 SSRF_DEFAULT_CONNECT_TIME_OUT: ${SSRF_DEFAULT_CONNECT_TIME_OUT:-5}
 SSRF_DEFAULT_READ_TIME_OUT: ${SSRF_DEFAULT_READ_TIME_OUT:-5}
 SSRF_DEFAULT_WRITE_TIME_OUT: ${SSRF_DEFAULT_WRITE_TIME_OUT:-5}
+SSRF_POOL_MAX_CONNECTIONS: ${SSRF_POOL_MAX_CONNECTIONS:-100}
+SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS: ${SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS:-20}
+SSRF_POOL_KEEPALIVE_EXPIRY: ${SSRF_POOL_KEEPALIVE_EXPIRY:-5.0}
 EXPOSE_NGINX_PORT: ${EXPOSE_NGINX_PORT:-80}
 EXPOSE_NGINX_SSL_PORT: ${EXPOSE_NGINX_SSL_PORT:-443}
 POSITION_TOOL_PINS: ${POSITION_TOOL_PINS:-}
@@ -35,7 +35,7 @@ import { useStore as useAppStore } from '@/app/components/app/store'
 import { useAppContext } from '@/context/app-context'
 import useTimestamp from '@/hooks/use-timestamp'
 import Tooltip from '@/app/components/base/tooltip'
-import { CopyIcon } from '@/app/components/base/copy-icon'
+import CopyIcon from '@/app/components/base/copy-icon'
 import { buildChatItemTree, getThreadMessages } from '@/app/components/base/chat/utils'
 import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils'
 import cn from '@/utils/classnames'
@@ -15,7 +15,7 @@ type Props = {
 
 const prefixEmbedded = 'appOverview.overview.appInfo.embedded'
 
-export const CopyIcon = ({ content }: Props) => {
+const CopyIcon = ({ content }: Props) => {
   const { t } = useTranslation()
   const [isCopied, setIsCopied] = useState<boolean>(false)
@@ -63,7 +63,7 @@ const useThinkTimer = (children: any) => {
   return { elapsedTime, isComplete }
 }
 
-export const ThinkBlock = ({ children, ...props }: any) => {
+const ThinkBlock = ({ children, ...props }: React.ComponentProps<'details'>) => {
   const { elapsedTime, isComplete } = useThinkTimer(children)
   const displayContent = removeEndThink(children)
   const { t } = useTranslation()
@@ -53,7 +53,7 @@ const SearchInput: FC<SearchInputProps> = ({
   }}
   onCompositionEnd={(e) => {
     isComposing.current = false
-    onChange(e.data)
+    onChange(e.currentTarget.value)
   }}
   onFocus={() => setFocus(true)}
   onBlur={() => setFocus(false)}
@@ -3,7 +3,7 @@ import { SVG } from '@svgdotjs/svg.js'
 import DOMPurify from 'dompurify'
 import ImagePreview from '@/app/components/base/image-uploader/image-preview'
 
-export const SVGRenderer = ({ content }: { content: string }) => {
+const SVGRenderer = ({ content }: { content: string }) => {
   const svgRef = useRef<HTMLDivElement>(null)
   const [imagePreview, setImagePreview] = useState('')
   const [windowSize, setWindowSize] = useState({
@@ -1,9 +1,18 @@
 import { usePipelineTemplateList } from '@/service/use-pipeline'
 import TemplateCard from './template-card'
 import CreateCard from './create-card'
+import { useI18N } from '@/context/i18n'
+import { useMemo } from 'react'
+import { LanguagesSupported } from '@/i18n-config/language'
 
 const BuiltInPipelineList = () => {
-  const { data: pipelineList, isLoading } = usePipelineTemplateList({ type: 'built-in' })
+  const { locale } = useI18N()
+  const language = useMemo(() => {
+    if (['zh-Hans', 'ja-JP'].includes(locale))
+      return locale
+    return LanguagesSupported[0]
+  }, [locale])
+  const { data: pipelineList, isLoading } = usePipelineTemplateList({ type: 'built-in', language })
   const list = pipelineList?.pipeline_templates || []
 
   return (
@@ -146,7 +146,6 @@ const PluginItem: FC<Props> = ({
       {/* Organization & Name */}
       <div className='flex grow items-center overflow-hidden'>
         <OrgInfo
-          className='mt-0.5'
           orgName={orgName}
           packageName={name}
           packageNameClassName='w-auto max-w-[150px]'
@@ -154,8 +153,8 @@ const PluginItem: FC<Props> = ({
       {category === PluginType.extension && (
         <>
           <div className='system-xs-regular mx-2 text-text-quaternary'>·</div>
-          <div className='system-xs-regular flex space-x-1 overflow-hidden text-text-tertiary'>
-            <RiLoginCircleLine className='h-4 w-4 shrink-0' />
+          <div className='system-xs-regular flex items-center gap-x-1 overflow-hidden text-text-tertiary'>
+            <RiLoginCircleLine className='size-3 shrink-0' />
             <span
               className='truncate'
               title={t('plugin.endpointsEnabled', { num: endpoints_active })}
@@ -184,7 +183,7 @@ const PluginItem: FC<Props> = ({
         && <>
           <a href={getMarketplaceUrl(`/plugins/${author}/${name}`, { theme })} target='_blank' className='flex items-center gap-0.5'>
             <div className='system-2xs-medium-uppercase text-text-tertiary'>{t('plugin.from')} <span className='text-text-secondary'>marketplace</span></div>
-            <RiArrowRightUpLine className='h-3 w-3 text-text-tertiary' />
+            <RiArrowRightUpLine className='h-3 w-3 text-text-secondary' />
           </a>
         </>
       }
@@ -244,9 +244,8 @@ const ProviderDetail = ({
           <div className="flex h-5 items-center">
             <Title title={collection.label[language]} />
           </div>
-          <div className='mb-1 flex h-4 items-center justify-between'>
+          <div className='mb-1 mt-0.5 flex h-4 items-center justify-between'>
             <OrgInfo
-              className="mt-0.5"
               packageNameClassName='w-auto'
               orgName={collection.author}
               packageName={collection.name}
@@ -30,7 +30,7 @@ enum GeneratorView {
   result = 'result',
 }
 
-export const JsonSchemaGenerator: FC<JsonSchemaGeneratorProps> = ({
+const JsonSchemaGenerator: FC<JsonSchemaGeneratorProps> = ({
   onApply,
   crossAxisOffset,
 }) => {
@@ -1,12 +1,18 @@
 @import "preflight.css";
-@tailwind base;
-@tailwind components;
 
 @import '../../themes/light.css';
 @import '../../themes/dark.css';
 @import "../../themes/manual-light.css";
 @import "../../themes/manual-dark.css";
 
+@import "../components/base/button/index.css";
+@import "../components/base/action-button/index.css";
+@import "../components/base/modal/index.css";
+
+@tailwind base;
+@tailwind components;
+
 html {
   color-scheme: light;
 }
@@ -680,10 +686,6 @@ button:focus-within {
   display: none;
 }
 
-@import "../components/base/button/index.css";
-@import "../components/base/action-button/index.css";
-@import "../components/base/modal/index.css";
-
 @tailwind utilities;
 
 @layer utilities {
@@ -17,6 +17,7 @@ export enum DatasourceType {
 
 export type PipelineTemplateListParams = {
   type: 'built-in' | 'customized'
+  language?: string
 }
 
 export type PipelineTemplate = {
@@ -91,12 +91,10 @@ const remoteImageURLs = [hasSetWebPrefix ? new URL(`${process.env.NEXT_PUBLIC_WE
 /** @type {import('next').NextConfig} */
 const nextConfig = {
   basePath: process.env.NEXT_PUBLIC_BASE_PATH || '',
-  webpack: (config, { dev, isServer }) => {
-    if (dev) {
-      config.plugins.push(codeInspectorPlugin({ bundler: 'webpack' }))
-    }
-
-    return config
+  turbopack: {
+    rules: codeInspectorPlugin({
+      bundler: 'turbopack'
+    })
   },
   productionBrowserSourceMaps: false, // enable browser source map generation during the production build
   // Configure pageExtensions to include md and mdx

@@ -112,6 +110,10 @@ const nextConfig = {
     })),
   },
   experimental: {
+    optimizePackageImports: [
+      '@remixicon/react',
+      '@heroicons/react'
+    ],
   },
   // fix all before production. Now it slow the develop speed.
   eslint: {
@@ -19,7 +19,7 @@
     "and_qq >= 14.9"
   ],
   "scripts": {
-    "dev": "cross-env NODE_OPTIONS='--inspect' next dev",
+    "dev": "cross-env NODE_OPTIONS='--inspect' next dev --turbopack",
     "build": "next build",
     "build:docker": "next build && node scripts/optimize-standalone.js",
     "start": "cp -r .next/static .next/standalone/.next/static && cp -r public .next/standalone/public && cross-env PORT=$npm_config_port HOSTNAME=$npm_config_host node .next/standalone/server.js",
@@ -204,7 +204,7 @@
     "autoprefixer": "^10.4.20",
     "babel-loader": "^10.0.0",
     "bing-translate-api": "^4.0.2",
-    "code-inspector-plugin": "^0.18.1",
+    "code-inspector-plugin": "1.2.9",
     "cross-env": "^7.0.3",
     "eslint": "^9.35.0",
     "eslint-config-next": "15.5.0",
@@ -522,8 +522,8 @@ importers:
       specifier: ^4.0.2
       version: 4.1.0
     code-inspector-plugin:
-      specifier: ^0.18.1
-      version: 0.18.3
+      specifier: 1.2.9
+      version: 1.2.9
     cross-env:
       specifier: ^7.0.3
       version: 7.0.3
@@ -1375,6 +1375,24 @@ packages:
   '@clack/prompts@0.11.0':
     resolution: {integrity: sha512-pMN5FcrEw9hUkZA4f+zLlzivQSeQf5dRGJjSUbvVYDLvpKCdQx5OaknvKzgbtXOizhP+SJJJjqEbOe55uKKfAw==}
 
+  '@code-inspector/core@1.2.9':
+    resolution: {integrity: sha512-A1w+G73HlTB6S8X6sA6tT+ziWHTAcTyH+7FZ1Sgd3ZLXF/E/jT+hgRbKposjXMwxcbodRc6hBG6UyiV+VxwE6Q==}
+
+  '@code-inspector/esbuild@1.2.9':
+    resolution: {integrity: sha512-DuyfxGupV43CN8YElIqynAniBtE86i037+3OVJYrm3jlJscXzbV98/kOzvu+VJQQvElcDgpgD6C/aGmPvFEiUg==}
+
+  '@code-inspector/mako@1.2.9':
+    resolution: {integrity: sha512-8N+MHdr64AnthLB4v+YGe8/9bgog3BnkxIW/fqX5iVS0X06mF7X1pxfZOD2bABVtv1tW25lRtNs5AgvYJs0vpg==}
+
+  '@code-inspector/turbopack@1.2.9':
+    resolution: {integrity: sha512-UVOUbqU6rpi5eOkrFamKrdeSWb0/OFFJQBaxbgs1RK5V5f4/iVwC5KjO2wkjv8cOGU4EppLfBVSBI1ysOo8S5A==}
+
+  '@code-inspector/vite@1.2.9':
+    resolution: {integrity: sha512-saIokJ3o3SdrHEgTEg1fbbowbKfh7J4mYtu0i1mVfah1b1UfdCF/iFHTEJ6SADMiY47TeNZTg0TQWTlU1AWPww==}
+
+  '@code-inspector/webpack@1.2.9':
+    resolution: {integrity: sha512-9YEykVrOIc0zMV7pyTyZhCprjScjn6gPPmxb4/OQXKCrP2fAm+NB188rg0s95e4sM7U3qRUpPA4NUH5F7Ogo+g==}
+
 '@cspotcode/source-map-support@0.8.1':
   resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==}
   engines: {node: '>=12'}
@@ -4428,11 +4446,8 @@ packages:
     resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==}
     engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'}
 
-  code-inspector-core@0.18.3:
-    resolution: {integrity: sha512-60pT2cPoguMTUYdN1MMpjoPUnuF0ud/u7M2y+Vqit/bniLEit9dySEWAVxLU/Ukc5ILrDeLKEttc6fCMl9RUrA==}
-
-  code-inspector-plugin@0.18.3:
-    resolution: {integrity: sha512-d9oJXZUsnvfTaQDwFmDNA2F+AR/TXIxWg1rr8KGcEskltR2prbZsfuu1z70EAn4khpx0smfi/PvIIwNJQ7FAMw==}
+  code-inspector-plugin@1.2.9:
+    resolution: {integrity: sha512-PGp/AQ03vaajimG9rn5+eQHGifrym5CSNLCViPtwzot7FM3MqEkGNqcvimH0FVuv3wDOcP5KvETAUSLf1BE3HA==}
 
   collapse-white-space@2.1.0:
     resolution: {integrity: sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==}
@@ -5062,9 +5077,6 @@ packages:
   esast-util-from-js@2.0.1:
     resolution: {integrity: sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==}
 
-  esbuild-code-inspector-plugin@0.18.3:
-    resolution: {integrity: sha512-FaPt5eFMtW1oXMWqAcqfAJByNagP1V/R9dwDDLQO29JmryMF35+frskTqy+G53whmTaVi19+TCrFqhNbMZH5ZQ==}
-
   esbuild-register@3.6.0:
     resolution: {integrity: sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==}
     peerDependencies:
@@ -6420,8 +6432,8 @@ packages:
     resolution: {integrity: sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA==}
     engines: {node: '>=0.10'}
 
-  launch-ide@1.0.1:
-    resolution: {integrity: sha512-U7qBxSNk774PxWq4XbmRe0ThiIstPoa4sMH/OGSYxrFVvg8x3biXcF1fsH6wasDpEmEXMdINUrQhBdwsSgKyMg==}
+  launch-ide@1.2.0:
+    resolution: {integrity: sha512-7nXSPQOt3b2JT52Ge8jp4miFcY+nrUEZxNLWBzrEfjmByDTb9b5ytqMSwGhsNwY6Cntwop+6n7rWIFN0+S8PTw==}
 
   layout-base@1.0.2:
     resolution: {integrity: sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==}
@@ -8704,9 +8716,6 @@ packages:
   vfile@6.0.3:
     resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==}
 
-  vite-code-inspector-plugin@0.18.3:
-    resolution: {integrity: sha512-178H73vbDUHE+JpvfAfioUHlUr7qXCYIEa2YNXtzenFQGOjtae59P1jjcxGfa6pPHEnOoaitb13K+0qxwhi/WA==}
-
   vm-browserify@1.1.2:
     resolution: {integrity: sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==}
@@ -8765,9 +8774,6 @@ packages:
     engines: {node: '>= 10.13.0'}
     hasBin: true
 
-  webpack-code-inspector-plugin@0.18.3:
-    resolution: {integrity: sha512-3782rsJhBnRiw0IpR6EqnyGDQoiSq0CcGeLJ52rZXlszYCe8igXtcujq7OhI0byaivWQ1LW7sXKyMEoVpBhq0w==}
-
   webpack-dev-middleware@6.1.3:
     resolution: {integrity: sha512-A4ChP0Qj8oGociTs6UdlRUGANIGrCDL3y+pmQMc+dSsraXHCatFpmMey4mYELA+juqwUqwQsUgJJISXl1KWmiw==}
     engines: {node: '>= 14.15.0'}
@@ -10004,6 +10010,48 @@ snapshots:
     picocolors: 1.1.1
     sisteransi: 1.0.5
 
+  '@code-inspector/core@1.2.9':
+    dependencies:
+      '@vue/compiler-dom': 3.5.17
+      chalk: 4.1.2
+      dotenv: 16.6.1
+      launch-ide: 1.2.0
+      portfinder: 1.0.37
+    transitivePeerDependencies:
+      - supports-color
+
+  '@code-inspector/esbuild@1.2.9':
+    dependencies:
+      '@code-inspector/core': 1.2.9
+    transitivePeerDependencies:
+      - supports-color
+
+  '@code-inspector/mako@1.2.9':
+    dependencies:
+      '@code-inspector/core': 1.2.9
+    transitivePeerDependencies:
+      - supports-color
+
+  '@code-inspector/turbopack@1.2.9':
+    dependencies:
+      '@code-inspector/core': 1.2.9
+      '@code-inspector/webpack': 1.2.9
+    transitivePeerDependencies:
+      - supports-color
+
+  '@code-inspector/vite@1.2.9':
+    dependencies:
+      '@code-inspector/core': 1.2.9
+      chalk: 4.1.1
+    transitivePeerDependencies:
+      - supports-color
+
+  '@code-inspector/webpack@1.2.9':
+    dependencies:
+      '@code-inspector/core': 1.2.9
+    transitivePeerDependencies:
+      - supports-color
+
 '@cspotcode/source-map-support@0.8.1':
   dependencies:
     '@jridgewell/trace-mapping': 0.3.9
@@ -12810,7 +12858,7 @@ snapshots:
 '@vue/compiler-core@3.5.17':
   dependencies:
-    '@babel/parser': 7.28.0
+    '@babel/parser': 7.28.4
     '@vue/shared': 3.5.17
     entities: 4.5.0
     estree-walker: 2.0.2
@@ -13514,24 +13562,15 @@ snapshots:
   co@4.6.0: {}
 
-  code-inspector-core@0.18.3:
-    dependencies:
-      '@vue/compiler-dom': 3.5.17
-      chalk: 4.1.1
-      dotenv: 16.6.1
-      launch-ide: 1.0.1
-      portfinder: 1.0.37
-    transitivePeerDependencies:
-      - supports-color
-
-  code-inspector-plugin@0.18.3:
-    dependencies:
-      chalk: 4.1.1
-      code-inspector-core: 0.18.3
-      dotenv: 16.6.1
-      esbuild-code-inspector-plugin: 0.18.3
-      vite-code-inspector-plugin: 0.18.3
-      webpack-code-inspector-plugin: 0.18.3
-    transitivePeerDependencies:
-      - supports-color
+  code-inspector-plugin@1.2.9:
+    dependencies:
+      '@code-inspector/core': 1.2.9
+      '@code-inspector/esbuild': 1.2.9
+      '@code-inspector/mako': 1.2.9
+      '@code-inspector/turbopack': 1.2.9
+      '@code-inspector/vite': 1.2.9
+      '@code-inspector/webpack': 1.2.9
+      chalk: 4.1.1
+    transitivePeerDependencies:
+      - supports-color
@@ -14175,12 +14214,6 @@ snapshots:
       esast-util-from-estree: 2.0.0
       vfile-message: 4.0.2
 
-  esbuild-code-inspector-plugin@0.18.3:
-    dependencies:
-      code-inspector-core: 0.18.3
-    transitivePeerDependencies:
-      - supports-color
-
   esbuild-register@3.6.0(esbuild@0.25.0):
     dependencies:
       debug: 4.4.1
||||||
esbuild-register@3.6.0(esbuild@0.25.0):
|
esbuild-register@3.6.0(esbuild@0.25.0):
|
||||||
dependencies:
|
dependencies:
|
||||||
debug: 4.4.1
|
debug: 4.4.1
|
||||||
|
|
@ -16035,7 +16068,7 @@ snapshots:
|
||||||
dependencies:
|
dependencies:
|
||||||
language-subtag-registry: 0.3.23
|
language-subtag-registry: 0.3.23
|
||||||
|
|
||||||
launch-ide@1.0.1:
|
launch-ide@1.2.0:
|
||||||
dependencies:
|
dependencies:
|
||||||
chalk: 4.1.2
|
chalk: 4.1.2
|
||||||
dotenv: 16.6.1
|
dotenv: 16.6.1
|
||||||
|
|
@@ -18796,12 +18829,6 @@ snapshots:
       '@types/unist': 3.0.3
       vfile-message: 4.0.2
 
-  vite-code-inspector-plugin@0.18.3:
-    dependencies:
-      code-inspector-core: 0.18.3
-    transitivePeerDependencies:
-      - supports-color
-
   vm-browserify@1.1.2: {}
 
   void-elements@3.1.0: {}
@@ -18872,12 +18899,6 @@ snapshots:
       - bufferutil
       - utf-8-validate
 
-  webpack-code-inspector-plugin@0.18.3:
-    dependencies:
-      code-inspector-core: 0.18.3
-    transitivePeerDependencies:
-      - supports-color
-
   webpack-dev-middleware@6.1.3(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)):
     dependencies:
       colorette: 2.0.20
@@ -40,8 +40,9 @@ const NAME_SPACE = 'pipeline'
 
 export const PipelineTemplateListQueryKeyPrefix = [NAME_SPACE, 'template-list']
 export const usePipelineTemplateList = (params: PipelineTemplateListParams) => {
+  const { type, language } = params
   return useQuery<PipelineTemplateListResponse>({
-    queryKey: [...PipelineTemplateListQueryKeyPrefix, params.type],
+    queryKey: [...PipelineTemplateListQueryKeyPrefix, type, language],
     queryFn: () => {
       return get<PipelineTemplateListResponse>('/rag/pipeline/templates', { params })
     },

@@ -55,7 +56,7 @@ export const useInvalidCustomizedTemplateList = () => {
 export const usePipelineTemplateById = (params: PipelineTemplateByIdRequest, enabled: boolean) => {
   const { template_id, type } = params
   return useQuery<PipelineTemplateByIdResponse>({
-    queryKey: [NAME_SPACE, 'template', template_id],
+    queryKey: [NAME_SPACE, 'template', type, template_id],
     queryFn: () => {
       return get<PipelineTemplateByIdResponse>(`/rag/pipeline/templates/${template_id}`, {
         params: {

@@ -64,6 +65,7 @@ export const usePipelineTemplateById = (params: PipelineTemplateByIdRequest, ena
       })
     },
     enabled,
+    staleTime: 0,
   })
 }
@@ -26,6 +26,9 @@
     "paths": {
       "@/*": [
         "./*"
+      ],
+      "~@/*": [
+        "./*"
       ]
     }
   },