mirror of https://github.com/langgenius/dify.git

commit 5ac1e3584d
Merge branch 'main' into feat/rag-pipeline

@@ -31,11 +31,19 @@ jobs:
             echo "FILES_CHANGED=false" >> $GITHUB_ENV
           fi

+      - name: Install pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          version: 10
+          run_install: false
+
       - name: Set up Node.js
         if: env.FILES_CHANGED == 'true'
         uses: actions/setup-node@v4
         with:
           node-version: 'lts/*'
+          cache: pnpm
+          cache-dependency-path: ./web/package.json

       - name: Install dependencies
         if: env.FILES_CHANGED == 'true'

@@ -152,6 +152,7 @@ QDRANT_API_KEY=difyai123456
 QDRANT_CLIENT_TIMEOUT=20
 QDRANT_GRPC_ENABLED=false
 QDRANT_GRPC_PORT=6334
+QDRANT_REPLICATION_FACTOR=1

 #Couchbase configuration
 COUCHBASE_CONNECTION_STRING=127.0.0.1

@@ -33,3 +33,8 @@ class QdrantConfig(BaseSettings):
         description="Port number for gRPC connection to Qdrant server (default is 6334)",
         default=6334,
     )
+
+    QDRANT_REPLICATION_FACTOR: PositiveInt = Field(
+        description="Replication factor for Qdrant collections (default is 1)",
+        default=1,
+    )

@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):

     CURRENT_VERSION: str = Field(
         description="Dify version",
-        default="1.4.0",
+        default="1.4.1",
     )

     COMMIT_SHA: str = Field(

@@ -41,12 +41,16 @@ class PluginListApi(Resource):
     @account_initialization_required
     def get(self):
         tenant_id = current_user.current_tenant_id
+        parser = reqparse.RequestParser()
+        parser.add_argument("page", type=int, required=False, location="args", default=1)
+        parser.add_argument("page_size", type=int, required=False, location="args", default=256)
+        args = parser.parse_args()
         try:
-            plugins = PluginService.list(tenant_id)
+            plugins_with_total = PluginService.list_with_total(tenant_id, args["page"], args["page_size"])
        except PluginDaemonClientSideError as e:
            raise ValueError(e)

-        return jsonable_encoder({"plugins": plugins})
+        return jsonable_encoder({"plugins": plugins_with_total.list, "total": plugins_with_total.total})


 class PluginListLatestVersionsApi(Resource):

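Note: the new paginated endpoint can be exercised with a short script like the sketch below; the console route path, host, and auth header are illustrative assumptions, not part of this diff.

    import requests

    # Hypothetical call against the paginated console plugin list.
    resp = requests.get(
        "http://localhost:5001/console/api/workspaces/current/plugin/list",
        params={"page": 1, "page_size": 50},
        headers={"Authorization": "Bearer <console-access-token>"},  # placeholder token
    )
    data = resp.json()
    # The response now carries a total alongside the page of plugins.
    print(data["total"], len(data["plugins"]))
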
@@ -452,7 +452,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         for var, val in context.items():
             var.set(val)

-        # Save current user before entering new app context
+        # FIXME(-LAN-): Save current user before entering new app context
         from flask import g

         saved_user = None

@@ -232,7 +232,7 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
         for var, val in context.items():
             var.set(val)

-        # Save current user before entering new app context
+        # FIXME(-LAN-): Save current user before entering new app context
         from flask import g

         saved_user = None

@@ -411,7 +411,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         for var, val in context.items():
             var.set(val)

-        # Save current user before entering new app context
+        # FIXME(-LAN-): Save current user before entering new app context
         from flask import g

         saved_user = None

@@ -9,7 +9,7 @@ from core.agent.plugin_entities import AgentProviderEntityWithPlugin
 from core.model_runtime.entities.model_entities import AIModelEntity
 from core.model_runtime.entities.provider_entities import ProviderEntity
 from core.plugin.entities.base import BasePluginEntity
-from core.plugin.entities.plugin import PluginDeclaration
+from core.plugin.entities.plugin import PluginDeclaration, PluginEntity
 from core.tools.entities.common_entities import I18nObject
 from core.tools.entities.tool_entities import ToolProviderEntityWithPlugin

@@ -167,3 +167,8 @@ class PluginOAuthAuthorizationUrlResponse(BaseModel):

 class PluginOAuthCredentialsResponse(BaseModel):
     credentials: Mapping[str, Any] = Field(description="The credentials of the OAuth.")
+
+
+class PluginListResponse(BaseModel):
+    list: list[PluginEntity]
+    total: int

@@ -9,7 +9,12 @@ from core.plugin.entities.plugin import (
     PluginInstallation,
     PluginInstallationSource,
 )
-from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginInstallTaskStartResponse, PluginUploadResponse
+from core.plugin.entities.plugin_daemon import (
+    PluginInstallTask,
+    PluginInstallTaskStartResponse,
+    PluginListResponse,
+    PluginUploadResponse,
+)
 from core.plugin.impl.base import BasePluginClient

@@ -27,11 +32,20 @@ class PluginInstaller(BasePluginClient):
         )

     def list_plugins(self, tenant_id: str) -> list[PluginEntity]:
-        return self._request_with_plugin_daemon_response(
+        result = self._request_with_plugin_daemon_response(
             "GET",
             f"plugin/{tenant_id}/management/list",
-            list[PluginEntity],
+            PluginListResponse,
             params={"page": 1, "page_size": 256},
         )
+        return result.list
+
+    def list_plugins_with_total(self, tenant_id: str, page: int, page_size: int) -> PluginListResponse:
+        return self._request_with_plugin_daemon_response(
+            "GET",
+            f"plugin/{tenant_id}/management/list",
+            PluginListResponse,
+            params={"page": page, "page_size": page_size},
+        )

     def upload_pkg(

@@ -46,6 +46,7 @@ class QdrantConfig(BaseModel):
     root_path: Optional[str] = None
     grpc_port: int = 6334
     prefer_grpc: bool = False
+    replication_factor: int = 1

     def to_qdrant_params(self):
         if self.endpoint and self.endpoint.startswith("path:"):
@@ -119,11 +120,13 @@ class QdrantVector(BaseVector):
                 max_indexing_threads=0,
                 on_disk=False,
             )
+
             self._client.create_collection(
                 collection_name=collection_name,
                 vectors_config=vectors_config,
                 hnsw_config=hnsw_config,
                 timeout=int(self._client_config.timeout),
+                replication_factor=self._client_config.replication_factor,
             )

             # create group_id payload index
@@ -466,5 +469,6 @@ class QdrantVectorFactory(AbstractVectorFactory):
                 timeout=dify_config.QDRANT_CLIENT_TIMEOUT,
                 grpc_port=dify_config.QDRANT_GRPC_PORT,
                 prefer_grpc=dify_config.QDRANT_GRPC_ENABLED,
+                replication_factor=dify_config.QDRANT_REPLICATION_FACTOR,
             ),
         )

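For context, the new replication_factor plumbing ends up as a plain argument to the Qdrant client's create_collection call. A minimal standalone sketch (collection name and vector size are made up; replication only has an effect on a multi-node Qdrant cluster):

    from qdrant_client import QdrantClient, models

    client = QdrantClient(url="http://localhost:6333", timeout=20)

    # On a single node this still stores one copy; on a cluster, Qdrant keeps
    # replication_factor copies of each shard for availability.
    client.create_collection(
        collection_name="example_collection",
        vectors_config=models.VectorParams(size=1536, distance=models.Distance.COSINE),
        replication_factor=2,
    )
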
@@ -49,6 +49,7 @@ class TidbOnQdrantConfig(BaseModel):
     root_path: Optional[str] = None
     grpc_port: int = 6334
     prefer_grpc: bool = False
+    replication_factor: int = 1

     def to_qdrant_params(self):
         if self.endpoint and self.endpoint.startswith("path:"):
@@ -134,6 +135,7 @@ class TidbOnQdrantVector(BaseVector):
                 vectors_config=vectors_config,
                 hnsw_config=hnsw_config,
                 timeout=int(self._client_config.timeout),
+                replication_factor=self._client_config.replication_factor,
             )

             # create group_id payload index
@@ -484,6 +486,7 @@ class TidbOnQdrantVectorFactory(AbstractVectorFactory):
                 timeout=dify_config.TIDB_ON_QDRANT_CLIENT_TIMEOUT,
                 grpc_port=dify_config.TIDB_ON_QDRANT_GRPC_PORT,
                 prefer_grpc=dify_config.TIDB_ON_QDRANT_GRPC_ENABLED,
+                replication_factor=dify_config.QDRANT_REPLICATION_FACTOR,
             ),
         )

@@ -1,21 +1,13 @@
-import hashlib
-import json
 import mimetypes
-import os
 import re
-import site
-import subprocess
-import tempfile
-import unicodedata
-from contextlib import contextmanager
-from pathlib import Path
-from typing import Any, Literal, Optional, cast
+from collections.abc import Sequence
+from dataclasses import dataclass
+from typing import Any, Optional, cast
 from urllib.parse import unquote

 import chardet
 import cloudscraper  # type: ignore
-from bs4 import BeautifulSoup, CData, Comment, NavigableString  # type: ignore
-from regex import regex  # type: ignore
+from readabilipy import simple_json_from_html_string  # type: ignore

 from core.helper import ssrf_proxy
 from core.rag.extractor import extract_processor
@@ -23,9 +15,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor

 FULL_TEMPLATE = """
 TITLE: {title}
-AUTHORS: {authors}
-PUBLISH DATE: {publish_date}
-TOP_IMAGE_URL: {top_image}
+AUTHOR: {author}
 TEXT:

 {text}
@@ -73,8 +63,8 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str:
         response = ssrf_proxy.get(url, headers=headers, follow_redirects=True, timeout=(120, 300))
     elif response.status_code == 403:
         scraper = cloudscraper.create_scraper()
-        scraper.perform_request = ssrf_proxy.make_request
-        response = scraper.get(url, headers=headers, follow_redirects=True, timeout=(120, 300))
+        scraper.perform_request = ssrf_proxy.make_request  # type: ignore
+        response = scraper.get(url, headers=headers, follow_redirects=True, timeout=(120, 300))  # type: ignore

     if response.status_code != 200:
         return "URL returned status code {}.".format(response.status_code)
@@ -90,273 +80,36 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str:
     else:
         content = response.text

-    a = extract_using_readabilipy(content)
+    article = extract_using_readabilipy(content)

-    if not a["plain_text"] or not a["plain_text"].strip():
+    if not article.text:
         return ""

     res = FULL_TEMPLATE.format(
-        title=a["title"],
-        authors=a["byline"],
-        publish_date=a["date"],
-        top_image="",
-        text=a["plain_text"] or "",
+        title=article.title,
+        author=article.auther,
+        text=article.text,
     )

     return res


-def extract_using_readabilipy(html):
-    with tempfile.NamedTemporaryFile(delete=False, mode="w+") as f_html:
-        f_html.write(html)
-        f_html.close()
-    html_path = f_html.name
-
-    # Call Mozilla's Readability.js Readability.parse() function via node, writing output to a temporary file
-    article_json_path = html_path + ".json"
-    jsdir = os.path.join(find_module_path("readabilipy"), "javascript")
-    with chdir(jsdir):
-        subprocess.check_call(["node", "ExtractArticle.js", "-i", html_path, "-o", article_json_path])
-
-    # Read output of call to Readability.parse() from JSON file and return as Python dictionary
-    input_json = json.loads(Path(article_json_path).read_text(encoding="utf-8"))
-
-    # Deleting files after processing
-    os.unlink(article_json_path)
-    os.unlink(html_path)
-
-    article_json: dict[str, Any] = {
-        "title": None,
-        "byline": None,
-        "date": None,
-        "content": None,
-        "plain_content": None,
-        "plain_text": None,
-    }
-    # Populate article fields from readability fields where present
-    if input_json:
-        if input_json.get("title"):
-            article_json["title"] = input_json["title"]
-        if input_json.get("byline"):
-            article_json["byline"] = input_json["byline"]
-        if input_json.get("date"):
-            article_json["date"] = input_json["date"]
-        if input_json.get("content"):
-            article_json["content"] = input_json["content"]
-            article_json["plain_content"] = plain_content(article_json["content"], False, False)
-            article_json["plain_text"] = extract_text_blocks_as_plain_text(article_json["plain_content"])
-        if input_json.get("textContent"):
-            article_json["plain_text"] = input_json["textContent"]
-            article_json["plain_text"] = re.sub(r"\n\s*\n", "\n", article_json["plain_text"])
-
-    return article_json
+@dataclass
+class Article:
+    title: str
+    auther: str
+    text: Sequence[dict]


-def find_module_path(module_name):
-    for package_path in site.getsitepackages():
-        potential_path = os.path.join(package_path, module_name)
-        if os.path.exists(potential_path):
-            return potential_path
-
-    return None
-
-
-@contextmanager
-def chdir(path):
-    """Change directory in context and return to original on exit"""
-    # From https://stackoverflow.com/a/37996581, couldn't find a built-in
-    original_path = os.getcwd()
-    os.chdir(path)
-    try:
-        yield
-    finally:
-        os.chdir(original_path)
-
-
-def extract_text_blocks_as_plain_text(paragraph_html):
-    # Load article as DOM
-    soup = BeautifulSoup(paragraph_html, "html.parser")
-    # Select all lists
-    list_elements = soup.find_all(["ul", "ol"])
-    # Prefix text in all list items with "* " and make lists paragraphs
-    for list_element in list_elements:
-        plain_items = "".join(
-            list(filter(None, [plain_text_leaf_node(li)["text"] for li in list_element.find_all("li")]))
-        )
-        list_element.string = plain_items
-        list_element.name = "p"
-    # Select all text blocks
-    text_blocks = [s.parent for s in soup.find_all(string=True)]
-    text_blocks = [plain_text_leaf_node(block) for block in text_blocks]
-    # Drop empty paragraphs
-    text_blocks = list(filter(lambda p: p["text"] is not None, text_blocks))
-    return text_blocks
-
-
-def plain_text_leaf_node(element):
-    # Extract all text, stripped of any child HTML elements and normalize it
-    plain_text = normalize_text(element.get_text())
-    if plain_text != "" and element.name == "li":
-        plain_text = "* {}, ".format(plain_text)
-    if plain_text == "":
-        plain_text = None
-    if "data-node-index" in element.attrs:
-        plain = {"node_index": element["data-node-index"], "text": plain_text}
-    else:
-        plain = {"text": plain_text}
-    return plain
-
-
-def plain_content(readability_content, content_digests, node_indexes):
-    # Load article as DOM
-    soup = BeautifulSoup(readability_content, "html.parser")
-    # Make all elements plain
-    elements = plain_elements(soup.contents, content_digests, node_indexes)
-    if node_indexes:
-        # Add node index attributes to nodes
-        elements = [add_node_indexes(element) for element in elements]
-    # Replace article contents with plain elements
-    soup.contents = elements
-    return str(soup)
-
-
-def plain_elements(elements, content_digests, node_indexes):
-    # Get plain content versions of all elements
-    elements = [plain_element(element, content_digests, node_indexes) for element in elements]
-    if content_digests:
-        # Add content digest attribute to nodes
-        elements = [add_content_digest(element) for element in elements]
-    return elements
-
-
-def plain_element(element, content_digests, node_indexes):
-    # For lists, we make each item plain text
-    if is_leaf(element):
-        # For leaf node elements, extract the text content, discarding any HTML tags
-        # 1. Get element contents as text
-        plain_text = element.get_text()
-        # 2. Normalize the extracted text string to a canonical representation
-        plain_text = normalize_text(plain_text)
-        # 3. Update element content to be plain text
-        element.string = plain_text
-    elif is_text(element):
-        if is_non_printing(element):
-            # The simplified HTML may have come from Readability.js so might
-            # have non-printing text (e.g. Comment or CData). In this case, we
-            # keep the structure, but ensure that the string is empty.
-            element = type(element)("")
-        else:
-            plain_text = element.string
-            plain_text = normalize_text(plain_text)
-            element = type(element)(plain_text)
-    else:
-        # If not a leaf node or leaf type call recursively on child nodes, replacing
-        element.contents = plain_elements(element.contents, content_digests, node_indexes)
-    return element
-
-
-def add_node_indexes(element, node_index="0"):
-    # Can't add attributes to string types
-    if is_text(element):
-        return element
-    # Add index to current element
-    element["data-node-index"] = node_index
-    # Add index to child elements
-    for local_idx, child in enumerate([c for c in element.contents if not is_text(c)], start=1):
-        # Can't add attributes to leaf string types
-        child_index = "{stem}.{local}".format(stem=node_index, local=local_idx)
-        add_node_indexes(child, node_index=child_index)
-    return element
-
-
-def normalize_text(text):
-    """Normalize unicode and whitespace."""
-    # Normalize unicode first to try and standardize whitespace characters as much as possible before normalizing them
-    text = strip_control_characters(text)
-    text = normalize_unicode(text)
-    text = normalize_whitespace(text)
-    return text
-
-
-def strip_control_characters(text):
-    """Strip out unicode control characters which might break the parsing."""
-    # Unicode control characters
-    #   [Cc]: Other, Control [includes new lines]
-    #   [Cf]: Other, Format
-    #   [Cn]: Other, Not Assigned
-    #   [Co]: Other, Private Use
-    #   [Cs]: Other, Surrogate
-    control_chars = {"Cc", "Cf", "Cn", "Co", "Cs"}
-    retained_chars = ["\t", "\n", "\r", "\f"]
-
-    # Remove non-printing control characters
-    return "".join(
-        [
-            "" if (unicodedata.category(char) in control_chars) and (char not in retained_chars) else char
-            for char in text
-        ]
-    )
+def extract_using_readabilipy(html: str):
+    json_article: dict[str, Any] = simple_json_from_html_string(html, use_readability=True)
+    article = Article(
+        title=json_article.get("title") or "",
+        auther=json_article.get("byline") or "",
+        text=json_article.get("plain_text") or [],
+    )

-
-def normalize_unicode(text):
-    """Normalize unicode such that things that are visually equivalent map to the same unicode string where possible."""
-    normal_form: Literal["NFC", "NFD", "NFKC", "NFKD"] = "NFKC"
-    text = unicodedata.normalize(normal_form, text)
-    return text
-
-
-def normalize_whitespace(text):
-    """Replace runs of whitespace characters with a single space as this is what happens when HTML text is displayed."""
-    text = regex.sub(r"\s+", " ", text)
-    # Remove leading and trailing whitespace
-    text = text.strip()
-    return text
-
-
-def is_leaf(element):
-    return element.name in {"p", "li"}
-
-
-def is_text(element):
-    return isinstance(element, NavigableString)
-
-
-def is_non_printing(element):
-    return any(isinstance(element, _e) for _e in [Comment, CData])
-
-
-def add_content_digest(element):
-    if not is_text(element):
-        element["data-content-digest"] = content_digest(element)
-    return element
-
-
-def content_digest(element):
-    digest: Any
-    if is_text(element):
-        # Hash
-        trimmed_string = element.string.strip()
-        if trimmed_string == "":
-            digest = ""
-        else:
-            digest = hashlib.sha256(trimmed_string.encode("utf-8")).hexdigest()
-    else:
-        contents = element.contents
-        num_contents = len(contents)
-        if num_contents == 0:
-            # No hash when no child elements exist
-            digest = ""
-        elif num_contents == 1:
-            # If single child, use digest of child
-            digest = content_digest(contents[0])
-        else:
-            # Build content digest from the "non-empty" digests of child nodes
-            digest = hashlib.sha256()
-            child_digests = list(filter(lambda x: x != "", [content_digest(content) for content in contents]))
-            for child in child_digests:
-                digest.update(child.encode("utf-8"))
-            digest = digest.hexdigest()
-    return digest
+    return article


 def get_image_upload_file_ids(content):

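Note: the rewrite replaces the hand-rolled Node subprocess and BeautifulSoup post-processing with readabilipy's public helper. A minimal sketch of that call (the sample HTML is made up; with use_readability=True, readabilipy still needs Node.js installed, since it runs Mozilla's Readability.js under the hood):

    from readabilipy import simple_json_from_html_string

    html = "<html><body><article><h1>Hello</h1><p>Body text.</p></article></body></html>"
    article = simple_json_from_html_string(html, use_readability=True)

    # Keys mirror what the new Article dataclass consumes: "title" and "byline"
    # are strings (or None); "plain_text" is a list of {"text": ...} blocks,
    # which is why Article.text is typed Sequence[dict].
    print(article["title"], article["byline"])
    for block in article["plain_text"] or []:
        print(block["text"])
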
@@ -9,7 +9,7 @@ from copy import copy, deepcopy
 from datetime import UTC, datetime
 from typing import Any, Optional, cast

-from flask import Flask, current_app
+from flask import Flask, current_app, has_request_context

 from configs import dify_config
 from core.app.apps.base_app_queue_manager import GenerateTaskStoppedError
@@ -540,8 +540,21 @@ class GraphEngine:
         for var, val in context.items():
             var.set(val)

+        # FIXME(-LAN-): Save current user before entering new app context
+        from flask import g
+
+        saved_user = None
+        if has_request_context() and hasattr(g, "_login_user"):
+            saved_user = g._login_user
+
         with flask_app.app_context():
             try:
+                # Restore user in new app context
+                if saved_user is not None:
+                    from flask import g
+
+                    g._login_user = saved_user
+
                 q.put(
                     ParallelBranchRunStartedEvent(
                         parallel_id=parallel_id,

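The save/restore dance around flask.g introduced here (and in the matching hunks in the app generators and IterationNode) reduces to one reusable pattern. A minimal self-contained sketch; run_in_fresh_app_context is an illustrative name, not code from this diff:

    from flask import Flask, g, has_request_context

    def run_in_fresh_app_context(flask_app: Flask, fn):
        # flask_login keeps the authenticated user on g._login_user, and g is
        # scoped to the active context, so a freshly pushed app context starts
        # without it. Capture the user first, then re-attach it inside.
        saved_user = None
        if has_request_context() and hasattr(g, "_login_user"):
            saved_user = g._login_user

        with flask_app.app_context():
            if saved_user is not None:
                g._login_user = saved_user
            return fn()
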
@@ -7,6 +7,7 @@ import tempfile
 from collections.abc import Mapping, Sequence
 from typing import Any, cast

+import chardet
 import docx
 import pandas as pd
 import pypandoc  # type: ignore
@@ -180,26 +181,64 @@ def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str)

 def _extract_text_from_plain_text(file_content: bytes) -> str:
     try:
-        return file_content.decode("utf-8", "ignore")
-    except UnicodeDecodeError as e:
-        raise TextExtractionError("Failed to decode plain text file") from e
+        # Detect encoding using chardet
+        result = chardet.detect(file_content)
+        encoding = result["encoding"]
+
+        # Fallback to utf-8 if detection fails
+        if not encoding:
+            encoding = "utf-8"
+
+        return file_content.decode(encoding, errors="ignore")
+    except (UnicodeDecodeError, LookupError) as e:
+        # If decoding fails, try with utf-8 as last resort
+        try:
+            return file_content.decode("utf-8", errors="ignore")
+        except UnicodeDecodeError:
+            raise TextExtractionError(f"Failed to decode plain text file: {e}") from e


 def _extract_text_from_json(file_content: bytes) -> str:
     try:
-        json_data = json.loads(file_content.decode("utf-8", "ignore"))
+        # Detect encoding using chardet
+        result = chardet.detect(file_content)
+        encoding = result["encoding"]
+
+        # Fallback to utf-8 if detection fails
+        if not encoding:
+            encoding = "utf-8"
+
+        json_data = json.loads(file_content.decode(encoding, errors="ignore"))
         return json.dumps(json_data, indent=2, ensure_ascii=False)
-    except (UnicodeDecodeError, json.JSONDecodeError) as e:
-        raise TextExtractionError(f"Failed to decode or parse JSON file: {e}") from e
+    except (UnicodeDecodeError, LookupError, json.JSONDecodeError) as e:
+        # If decoding fails, try with utf-8 as last resort
+        try:
+            json_data = json.loads(file_content.decode("utf-8", errors="ignore"))
+            return json.dumps(json_data, indent=2, ensure_ascii=False)
+        except (UnicodeDecodeError, json.JSONDecodeError):
+            raise TextExtractionError(f"Failed to decode or parse JSON file: {e}") from e


 def _extract_text_from_yaml(file_content: bytes) -> str:
     """Extract the content from yaml file"""
     try:
-        yaml_data = yaml.safe_load_all(file_content.decode("utf-8", "ignore"))
+        # Detect encoding using chardet
+        result = chardet.detect(file_content)
+        encoding = result["encoding"]
+
+        # Fallback to utf-8 if detection fails
+        if not encoding:
+            encoding = "utf-8"
+
+        yaml_data = yaml.safe_load_all(file_content.decode(encoding, errors="ignore"))
         return cast(str, yaml.dump_all(yaml_data, allow_unicode=True, sort_keys=False))
-    except (UnicodeDecodeError, yaml.YAMLError) as e:
-        raise TextExtractionError(f"Failed to decode or parse YAML file: {e}") from e
+    except (UnicodeDecodeError, LookupError, yaml.YAMLError) as e:
+        # If decoding fails, try with utf-8 as last resort
+        try:
+            yaml_data = yaml.safe_load_all(file_content.decode("utf-8", errors="ignore"))
+            return cast(str, yaml.dump_all(yaml_data, allow_unicode=True, sort_keys=False))
+        except (UnicodeDecodeError, yaml.YAMLError):
+            raise TextExtractionError(f"Failed to decode or parse YAML file: {e}") from e


 def _extract_text_from_pdf(file_content: bytes) -> str:
@@ -338,7 +377,20 @@ def _extract_text_from_file(file: File):

 def _extract_text_from_csv(file_content: bytes) -> str:
     try:
-        csv_file = io.StringIO(file_content.decode("utf-8", "ignore"))
+        # Detect encoding using chardet
+        result = chardet.detect(file_content)
+        encoding = result["encoding"]
+
+        # Fallback to utf-8 if detection fails
+        if not encoding:
+            encoding = "utf-8"
+
+        try:
+            csv_file = io.StringIO(file_content.decode(encoding, errors="ignore"))
+        except (UnicodeDecodeError, LookupError):
+            # If decoding fails, try with utf-8 as last resort
+            csv_file = io.StringIO(file_content.decode("utf-8", errors="ignore"))
+
         csv_reader = csv.reader(csv_file)
         rows = list(csv_reader)

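All four extraction helpers now share the same detect-then-decode shape; pulled out on its own it looks like this (a sketch with an illustrative function name):

    import chardet

    def decode_best_effort(data: bytes) -> str:
        # chardet.detect returns {"encoding": ..., "confidence": ...}; encoding
        # can be None for short or ambiguous inputs, hence the utf-8 fallback.
        encoding = chardet.detect(data)["encoding"] or "utf-8"
        try:
            return data.decode(encoding, errors="ignore")
        except (UnicodeDecodeError, LookupError):
            # LookupError covers detected-but-unsupported codec names.
            return data.decode("utf-8", errors="ignore")

    print(decode_best_effort("Hello, world©.".encode("latin-1")))  # -> Hello, world©.
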
@@ -7,7 +7,7 @@ from datetime import UTC, datetime
 from queue import Empty, Queue
 from typing import TYPE_CHECKING, Any, Optional, cast

-from flask import Flask, current_app
+from flask import Flask, current_app, has_request_context

 from configs import dify_config
 from core.variables import ArrayVariable, IntegerVariable, NoneVariable
@@ -586,7 +586,21 @@ class IterationNode(BaseNode[IterationNodeData]):
         """
         for var, val in context.items():
             var.set(val)

+        # FIXME(-LAN-): Save current user before entering new app context
+        from flask import g
+
+        saved_user = None
+        if has_request_context() and hasattr(g, "_login_user"):
+            saved_user = g._login_user
+
         with flask_app.app_context():
+            # Restore user in new app context
+            if saved_user is not None:
+                from flask import g
+
+                g._login_user = saved_user
+
             parallel_mode_run_id = uuid.uuid4().hex
             graph_engine_copy = graph_engine.create_copy()
             variable_pool_copy = graph_engine_copy.graph_runtime_state.variable_pool

@@ -125,6 +125,7 @@ class WorkflowCycleManager:
             )
         )

+        self._workflow_execution_repository.save(workflow_execution)
         return workflow_execution

     def handle_workflow_run_partial_success(
@@ -158,6 +159,7 @@ class WorkflowCycleManager:
             )
         )

+        self._workflow_execution_repository.save(execution)
         return execution

     def handle_workflow_run_failed(
@@ -172,44 +174,45 @@ class WorkflowCycleManager:
         trace_manager: Optional[TraceQueueManager] = None,
         exceptions_count: int = 0,
     ) -> WorkflowExecution:
-        execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
+        workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id)

-        execution.status = WorkflowExecutionStatus(status.value)
-        execution.error_message = error_message
-        execution.total_tokens = total_tokens
-        execution.total_steps = total_steps
-        execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
-        execution.exceptions_count = exceptions_count
+        workflow_execution.status = WorkflowExecutionStatus(status.value)
+        workflow_execution.error_message = error_message
+        workflow_execution.total_tokens = total_tokens
+        workflow_execution.total_steps = total_steps
+        workflow_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
+        workflow_execution.exceptions_count = exceptions_count

         # Use the instance repository to find running executions for a workflow run
-        running_domain_executions = self._workflow_node_execution_repository.get_running_executions(
-            workflow_run_id=execution.id
+        running_node_executions = self._workflow_node_execution_repository.get_running_executions(
+            workflow_run_id=workflow_execution.id
         )

         # Update the domain models
         now = datetime.now(UTC).replace(tzinfo=None)
-        for domain_execution in running_domain_executions:
-            if domain_execution.node_execution_id:
+        for node_execution in running_node_executions:
+            if node_execution.node_execution_id:
                 # Update the domain model
-                domain_execution.status = NodeExecutionStatus.FAILED
-                domain_execution.error = error_message
-                domain_execution.finished_at = now
-                domain_execution.elapsed_time = (now - domain_execution.created_at).total_seconds()
+                node_execution.status = NodeExecutionStatus.FAILED
+                node_execution.error = error_message
+                node_execution.finished_at = now
+                node_execution.elapsed_time = (now - node_execution.created_at).total_seconds()

                 # Update the repository with the domain model
-                self._workflow_node_execution_repository.save(domain_execution)
+                self._workflow_node_execution_repository.save(node_execution)

         if trace_manager:
             trace_manager.add_trace_task(
                 TraceTask(
                     TraceTaskName.WORKFLOW_TRACE,
-                    workflow_execution=execution,
+                    workflow_execution=workflow_execution,
                     conversation_id=conversation_id,
                     user_id=trace_manager.user_id,
                 )
             )

-        return execution
+        self._workflow_execution_repository.save(workflow_execution)
+        return workflow_execution

     def handle_node_execution_start(
         self,

@@ -12,19 +12,30 @@ from flask_login import user_loaded_from_request, user_logged_in  # type: ignore

 from configs import dify_config
 from dify_app import DifyApp
+from models import Account, EndUser


 @user_logged_in.connect
 @user_loaded_from_request.connect
-def on_user_loaded(_sender, user):
+def on_user_loaded(_sender, user: Union["Account", "EndUser"]):
     if dify_config.ENABLE_OTEL:
         from opentelemetry.trace import get_current_span

         if user:
-            current_span = get_current_span()
-            if current_span:
-                current_span.set_attribute("service.tenant.id", user.current_tenant_id)
-                current_span.set_attribute("service.user.id", user.id)
+            try:
+                current_span = get_current_span()
+                if isinstance(user, Account) and user.current_tenant_id:
+                    tenant_id = user.current_tenant_id
+                elif isinstance(user, EndUser):
+                    tenant_id = user.tenant_id
+                else:
+                    return
+                if current_span:
+                    current_span.set_attribute("service.tenant.id", tenant_id)
+                    current_span.set_attribute("service.user.id", user.id)
+            except Exception:
+                logging.exception("Error setting tenant and user attributes")
+                pass


 def init_app(app: DifyApp):
@@ -47,21 +58,25 @@ def init_app(app: DifyApp):

     def response_hook(span: Span, status: str, response_headers: list):
         if span and span.is_recording():
-            if status.startswith("2"):
-                span.set_status(StatusCode.OK)
-            else:
-                span.set_status(StatusCode.ERROR, status)
+            try:
+                if status.startswith("2"):
+                    span.set_status(StatusCode.OK)
+                else:
+                    span.set_status(StatusCode.ERROR, status)

-            status = status.split(" ")[0]
-            status_code = int(status)
-            status_class = f"{status_code // 100}xx"
-            attributes: dict[str, str | int] = {"status_code": status_code, "status_class": status_class}
-            request = flask.request
-            if request and request.url_rule:
-                attributes[SpanAttributes.HTTP_TARGET] = str(request.url_rule.rule)
-            if request and request.method:
-                attributes[SpanAttributes.HTTP_METHOD] = str(request.method)
-            _http_response_counter.add(1, attributes)
+                status = status.split(" ")[0]
+                status_code = int(status)
+                status_class = f"{status_code // 100}xx"
+                attributes: dict[str, str | int] = {"status_code": status_code, "status_class": status_class}
+                request = flask.request
+                if request and request.url_rule:
+                    attributes[SpanAttributes.HTTP_TARGET] = str(request.url_rule.rule)
+                if request and request.method:
+                    attributes[SpanAttributes.HTTP_METHOD] = str(request.method)
+                _http_response_counter.add(1, attributes)
+            except Exception:
+                logging.exception("Error setting status and attributes")
+                pass

     instrumentor = FlaskInstrumentor()
     if dify_config.DEBUG:
@@ -92,7 +107,7 @@ def init_app(app: DifyApp):
     class ExceptionLoggingHandler(logging.Handler):
         """Custom logging handler that creates spans for logging.exception() calls"""

-        def emit(self, record):
+        def emit(self, record: logging.LogRecord):
             try:
                 if record.exc_info:
                     tracer = get_tracer_provider().get_tracer("dify.exception.logging")
@@ -107,9 +122,12 @@ def init_app(app: DifyApp):
                         },
                     ) as span:
                         span.set_status(StatusCode.ERROR)
-                        span.record_exception(record.exc_info[1])
-                        span.set_attribute("exception.type", record.exc_info[0].__name__)
-                        span.set_attribute("exception.message", str(record.exc_info[1]))
+                        if record.exc_info[1]:
+                            span.record_exception(record.exc_info[1])
+                            span.set_attribute("exception.message", str(record.exc_info[1]))
+                        if record.exc_info[0]:
+                            span.set_attribute("exception.type", record.exc_info[0].__name__)
             except Exception:
                 pass

@@ -190,7 +190,7 @@ vdb = [
     "pymilvus~=2.5.0",
     "pymochow==1.3.1",
     "pyobvector~=0.1.6",
-    "qdrant-client==1.7.3",
+    "qdrant-client==1.9.0",
     "tablestore==6.1.0",
     "tcvectordb~=1.6.4",
     "tidb-vector==0.0.9",

@@ -477,17 +477,15 @@ class DatasetService:
         if dataset.permission == DatasetPermissionEnum.ONLY_ME and dataset.created_by != user.id:
             logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
             raise NoPermissionError("You do not have permission to access this dataset.")
-        if dataset.permission == "partial_members":
-            user_permission = (
-                db.session.query(DatasetPermission).filter_by(dataset_id=dataset.id, account_id=user.id).first()
-            )
-            if (
-                not user_permission
-                and dataset.tenant_id != user.current_tenant_id
-                and dataset.created_by != user.id
-            ):
-                logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
-                raise NoPermissionError("You do not have permission to access this dataset.")
+        if dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM:
+            # For partial team permission, user needs explicit permission or be the creator
+            if dataset.created_by != user.id:
+                user_permission = (
+                    db.session.query(DatasetPermission).filter_by(dataset_id=dataset.id, account_id=user.id).first()
+                )
+                if not user_permission:
+                    logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
+                    raise NoPermissionError("You do not have permission to access this dataset.")

     @staticmethod
     def check_dataset_operator_permission(user: Optional[Account] = None, dataset: Optional[Dataset] = None):

@@ -17,7 +17,7 @@ from core.plugin.entities.plugin import (
     PluginInstallation,
     PluginInstallationSource,
 )
-from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginUploadResponse
+from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginListResponse, PluginUploadResponse
 from core.plugin.impl.asset import PluginAssetManager
 from core.plugin.impl.debugging import PluginDebuggingClient
 from core.plugin.impl.plugin import PluginInstaller
@@ -110,6 +110,15 @@ class PluginService:
         plugins = manager.list_plugins(tenant_id)
         return plugins

+    @staticmethod
+    def list_with_total(tenant_id: str, page: int, page_size: int) -> PluginListResponse:
+        """
+        list all plugins of the tenant
+        """
+        manager = PluginInstaller()
+        plugins = manager.list_plugins_with_total(tenant_id, page, page_size)
+        return plugins
+
     @staticmethod
     def list_installations_from_ids(tenant_id: str, ids: Sequence[str]) -> Sequence[PluginInstallation]:
         """

@@ -4,16 +4,12 @@ from collections.abc import Callable

 import click
 from celery import shared_task  # type: ignore
-from sqlalchemy import delete, select
+from sqlalchemy import delete
 from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
-
-from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from extensions.ext_database import db
 from models import (
-    Account,
     ApiToken,
     App,
     AppAnnotationHitHistory,
     AppAnnotationSetting,
     AppDatasetJoin,
@@ -34,7 +30,7 @@ from models import (
 )
 from models.tools import WorkflowToolProvider
 from models.web import PinnedConversation, SavedMessage
-from models.workflow import ConversationVariable, Workflow, WorkflowAppLog, WorkflowRun
+from models.workflow import ConversationVariable, Workflow, WorkflowAppLog, WorkflowNodeExecution, WorkflowRun


 @shared_task(queue="app_deletion", bind=True, max_retries=3)
@@ -191,31 +187,18 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str):


 def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
-    # Get app's owner
-    with Session(db.engine, expire_on_commit=False) as session:
-        stmt = select(Account).where(Account.id == App.created_by).where(App.id == app_id)
-        user = session.scalar(stmt)
-
-    if user is None:
-        errmsg = (
-            f"Failed to delete workflow node executions for tenant {tenant_id} and app {app_id}, app's owner not found"
-        )
-        logging.error(errmsg)
-        raise ValueError(errmsg)
-
-    # Create a repository instance for WorkflowNodeExecution
-    repository = SQLAlchemyWorkflowNodeExecutionRepository(
-        session_factory=db.engine,
-        user=user,
-        app_id=app_id,
-        triggered_from=None,
-    )
-
-    # Use the clear method to delete all records for this tenant_id and app_id
-    repository.clear()
+    def del_workflow_node_execution(workflow_node_execution_id: str):
+        db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution_id).delete(
+            synchronize_session=False
+        )
+
+    _delete_records(
+        """select id from workflow_node_executions where tenant_id=:tenant_id and app_id=:app_id limit 1000""",
+        {"tenant_id": tenant_id, "app_id": app_id},
+        del_workflow_node_execution,
+        "workflow node execution",
+    )

     logging.info(click.style(f"Deleted workflow node executions for tenant {tenant_id} and app {app_id}", fg="green"))


 def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):
     def del_workflow_app_log(workflow_app_log_id: str):

@@ -150,7 +150,7 @@ def test_extract_text_from_plain_text_non_utf8():
         temp_file.write(non_utf8_content)
         temp_file.seek(0)
         text = _extract_text_from_plain_text(temp_file.read())
-    assert text == "Hello, world."
+    assert text == "Hello, world©."


 @patch("pypdfium2.PdfDocument")

@@ -0,0 +1,158 @@
+from unittest.mock import Mock, patch
+
+import pytest
+
+from models.account import Account, TenantAccountRole
+from models.dataset import Dataset, DatasetPermission, DatasetPermissionEnum
+from services.dataset_service import DatasetService
+from services.errors.account import NoPermissionError
+
+
+class TestDatasetPermissionService:
+    """Test cases for dataset permission checking functionality"""
+
+    def setup_method(self):
+        """Set up test fixtures"""
+        # Mock tenant and user
+        self.tenant_id = "test-tenant-123"
+        self.creator_id = "creator-456"
+        self.normal_user_id = "normal-789"
+        self.owner_user_id = "owner-999"
+
+        # Mock dataset
+        self.dataset = Mock(spec=Dataset)
+        self.dataset.id = "dataset-123"
+        self.dataset.tenant_id = self.tenant_id
+        self.dataset.created_by = self.creator_id
+
+        # Mock users
+        self.creator_user = Mock(spec=Account)
+        self.creator_user.id = self.creator_id
+        self.creator_user.current_tenant_id = self.tenant_id
+        self.creator_user.current_role = TenantAccountRole.EDITOR
+
+        self.normal_user = Mock(spec=Account)
+        self.normal_user.id = self.normal_user_id
+        self.normal_user.current_tenant_id = self.tenant_id
+        self.normal_user.current_role = TenantAccountRole.NORMAL
+
+        self.owner_user = Mock(spec=Account)
+        self.owner_user.id = self.owner_user_id
+        self.owner_user.current_tenant_id = self.tenant_id
+        self.owner_user.current_role = TenantAccountRole.OWNER
+
+    def test_permission_check_different_tenant_should_fail(self):
+        """Test that users from different tenants cannot access dataset"""
+        self.normal_user.current_tenant_id = "different-tenant"
+
+        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
+            DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+
+    def test_owner_can_access_any_dataset(self):
+        """Test that tenant owners can access any dataset regardless of permission"""
+        self.dataset.permission = DatasetPermissionEnum.ONLY_ME
+
+        # Should not raise any exception
+        DatasetService.check_dataset_permission(self.dataset, self.owner_user)
+
+    def test_only_me_permission_creator_can_access(self):
+        """Test ONLY_ME permission allows only creator to access"""
+        self.dataset.permission = DatasetPermissionEnum.ONLY_ME
+
+        # Creator should be able to access
+        DatasetService.check_dataset_permission(self.dataset, self.creator_user)
+
+    def test_only_me_permission_others_cannot_access(self):
+        """Test ONLY_ME permission denies access to non-creators"""
+        self.dataset.permission = DatasetPermissionEnum.ONLY_ME
+
+        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
+            DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+
+    def test_all_team_permission_allows_access(self):
+        """Test ALL_TEAM permission allows any team member to access"""
+        self.dataset.permission = DatasetPermissionEnum.ALL_TEAM
+
+        # Should not raise any exception for team members
+        DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+        DatasetService.check_dataset_permission(self.dataset, self.creator_user)
+
+    @patch("services.dataset_service.db.session")
+    def test_partial_team_permission_creator_can_access(self, mock_session):
+        """Test PARTIAL_TEAM permission allows creator to access"""
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+
+        # Should not raise any exception for creator
+        DatasetService.check_dataset_permission(self.dataset, self.creator_user)
+
+        # Should not query database for creator
+        mock_session.query.assert_not_called()
+
+    @patch("services.dataset_service.db.session")
+    def test_partial_team_permission_with_explicit_permission(self, mock_session):
+        """Test PARTIAL_TEAM permission allows users with explicit permission"""
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+
+        # Mock database query to return a permission record
+        mock_permission = Mock(spec=DatasetPermission)
+        mock_session.query().filter_by().first.return_value = mock_permission
+
+        # Should not raise any exception
+        DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+
+        # Verify database was queried correctly
+        mock_session.query().filter_by.assert_called_with(dataset_id=self.dataset.id, account_id=self.normal_user.id)
+
+    @patch("services.dataset_service.db.session")
+    def test_partial_team_permission_without_explicit_permission(self, mock_session):
+        """Test PARTIAL_TEAM permission denies users without explicit permission"""
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+
+        # Mock database query to return None (no permission record)
+        mock_session.query().filter_by().first.return_value = None
+
+        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
+            DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+
+        # Verify database was queried correctly
+        mock_session.query().filter_by.assert_called_with(dataset_id=self.dataset.id, account_id=self.normal_user.id)
+
+    @patch("services.dataset_service.db.session")
+    def test_partial_team_permission_non_creator_without_permission_fails(self, mock_session):
+        """Test that non-creators without explicit permission are denied access"""
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+
+        # Create a different user (not the creator)
+        other_user = Mock(spec=Account)
+        other_user.id = "other-user-123"
+        other_user.current_tenant_id = self.tenant_id
+        other_user.current_role = TenantAccountRole.NORMAL
+
+        # Mock database query to return None (no permission record)
+        mock_session.query().filter_by().first.return_value = None
+
+        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
+            DatasetService.check_dataset_permission(self.dataset, other_user)
+
+    def test_partial_team_permission_uses_correct_enum(self):
+        """Test that the method correctly uses DatasetPermissionEnum.PARTIAL_TEAM"""
+        # This test ensures we're using the enum instead of string literals
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+
+        # Creator should always have access
+        DatasetService.check_dataset_permission(self.dataset, self.creator_user)
+
+    @patch("services.dataset_service.logging")
+    @patch("services.dataset_service.db.session")
+    def test_permission_denied_logs_debug_message(self, mock_session, mock_logging):
+        """Test that permission denied events are logged"""
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+        mock_session.query().filter_by().first.return_value = None
+
+        with pytest.raises(NoPermissionError):
+            DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+
+        # Verify debug message was logged
+        mock_logging.debug.assert_called_with(
+            f"User {self.normal_user.id} does not have permission to access dataset {self.dataset.id}"
+        )

api/uv.lock (4328 changes): file diff suppressed because it is too large.

@@ -412,6 +412,7 @@ QDRANT_API_KEY=difyai123456
 QDRANT_CLIENT_TIMEOUT=20
 QDRANT_GRPC_ENABLED=false
 QDRANT_GRPC_PORT=6334
+QDRANT_REPLICATION_FACTOR=1

 # Milvus configuration. Only available when VECTOR_STORE is `milvus`.
 # The milvus uri.

@@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
 services:
   # API service
   api:
-    image: langgenius/dify-api:1.4.0
+    image: langgenius/dify-api:1.4.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -31,7 +31,7 @@ services:
   # worker service
   # The Celery worker for processing the queue.
   worker:
-    image: langgenius/dify-api:1.4.0
+    image: langgenius/dify-api:1.4.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -57,7 +57,7 @@ services:

   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.4.0
+    image: langgenius/dify-web:1.4.1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -142,7 +142,7 @@ services:

   # plugin daemon
   plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.0.10-local
+    image: langgenius/dify-plugin-daemon:0.1.1-local
     restart: always
     environment:
       # Use the shared environment variables.

@@ -71,7 +71,7 @@ services:

   # plugin daemon
   plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.0.10-local
+    image: langgenius/dify-plugin-daemon:0.1.1-local
     restart: always
     env_file:
       - ./middleware.env

@@ -138,6 +138,7 @@ x-shared-env: &shared-api-worker-env
   QDRANT_CLIENT_TIMEOUT: ${QDRANT_CLIENT_TIMEOUT:-20}
   QDRANT_GRPC_ENABLED: ${QDRANT_GRPC_ENABLED:-false}
   QDRANT_GRPC_PORT: ${QDRANT_GRPC_PORT:-6334}
+  QDRANT_REPLICATION_FACTOR: ${QDRANT_REPLICATION_FACTOR:-1}
   MILVUS_URI: ${MILVUS_URI:-http://host.docker.internal:19530}
   MILVUS_DATABASE: ${MILVUS_DATABASE:-}
   MILVUS_TOKEN: ${MILVUS_TOKEN:-}
@@ -500,7 +501,7 @@ x-shared-env: &shared-api-worker-env
 services:
   # API service
   api:
-    image: langgenius/dify-api:1.4.0
+    image: langgenius/dify-api:1.4.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -529,7 +530,7 @@ services:
   # worker service
   # The Celery worker for processing the queue.
   worker:
-    image: langgenius/dify-api:1.4.0
+    image: langgenius/dify-api:1.4.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -555,7 +556,7 @@ services:

   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.4.0
+    image: langgenius/dify-web:1.4.1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -640,7 +641,7 @@ services:

   # plugin daemon
   plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.0.10-local
+    image: langgenius/dify-plugin-daemon:0.1.1-local
     restart: always
     environment:
       # Use the shared environment variables.

Binary image file not shown (before: 53 KiB; after: 60 KiB).

@@ -1,6 +1,5 @@
 import type { FC } from 'react'
 import type { ModelProvider } from '../declarations'
-import { basePath } from '@/utils/var'
 import { useLanguage } from '../hooks'
 import { Openai } from '@/app/components/base/icons/src/vender/other'
 import { AnthropicDark, AnthropicLight } from '@/app/components/base/icons/src/public/llm'
@@ -41,7 +40,7 @@ const ProviderIcon: FC<ProviderIconProps> = ({
     <div className={cn('inline-flex items-center gap-2', className)}>
       <img
         alt='provider-icon'
-        src={basePath + renderI18nObject(provider.icon_small, language)}
+        src={renderI18nObject(provider.icon_small, language)}
         className='h-6 w-6'
       />
       <div className='system-md-semibold text-text-primary'>

@@ -78,7 +78,7 @@ const ActionList = ({
           className='w-full'
           onClick={() => setShowSettingAuth(true)}
           disabled={!isCurrentWorkspaceManager}
-        >{t('tools.auth.unauthorized')}</Button>
+        >{t('workflow.nodes.tool.authorize')}</Button>
       )}
     </div>
     <div className='flex flex-col gap-2'>

@@ -141,7 +141,7 @@ const MultipleToolSelector = ({
         }
         panelShowState={panelShowState}
         onPanelShowStateChange={setPanelShowState}
-
+        isEdit={false}
       />
       {value.length === 0 && (
         <div className='system-xs-regular flex justify-center rounded-[10px] bg-background-section p-3 text-text-tertiary'>{t('plugin.detailPanel.toolSelector.empty')}</div>
@@ -158,6 +158,7 @@ const MultipleToolSelector = ({
             onSelect={item => handleConfigure(item, index)}
             onDelete={() => handleDelete(index)}
             supportEnableSwitch
+            isEdit
           />
         </div>
       ))}

@@ -54,6 +54,7 @@ type Props = {
   scope?: string
   value?: ToolValue
   selectedTools?: ToolValue[]
+  isEdit?: boolean
   onSelect: (tool: {
     provider_name: string
     tool_name: string
@@ -77,6 +78,7 @@ type Props = {
 const ToolSelector: FC<Props> = ({
   value,
   selectedTools,
+  isEdit,
   disabled,
   placement = 'left',
   offset = 4,
@@ -277,7 +279,7 @@ const ToolSelector: FC<Props> = ({
       <div className={cn('relative max-h-[642px] min-h-20 w-[361px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur pb-4 shadow-lg backdrop-blur-sm', !isShowSettingAuth && 'overflow-y-auto pb-2')}>
         {!isShowSettingAuth && (
           <>
-            <div className='system-xl-semibold px-4 pb-1 pt-3.5 text-text-primary'>{t('plugin.detailPanel.toolSelector.title')}</div>
+            <div className='system-xl-semibold px-4 pb-1 pt-3.5 text-text-primary'>{t(`plugin.detailPanel.toolSelector.${isEdit ? 'toolSetting' : 'title'}`)}</div>
             {/* base form */}
             <div className='flex flex-col gap-3 px-4 py-2'>
               <div className='flex flex-col gap-1'>

@@ -1,20 +1,23 @@
'use client'
import { useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import type { FilterState } from './filter-management'
import FilterManagement from './filter-management'
import List from './list'
import { useInstalledLatestVersion, useInstalledPluginList, useInvalidateInstalledPluginList } from '@/service/use-plugins'
import { useInstalledLatestVersion, useInstalledPluginListWithPagination, useInvalidateInstalledPluginList } from '@/service/use-plugins'
import PluginDetailPanel from '@/app/components/plugins/plugin-detail-panel'
import { usePluginPageContext } from './context'
import { useDebounceFn } from 'ahooks'
import Button from '@/app/components/base/button'
import Empty from './empty'
import Loading from '../../base/loading'
import { PluginSource } from '../types'

const PluginsPanel = () => {
  const { t } = useTranslation()
  const filters = usePluginPageContext(v => v.filters) as FilterState
  const setFilters = usePluginPageContext(v => v.setFilters)
  const { data: pluginList, isLoading: isPluginListLoading } = useInstalledPluginList()
  const { data: pluginList, isLoading: isPluginListLoading, isFetching, isLastPage, loadNextPage } = useInstalledPluginListWithPagination()
  const { data: installedLatestVersion } = useInstalledLatestVersion(
    pluginList?.plugins
      .filter(plugin => plugin.source === PluginSource.marketplace)

@@ -64,10 +67,16 @@ const PluginsPanel = () => {
      />
    </div>
    {isPluginListLoading ? <Loading type='app' /> : (filteredList?.length ?? 0) > 0 ? (
      <div className='flex grow flex-wrap content-start items-start gap-2 self-stretch px-12'>
      <div className='flex grow flex-wrap content-start items-start justify-center gap-2 self-stretch px-12'>
        <div className='w-full'>
          <List pluginList={filteredList || []} />
        </div>
        {!isLastPage && !isFetching && (
          <Button onClick={loadNextPage}>
            {t('workflow.common.loadMore')}
          </Button>
        )}
        {isFetching && <div className='system-md-semibold text-text-secondary'>{t('appLog.detail.loading')}</div>}
      </div>
    ) : (
      <Empty />
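The panel now pages the installed-plugin list instead of fetching it in one shot: a load-more button appears until `isLastPage` is true, and a loading label shows while the next page is in flight. A rough sketch of the consumer side of that contract (the hook's return shape is defined later in this diff; this component is a stand-in):

    import Button from '@/app/components/base/button'

    type Pagination = {
      isLastPage: boolean      // no further pages to fetch
      isFetching: boolean      // next page currently in flight
      loadNextPage: () => void // triggers fetchNextPage under the hood
    }

    // Stand-in load-more control mirroring the JSX above.
    const LoadMoreControl = ({ isLastPage, isFetching, loadNextPage }: Pagination) => {
      if (isLastPage) return null
      return isFetching
        ? <div>Loading…</div>
        : <Button onClick={loadNextPage}>Load more</Button>
    }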
@@ -325,6 +325,11 @@ export type InstalledPluginListResponse = {
  plugins: PluginDetail[]
}

export type InstalledPluginListWithTotalResponse = {
  plugins: PluginDetail[]
  total: number
}

export type InstalledLatestVersionResponse = {
  versions: {
    [plugin_id: string]: {
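The new response type differs from `InstalledPluginListResponse` only by the `total` count, which lets the client decide whether another page exists without an extra round trip. A page is the last one exactly when the items loaded so far reach `total` (the helper below is hypothetical):

    // Hypothetical helper: true while pagesLoaded * pageSize still falls short of total.
    const hasMorePages = (total: number, pagesLoaded: number, pageSize: number): boolean =>
      pagesLoaded * pageSize < total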
@@ -42,6 +42,12 @@ const useConfig = (id: string, payload: HttpNodeType) => {
        data: transformToBodyPayload(bodyData, [BodyType.formData, BodyType.xWwwFormUrlencoded].includes(newInputs.body.type)),
      }
    }
    else if (!bodyData) {
      newInputs.body = {
        ...newInputs.body,
        data: [],
      }
    }

    setInputs(newInputs)
    setIsDataReady(true)

@@ -151,7 +157,7 @@ const useConfig = (id: string, payload: HttpNodeType) => {
    inputs.url,
    inputs.headers,
    inputs.params,
    typeof inputs.body.data === 'string' ? inputs.body.data : inputs.body.data.map(item => item.value).join(''),
    typeof inputs.body.data === 'string' ? inputs.body.data : inputs.body.data?.map(item => item.value).join(''),
    fileVarInputs,
  ])
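Both hunks are null-safety fixes for HTTP nodes saved before body data was guaranteed to exist: the first normalizes a missing `bodyData` to an empty array, and the second adds optional chaining so the memo dependency cannot throw on `undefined.map`. A reduced sketch of the normalization, with the node types simplified:

    type BodyItem = { value: string }
    type HttpBody = { type: string, data?: string | BodyItem[] }

    // Ensure downstream code can always call .map() on body.data.
    const normalizeBody = (body: HttpBody): HttpBody =>
      body.data === undefined ? { ...body, data: [] } : body

    // Memo key: strings pass through; arrays are joined; a still-undefined
    // value short-circuits instead of throwing.
    const bodyDepKey = (body: HttpBody): string | undefined =>
      typeof body.data === 'string'
        ? body.data
        : body.data?.map(item => item.value).join('')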
@@ -79,7 +79,7 @@ const Panel: FC<NodePanelProps<ToolNodeType>> = ({
          className='w-full'
          onClick={showSetAuthModal}
        >
          {t(`${i18nPrefix}.toAuthorize`)}
          {t(`${i18nPrefix}.authorize`)}
        </Button>
      </div>
    </>
@@ -10,7 +10,7 @@ export default function RoutePrefixHandle() {
  const addPrefixToImg = (e: HTMLImageElement) => {
    const url = new URL(e.src)
    const prefix = url.pathname.substr(0, basePath.length)
    if (prefix !== basePath) {
    if (prefix !== basePath && !url.href.startsWith('blob:') && !url.href.startsWith('data:')) {
      url.pathname = basePath + url.pathname
      e.src = url.toString()
    }
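The extra conditions matter because `blob:` and `data:` image sources are not path-based: prepending a base path to their `pathname` yields a URL the browser can no longer resolve, which broke in-memory previews when Dify is hosted under a subpath. A sketch of the guard in isolation (`basePath` comes from the surrounding component, as in the hunk):

    // Rewrite only ordinary http(s) sources that are missing the base path;
    // blob: and data: URLs must be left untouched.
    const needsPrefix = (src: string, basePath: string): boolean => {
      if (src.startsWith('blob:') || src.startsWith('data:'))
        return false
      const url = new URL(src)
      return url.pathname.substring(0, basePath.length) !== basePath
    }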
@@ -14,7 +14,6 @@ const translation = {
  },
  author: 'Von',
  auth: {
    unauthorized: 'Zur Autorisierung',
    authorized: 'Autorisiert',
    setup: 'Autorisierung einrichten, um zu nutzen',
    setupModalTitle: 'Autorisierung einrichten',

@@ -648,7 +648,6 @@ const translation = {
    'assignedVarsDescription': 'Zugewiesene Variablen müssen beschreibbare Variablen sein, z. B. Konversationsvariablen.',
  },
  tool: {
    toAuthorize: 'Autorisieren',
    inputVars: 'Eingabevariablen',
    outputVars: {
      text: 'durch das Tool generierter Inhalt',

@@ -77,6 +77,7 @@ const translation = {
  modelNum: '{{num}} MODELS INCLUDED',
  toolSelector: {
    title: 'Add tool',
    toolSetting: 'Tool Settings',
    toolLabel: 'Tool',
    descriptionLabel: 'Tool description',
    descriptionPlaceholder: 'Brief description of the tool\'s purpose, e.g., get the temperature for a specific location.',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: 'By',
  auth: {
    unauthorized: 'To Authorize',
    authorized: 'Authorized',
    setup: 'Set up authorization to use',
    setupModalTitle: 'Set Up Authorization',

@@ -659,7 +659,7 @@ const translation = {
    'assignedVarsDescription': 'Assigned variables must be writable variables, such as conversation variables.',
  },
  tool: {
    toAuthorize: 'To authorize',
    authorize: 'Authorize',
    inputVars: 'Input Variables',
    outputVars: {
      text: 'tool generated content',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: 'Por',
  auth: {
    unauthorized: 'Para Autorizar',
    authorized: 'Autorizado',
    setup: 'Configurar la autorización para usar',
    setupModalTitle: 'Configurar Autorización',

@@ -646,7 +646,6 @@ const translation = {
    'assignedVarsDescription': 'Las variables asignadas deben ser variables grabables, como las variables de conversación.',
  },
  tool: {
    toAuthorize: 'Para autorizar',
    inputVars: 'Variables de entrada',
    outputVars: {
      text: 'Contenido generado por la herramienta',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: 'توسط',
  auth: {
    unauthorized: 'برای مجوز دادن',
    authorized: 'مجوز داده شده',
    setup: 'تنظیم مجوز برای استفاده',
    setupModalTitle: 'تنظیم مجوز',

@@ -648,7 +648,6 @@ const translation = {
    'varNotSet': 'متغیر NOT Set',
  },
  tool: {
    toAuthorize: 'برای مجوز دادن',
    inputVars: 'متغیرهای ورودی',
    outputVars: {
      text: 'محتوای تولید شده توسط ابزار',

@@ -14,7 +14,6 @@ const translation = {
  },
  author: 'Par',
  auth: {
    unauthorized: 'Pour Autoriser',
    authorized: 'Autorisé',
    setup: 'Mettez en place l\'autorisation à utiliser',
    setupModalTitle: 'Configurer l\'Autorisation',

@@ -647,7 +647,6 @@ const translation = {
    'selectAssignedVariable': 'Sélectionner la variable affectée...',
  },
  tool: {
    toAuthorize: 'Autoriser',
    inputVars: 'Variables de saisie',
    outputVars: {
      text: 'contenu généré par l\'outil',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: 'द्वारा',
  auth: {
    unauthorized: 'अधिकृत करने के लिए',
    authorized: 'अधिकृत',
    setup: 'उपयोग करने के लिए अधिकृति सेटअप करें',
    setupModalTitle: 'अधिकृति सेटअप करें',

@@ -664,7 +664,6 @@ const translation = {
    'noAssignedVars': 'कोई उपलब्ध असाइन किए गए चर नहीं',
  },
  tool: {
    toAuthorize: 'अधिकृत करने के लिए',
    inputVars: 'इनपुट वेरिएबल्स',
    outputVars: {
      text: 'उपकरण द्वारा उत्पन्न सामग्री',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: 'Di',
  auth: {
    unauthorized: 'Per Autorizzare',
    authorized: 'Autorizzato',
    setup: 'Configura l\'autorizzazione per utilizzare',
    setupModalTitle: 'Configura Autorizzazione',

@@ -666,7 +666,6 @@ const translation = {
    'noVarTip': 'Fare clic sul pulsante "+" per aggiungere variabili',
  },
  tool: {
    toAuthorize: 'Per autorizzare',
    inputVars: 'Variabili di Input',
    outputVars: {
      text: 'contenuto generato dallo strumento',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: '著者:',
  auth: {
    unauthorized: '認証する',
    authorized: '認証済み',
    setup: '使用するための認証を設定する',
    setupModalTitle: '認証の設定',

@@ -654,7 +654,6 @@ const translation = {
    'assignedVarsDescription': '代入される変数は、会話変数などの書き込み可能な変数である必要があります。',
  },
  tool: {
    toAuthorize: '承認するには',
    inputVars: '入力変数',
    outputVars: {
      text: 'ツールが生成したコンテンツ',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: '저자',
  auth: {
    unauthorized: '인증되지 않음',
    authorized: '인증됨',
    setup: '사용을 위한 인증 설정',
    setupModalTitle: '인증 설정',

@@ -648,7 +648,6 @@ const translation = {
    'varNotSet': '변수가 설정되지 않음',
  },
  tool: {
    toAuthorize: '승인하기',
    inputVars: '입력 변수',
    outputVars: {
      text: '도구가 생성한 내용',

@@ -14,7 +14,6 @@ const translation = {
  },
  author: 'Przez',
  auth: {
    unauthorized: 'Autoryzacja',
    authorized: 'Zautoryzowane',
    setup: 'Skonfiguruj autoryzację aby użyć',
    setupModalTitle: 'Konfiguruj autoryzację',

@@ -648,7 +648,6 @@ const translation = {
    'noVarTip': 'Kliknij przycisk "+", aby dodać zmienne',
  },
  tool: {
    toAuthorize: 'Do autoryzacji',
    inputVars: 'Zmienne wejściowe',
    outputVars: {
      text: 'treść generowana przez narzędzie',

@@ -14,7 +14,6 @@ const translation = {
  },
  author: 'Por',
  auth: {
    unauthorized: 'Para Autorizar',
    authorized: 'Autorizado',
    setup: 'Configurar autorização para usar',
    setupModalTitle: 'Configurar Autorização',

@@ -648,7 +648,6 @@ const translation = {
    'variables': 'Variáveis',
  },
  tool: {
    toAuthorize: 'Autorizar',
    inputVars: 'Variáveis de entrada',
    outputVars: {
      text: 'conteúdo gerado pela ferramenta',

@@ -14,7 +14,6 @@ const translation = {
  },
  author: 'De',
  auth: {
    unauthorized: 'Pentru a Autoriza',
    authorized: 'Autorizat',
    setup: 'Configurează autorizarea pentru a utiliza',
    setupModalTitle: 'Configurează Autorizarea',

@@ -648,7 +648,6 @@ const translation = {
    'variables': 'Variabile',
  },
  tool: {
    toAuthorize: 'Autorizați',
    inputVars: 'Variabile de intrare',
    outputVars: {
      text: 'conținut generat de instrument',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: 'Автор',
  auth: {
    unauthorized: 'Авторизовать',
    authorized: 'Авторизовано',
    setup: 'Настроить авторизацию для использования',
    setupModalTitle: 'Настроить авторизацию',

@@ -648,7 +648,6 @@ const translation = {
    'selectAssignedVariable': 'Выберите назначенную переменную...',
  },
  tool: {
    toAuthorize: 'Авторизовать',
    inputVars: 'Входные переменные',
    outputVars: {
      text: 'контент, сгенерированный инструментом',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: 'Avtor',
  auth: {
    unauthorized: 'Za avtorizacijo',
    authorized: 'Avtorizirano',
    setup: 'Nastavite avtorizacijo za uporabo',
    setupModalTitle: 'Nastavi avtorizacijo',

@@ -488,7 +488,6 @@ const translation = {
    'variable': 'Spremenljivka',
  },
  tool: {
    toAuthorize: 'Za avtorizacijo',
    inputVars: 'Vhodne spremenljivke',
    outputVars: {
      text: 'orodje je ustvarilo vsebino',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: 'โดย',
  auth: {
    unauthorized: 'การอนุญาต',
    authorized: 'อนุญาต',
    setup: 'ตั้งค่าการให้สิทธิ์เพื่อใช้',
    setupModalTitle: 'ตั้งค่าการให้สิทธิ์',

@@ -647,7 +647,6 @@ const translation = {
    'setParameter': 'ตั้งค่าพารามิเตอร์...',
  },
  tool: {
    toAuthorize: 'เพื่ออนุญาต',
    inputVars: 'ตัวแปรอินพุต',
    outputVars: {
      text: 'เนื้อหาที่สร้างขึ้นด้วยเครื่องมือ',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: 'Tarafından',
  auth: {
    unauthorized: 'Yetki Ver',
    authorized: 'Yetkilendirildi',
    setup: 'Kullanmak için yetkilendirmeyi ayarla',
    setupModalTitle: 'Yetkilendirmeyi Ayarla',

@@ -649,7 +649,6 @@ const translation = {
    'noAssignedVars': 'Kullanılabilir atanmış değişken yok',
  },
  tool: {
    toAuthorize: 'Yetkilendirmek için',
    inputVars: 'Giriş Değişkenleri',
    outputVars: {
      text: 'araç tarafından oluşturulan içerik',

@@ -14,7 +14,6 @@ const translation = {
  },
  author: 'Автор',
  auth: {
    unauthorized: 'Авторизуватися',
    authorized: 'Авторизовано',
    setup: 'Налаштувати авторизацію, щоб використовувати',
    setupModalTitle: 'Налаштування авторизації',

@@ -648,7 +648,6 @@ const translation = {
    'setParameter': 'Встановити параметр...',
  },
  tool: {
    toAuthorize: 'Авторизувати',
    inputVars: 'Вхідні змінні',
    outputVars: {
      text: 'генерований вміст інструменту',

@@ -14,7 +14,6 @@ const translation = {
  },
  author: 'Tác giả',
  auth: {
    unauthorized: 'Chưa xác thực',
    authorized: 'Đã xác thực',
    setup: 'Thiết lập xác thực để sử dụng',
    setupModalTitle: 'Thiết lập xác thực',

@@ -648,7 +648,6 @@ const translation = {
    'variables': 'Biến',
  },
  tool: {
    toAuthorize: 'Ủy quyền',
    inputVars: 'Biến đầu vào',
    outputVars: {
      text: 'nội dung do công cụ tạo ra',

@@ -77,6 +77,7 @@ const translation = {
  modelNum: '{{num}} 模型已包含',
  toolSelector: {
    title: '添加工具',
    toolSetting: '工具设置',
    toolLabel: '工具',
    descriptionLabel: '工具描述',
    descriptionPlaceholder: '简要描述工具目的,例如,获取特定位置的温度。',

@@ -15,7 +15,6 @@ const translation = {
  },
  author: '作者',
  auth: {
    unauthorized: '去授权',
    authorized: '已授权',
    setup: '要使用请先授权',
    setupModalTitle: '设置授权',

@@ -660,7 +660,7 @@ const translation = {
    'assignedVarsDescription': '赋值变量必须是可写入的变量,例如会话变量。',
  },
  tool: {
    toAuthorize: '授权',
    authorize: '授权',
    inputVars: '输入变量',
    outputVars: {
      text: '工具生成的内容',

@@ -14,7 +14,6 @@ const translation = {
  },
  author: '作者',
  auth: {
    unauthorized: '去授權',
    authorized: '已授權',
    setup: '要使用請先授權',
    setupModalTitle: '設定授權',

@@ -648,7 +648,7 @@ const translation = {
    'varNotSet': '未設置變數',
  },
  tool: {
    toAuthorize: '授權',
    authorize: '授權',
    inputVars: '輸入變量',
    outputVars: {
      text: '工具生成的內容',
@@ -1,6 +1,6 @@
{
  "name": "dify-web",
  "version": "1.4.0",
  "version": "1.4.1",
  "private": true,
  "engines": {
    "node": ">=v22.11.0"
@@ -11,6 +11,7 @@ import type {
  InstallPackageResponse,
  InstalledLatestVersionResponse,
  InstalledPluginListResponse,
  InstalledPluginListWithTotalResponse,
  PackageDependency,
  Permissions,
  Plugin,

@@ -33,6 +34,7 @@ import type {
import { get, getMarketplace, post, postMarketplace } from './base'
import type { MutateOptions, QueryOptions } from '@tanstack/react-query'
import {
  useInfiniteQuery,
  useMutation,
  useQuery,
  useQueryClient,

@@ -74,6 +76,53 @@ export const useInstalledPluginList = (disable?: boolean) => {
  })
}

export const useInstalledPluginListWithPagination = (pageSize = 100) => {
  const fetchPlugins = async ({ pageParam = 1 }) => {
    const response = await get<InstalledPluginListWithTotalResponse>(
      `/workspaces/current/plugin/list?page=${pageParam}&page_size=${pageSize}`,
    )
    return response
  }

  const {
    data,
    error,
    fetchNextPage,
    hasNextPage,
    isFetchingNextPage,
    isLoading,
  } = useInfiniteQuery({
    queryKey: ['installed-plugins', pageSize],
    queryFn: fetchPlugins,
    getNextPageParam: (lastPage, pages) => {
      const totalItems = lastPage.total
      const currentPage = pages.length
      const itemsLoaded = currentPage * pageSize

      if (itemsLoaded >= totalItems)
        return

      return currentPage + 1
    },
    initialPageParam: 1,
  })

  const plugins = data?.pages.flatMap(page => page.plugins) ?? []

  return {
    data: {
      plugins,
    },
    isLastPage: !hasNextPage,
    loadNextPage: () => {
      fetchNextPage()
    },
    isLoading,
    isFetching: isFetchingNextPage,
    error,
  }
}

export const useInstalledLatestVersion = (pluginIds: string[]) => {
  return useQuery<InstalledLatestVersionResponse>({
    queryKey: [NAME_SPACE, 'installedLatestVersion', pluginIds],
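In `getNextPageParam`, returning `undefined` is the react-query signal that no page remains, so `hasNextPage` (surfaced as `isLastPage` here) flips exactly when `pages.length * pageSize` reaches the server-reported `total`. A hypothetical consumer, mirroring how the plugins panel earlier in this diff uses the hook:

    // Hypothetical caller; PluginDetail's display field is assumed.
    const InstalledPluginNames = () => {
      const { data, isLoading, isFetching, isLastPage, loadNextPage }
        = useInstalledPluginListWithPagination(50) // smaller page size than the default 100

      if (isLoading)
        return <div>Loading…</div>

      return (
        <>
          <ul>
            {data.plugins.map(plugin => <li key={plugin.plugin_id}>{plugin.plugin_id}</li>)}
          </ul>
          {!isLastPage && (
            <button disabled={isFetching} onClick={loadNextPage}>Load more</button>
          )}
        </>
      )
    }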