mirror of https://github.com/langgenius/dify.git
Merge branch 'main' into feat/attachments
commit 1668df104f
@@ -22,7 +22,7 @@ body:
  - type: input
    attributes:
      label: Dify version
      placeholder: 0.6.11
      placeholder: 0.6.15
      description: See about section in Dify console
    validations:
      required: true
@@ -216,6 +216,7 @@ UNSTRUCTURED_API_KEY=
SSRF_PROXY_HTTP_URL=
SSRF_PROXY_HTTPS_URL=
SSRF_DEFAULT_MAX_RETRIES=3
BATCH_UPLOAD_LIMIT=10
KEYWORD_DATA_SOURCE_TYPE=database
@@ -1,4 +1,5 @@
from typing import Any, Optional
from urllib.parse import quote_plus

from pydantic import Field, NonNegativeInt, PositiveInt, computed_field
from pydantic_settings import BaseSettings
@@ -104,7 +105,7 @@ class DatabaseConfig:
        ).strip("&")
        db_extras = f"?{db_extras}" if db_extras else ""
        return (f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://"
                f"{self.DB_USERNAME}:{self.DB_PASSWORD}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
                f"{quote_plus(self.DB_USERNAME)}:{quote_plus(self.DB_PASSWORD)}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
                f"{db_extras}")

    SQLALCHEMY_POOL_SIZE: NonNegativeInt = Field(
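Context for the quote_plus change: reserved characters in database credentials would otherwise corrupt the SQLAlchemy URI. A minimal sketch of the effect (the scheme, username and password below are made up for illustration, not taken from this commit):

from urllib.parse import quote_plus

password = "p@ss:word/1"  # hypothetical password containing reserved URI characters
uri = f"postgresql+psycopg2://{quote_plus('dify')}:{quote_plus(password)}@localhost:5432/dify"
print(uri)  # postgresql+psycopg2://dify:p%40ss%3Aword%2F1@localhost:5432/dify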
@@ -34,12 +34,6 @@ class Segment(BaseModel):
        return str(self.value)

    def to_object(self) -> Any:
        if isinstance(self.value, Segment):
            return self.value.to_object()
        if isinstance(self.value, list):
            return [v.to_object() for v in self.value]
        if isinstance(self.value, dict):
            return {k: v.to_object() for k, v in self.value.items()}
        return self.value
@@ -56,6 +56,9 @@ class ObjectVariable(Variable):
        # TODO: Use markdown code block
        return json.dumps(self.model_dump()['value'], ensure_ascii=False, indent=2)

    def to_object(self):
        return {k: v.to_object() for k, v in self.value.items()}


class ArrayVariable(Variable):
    value_type: SegmentType = SegmentType.ARRAY
@@ -65,6 +68,9 @@ class ArrayVariable(Variable):
    def markdown(self) -> str:
        return '\n'.join(['- ' + item.markdown for item in self.value])

    def to_object(self):
        return [v.to_object() for v in self.value]


class FileVariable(Variable):
    value_type: SegmentType = SegmentType.FILE
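A simplified, self-contained sketch of the recursive to_object pattern used above (a plain class stands in for the pydantic Segment/Variable models, which are not reproduced here):

class FakeSegment:
    def __init__(self, value):
        self.value = value

    def to_object(self):
        # Unwrap nested segments, lists and dicts into plain Python values.
        if isinstance(self.value, FakeSegment):
            return self.value.to_object()
        if isinstance(self.value, list):
            return [v.to_object() for v in self.value]
        if isinstance(self.value, dict):
            return {k: v.to_object() for k, v in self.value.items()}
        return self.value

nested = FakeSegment({'answer': FakeSegment([FakeSegment(1), FakeSegment(2)])})
print(nested.to_object())  # {'answer': [1, 2]}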
@@ -1,48 +1,75 @@
"""
Proxy requests to avoid SSRF
"""
import logging
import os
import time

import httpx

SSRF_PROXY_ALL_URL = os.getenv('SSRF_PROXY_ALL_URL', '')
SSRF_PROXY_HTTP_URL = os.getenv('SSRF_PROXY_HTTP_URL', '')
SSRF_PROXY_HTTPS_URL = os.getenv('SSRF_PROXY_HTTPS_URL', '')
SSRF_DEFAULT_MAX_RETRIES = int(os.getenv('SSRF_DEFAULT_MAX_RETRIES', '3'))

proxies = {
    'http://': SSRF_PROXY_HTTP_URL,
    'https://': SSRF_PROXY_HTTPS_URL
} if SSRF_PROXY_HTTP_URL and SSRF_PROXY_HTTPS_URL else None

BACKOFF_FACTOR = 0.5
STATUS_FORCELIST = [429, 500, 502, 503, 504]

def make_request(method, url, **kwargs):
    if SSRF_PROXY_ALL_URL:
        return httpx.request(method=method, url=url, proxy=SSRF_PROXY_ALL_URL, **kwargs)
    elif proxies:
        return httpx.request(method=method, url=url, proxies=proxies, **kwargs)
    else:
        return httpx.request(method=method, url=url, **kwargs)
def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
    if "allow_redirects" in kwargs:
        allow_redirects = kwargs.pop("allow_redirects")
        if "follow_redirects" not in kwargs:
            kwargs["follow_redirects"] = allow_redirects

    retries = 0
    while retries <= max_retries:
        try:
            if SSRF_PROXY_ALL_URL:
                response = httpx.request(method=method, url=url, proxy=SSRF_PROXY_ALL_URL, **kwargs)
            elif proxies:
                response = httpx.request(method=method, url=url, proxies=proxies, **kwargs)
            else:
                response = httpx.request(method=method, url=url, **kwargs)

            if response.status_code not in STATUS_FORCELIST:
                return response
            else:
                logging.warning(f"Received status code {response.status_code} for URL {url} which is in the force list")

        except httpx.RequestError as e:
            logging.warning(f"Request to URL {url} failed on attempt {retries + 1}: {e}")

        retries += 1
        if retries <= max_retries:
            time.sleep(BACKOFF_FACTOR * (2 ** (retries - 1)))

    raise Exception(f"Reached maximum retries ({max_retries}) for URL {url}")


def get(url, **kwargs):
    return make_request('GET', url, **kwargs)
def get(url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
    return make_request('GET', url, max_retries=max_retries, **kwargs)


def post(url, **kwargs):
    return make_request('POST', url, **kwargs)
def post(url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
    return make_request('POST', url, max_retries=max_retries, **kwargs)


def put(url, **kwargs):
    return make_request('PUT', url, **kwargs)
def put(url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
    return make_request('PUT', url, max_retries=max_retries, **kwargs)


def patch(url, **kwargs):
    return make_request('PATCH', url, **kwargs)
def patch(url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
    return make_request('PATCH', url, max_retries=max_retries, **kwargs)


def delete(url, **kwargs):
    return make_request('DELETE', url, **kwargs)
def delete(url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
    return make_request('DELETE', url, max_retries=max_retries, **kwargs)


def head(url, **kwargs):
    return make_request('HEAD', url, **kwargs)
def head(url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
    return make_request('HEAD', url, max_retries=max_retries, **kwargs)
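With the values above, a request that keeps failing is retried after roughly 0.5 s, 1 s and 2 s (BACKOFF_FACTOR * 2 ** (attempt - 1)) before the final exception. A minimal usage sketch, mirroring the callers elsewhere in this diff (the URL and retry count are illustrative):

from core.helper import ssrf_proxy

# 429/5xx responses and transport errors are retried up to max_retries times;
# any other response is returned immediately.
response = ssrf_proxy.get('https://example.com/data.json', max_retries=5, timeout=10.0)
print(response.status_code)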
@@ -10,10 +10,13 @@
- cohere.command-text-v14
- cohere.command-r-plus-v1.0
- cohere.command-r-v1.0
- meta.llama3-1-8b-instruct-v1:0
- meta.llama3-1-70b-instruct-v1:0
- meta.llama3-8b-instruct-v1:0
- meta.llama3-70b-instruct-v1:0
- meta.llama2-13b-chat-v1
- meta.llama2-70b-chat-v1
- mistral.mistral-large-2407-v1:0
- mistral.mistral-small-2402-v1:0
- mistral.mistral-large-2402-v1:0
- mistral.mixtral-8x7b-instruct-v0:1
@@ -208,14 +208,25 @@ class BedrockLargeLanguageModel(LargeLanguageModel):

        if model_info['support_tool_use'] and tools:
            parameters['toolConfig'] = self._convert_converse_tool_config(tools=tools)
        try:
            if stream:
                response = bedrock_client.converse_stream(**parameters)
                return self._handle_converse_stream_response(model_info['model'], credentials, response, prompt_messages)
            else:
                response = bedrock_client.converse(**parameters)
                return self._handle_converse_response(model_info['model'], credentials, response, prompt_messages)
        except ClientError as ex:
            error_code = ex.response['Error']['Code']
            full_error_msg = f"{error_code}: {ex.response['Error']['Message']}"
            raise self._map_client_to_invoke_error(error_code, full_error_msg)
        except (EndpointConnectionError, NoRegionError, ServiceNotInRegionError) as ex:
            raise InvokeConnectionError(str(ex))

        if stream:
            response = bedrock_client.converse_stream(**parameters)
            return self._handle_converse_stream_response(model_info['model'], credentials, response, prompt_messages)
        else:
            response = bedrock_client.converse(**parameters)
            return self._handle_converse_response(model_info['model'], credentials, response, prompt_messages)
        except UnknownServiceError as ex:
            raise InvokeServerUnavailableError(str(ex))

        except Exception as ex:
            raise InvokeError(str(ex))
    def _handle_converse_response(self, model: str, credentials: dict, response: dict,
                                  prompt_messages: list[PromptMessage]) -> LLMResult:
        """
@@ -558,7 +569,6 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
        except ClientError as ex:
            error_code = ex.response['Error']['Code']
            full_error_msg = f"{error_code}: {ex.response['Error']['Message']}"

            raise CredentialsValidateFailedError(str(self._map_client_to_invoke_error(error_code, full_error_msg)))

        except Exception as ex:
@@ -0,0 +1,25 @@
model: meta.llama3-1-70b-instruct-v1:0
label:
  en_US: Llama 3.1 Instruct 70B
model_type: llm
model_properties:
  mode: completion
  context_size: 128000
parameter_rules:
  - name: temperature
    use_template: temperature
    default: 0.5
  - name: top_p
    use_template: top_p
    default: 0.9
  - name: max_gen_len
    use_template: max_tokens
    required: true
    default: 512
    min: 1
    max: 2048
pricing:
  input: '0.00265'
  output: '0.0035'
  unit: '0.001'
  currency: USD
@@ -0,0 +1,25 @@
model: meta.llama3-1-8b-instruct-v1:0
label:
  en_US: Llama 3.1 Instruct 8B
model_type: llm
model_properties:
  mode: completion
  context_size: 128000
parameter_rules:
  - name: temperature
    use_template: temperature
    default: 0.5
  - name: top_p
    use_template: top_p
    default: 0.9
  - name: max_gen_len
    use_template: max_tokens
    required: true
    default: 512
    min: 1
    max: 2048
pricing:
  input: '0.0003'
  output: '0.0006'
  unit: '0.001'
  currency: USD
@@ -0,0 +1,29 @@
model: mistral.mistral-large-2407-v1:0
label:
  en_US: Mistral Large 2 (24.07)
model_type: llm
features:
  - tool-call
model_properties:
  mode: completion
  context_size: 128000
parameter_rules:
  - name: temperature
    use_template: temperature
    required: false
    default: 0.7
  - name: top_p
    use_template: top_p
    required: false
    default: 1
  - name: max_tokens
    use_template: max_tokens
    required: true
    default: 512
    min: 1
    max: 8192
pricing:
  input: '0.003'
  output: '0.009'
  unit: '0.001'
  currency: USD
@@ -14,6 +14,7 @@ from core.model_runtime.entities.message_entities import (
    PromptMessage,
    PromptMessageTool,
    SystemPromptMessage,
    ToolPromptMessage,
    UserPromptMessage,
)
from core.model_runtime.errors.invoke import InvokeError
@@ -44,6 +45,17 @@ class HunyuanLargeLanguageModel(LargeLanguageModel):
            "Stream": stream,
            **custom_parameters,
        }
        # add Tools and ToolChoice
        if (tools and len(tools) > 0):
            params['ToolChoice'] = "auto"
            params['Tools'] = [{
                "Type": "function",
                "Function": {
                    "Name": tool.name,
                    "Description": tool.description,
                    "Parameters": json.dumps(tool.parameters)
                }
            } for tool in tools]

        request.from_json_string(json.dumps(params))
        response = client.ChatCompletions(request)
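The block above forwards tool schemas to the Hunyuan ChatCompletions request; a small sketch of the payload shape it builds (the tool definition below is hypothetical, not part of this commit):

import json

tool = {
    'name': 'get_weather',
    'description': 'Look up the weather for a city',
    'parameters': {'type': 'object', 'properties': {'city': {'type': 'string'}}},
}
tools_param = [{
    'Type': 'function',
    'Function': {
        'Name': tool['name'],
        'Description': tool['description'],
        'Parameters': json.dumps(tool['parameters']),  # parameter schema is sent as a JSON string
    },
}]
print(json.dumps({'ToolChoice': 'auto', 'Tools': tools_param}, indent=2))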
@@ -89,9 +101,43 @@ class HunyuanLargeLanguageModel(LargeLanguageModel):

    def _convert_prompt_messages_to_dicts(self, prompt_messages: list[PromptMessage]) -> list[dict]:
        """Convert a list of PromptMessage objects to a list of dictionaries with 'Role' and 'Content' keys."""
        return [{"Role": message.role.value, "Content": message.content} for message in prompt_messages]
        dict_list = []
        for message in prompt_messages:
            if isinstance(message, AssistantPromptMessage):
                tool_calls = message.tool_calls
                if (tool_calls and len(tool_calls) > 0):
                    dict_tool_calls = [
                        {
                            "Id": tool_call.id,
                            "Type": tool_call.type,
                            "Function": {
                                "Name": tool_call.function.name,
                                "Arguments": tool_call.function.arguments if (tool_call.function.arguments == "") else "{}"
                            }
                        } for tool_call in tool_calls]

                    dict_list.append({
                        "Role": message.role.value,
                        # fix set content = "" while tool_call request
                        # fix [hunyuan] None, [TencentCloudSDKException] code:InvalidParameter message:Messages Content and Contents not allowed empty at the same time.
                        "Content": " ", # message.content if (message.content is not None) else "",
                        "ToolCalls": dict_tool_calls
                    })
                else:
                    dict_list.append({ "Role": message.role.value, "Content": message.content })
            elif isinstance(message, ToolPromptMessage):
                tool_execute_result = { "result": message.content }
                content =json.dumps(tool_execute_result, ensure_ascii=False)
                dict_list.append({ "Role": message.role.value, "Content": content, "ToolCallId": message.tool_call_id })
            else:
                dict_list.append({ "Role": message.role.value, "Content": message.content })
        return dict_list

    def _handle_stream_chat_response(self, model, credentials, prompt_messages, resp):

        tool_call = None
        tool_calls = []

        for index, event in enumerate(resp):
            logging.debug("_handle_stream_chat_response, event: %s", event)
@@ -109,20 +155,54 @@ class HunyuanLargeLanguageModel(LargeLanguageModel):
            usage = data.get('Usage', {})
            prompt_tokens = usage.get('PromptTokens', 0)
            completion_tokens = usage.get('CompletionTokens', 0)
            usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens)

            response_tool_calls = delta.get('ToolCalls')
            if (response_tool_calls is not None):
                new_tool_calls = self._extract_response_tool_calls(response_tool_calls)
                if (len(new_tool_calls) > 0):
                    new_tool_call = new_tool_calls[0]
                    if (tool_call is None): tool_call = new_tool_call
                    elif (tool_call.id != new_tool_call.id):
                        tool_calls.append(tool_call)
                        tool_call = new_tool_call
                    else:
                        tool_call.function.name += new_tool_call.function.name
                        tool_call.function.arguments += new_tool_call.function.arguments
                    if (tool_call is not None and len(tool_call.function.name) > 0 and len(tool_call.function.arguments) > 0):
                        tool_calls.append(tool_call)
                        tool_call = None

            assistant_prompt_message = AssistantPromptMessage(
                content=message_content,
                tool_calls=[]
            )
            # rewrite content = "" while tool_call to avoid show content on web page
            if (len(tool_calls) > 0): assistant_prompt_message.content = ""

            # add tool_calls to assistant_prompt_message
            if (finish_reason == 'tool_calls'):
                assistant_prompt_message.tool_calls = tool_calls
                tool_call = None
                tool_calls = []

            delta_chunk = LLMResultChunkDelta(
                index=index,
                role=delta.get('Role', 'assistant'),
                message=assistant_prompt_message,
                usage=usage,
                finish_reason=finish_reason,
            )
            if (len(finish_reason) > 0):
                usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens)

                delta_chunk = LLMResultChunkDelta(
                    index=index,
                    role=delta.get('Role', 'assistant'),
                    message=assistant_prompt_message,
                    usage=usage,
                    finish_reason=finish_reason,
                )
                tool_call = None
                tool_calls = []

            else:
                delta_chunk = LLMResultChunkDelta(
                    index=index,
                    message=assistant_prompt_message,
                )

            yield LLMResultChunk(
                model=model,
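The streaming branch above stitches tool-call fragments together across chunks, keyed by their Id; a simplified, self-contained sketch of that accumulation pattern (plain dicts stand in for the SDK delta objects):

def merge_tool_call_chunks(chunks):
    calls, current = [], None
    for chunk in chunks:
        if current is None:
            current = dict(chunk)
        elif chunk['Id'] != current['Id']:
            calls.append(current)           # a new id closes the previous call
            current = dict(chunk)
        else:
            current['Name'] += chunk['Name']            # same id: keep appending
            current['Arguments'] += chunk['Arguments']  # argument text arrives in pieces
    if current is not None:
        calls.append(current)
    return calls

print(merge_tool_call_chunks([
    {'Id': 'a', 'Name': 'get_weather', 'Arguments': '{"city": '},
    {'Id': 'a', 'Name': '', 'Arguments': '"Paris"}'},
    {'Id': 'b', 'Name': 'get_time', 'Arguments': '{}'},
]))
# [{'Id': 'a', 'Name': 'get_weather', 'Arguments': '{"city": "Paris"}'},
#  {'Id': 'b', 'Name': 'get_time', 'Arguments': '{}'}]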
@@ -177,12 +257,15 @@ class HunyuanLargeLanguageModel(LargeLanguageModel):
        """
        human_prompt = "\n\nHuman:"
        ai_prompt = "\n\nAssistant:"
        tool_prompt = "\n\nTool:"
        content = message.content

        if isinstance(message, UserPromptMessage):
            message_text = f"{human_prompt} {content}"
        elif isinstance(message, AssistantPromptMessage):
            message_text = f"{ai_prompt} {content}"
        elif isinstance(message, ToolPromptMessage):
            message_text = f"{tool_prompt} {content}"
        elif isinstance(message, SystemPromptMessage):
            message_text = content
        else:
@@ -203,3 +286,30 @@ class HunyuanLargeLanguageModel(LargeLanguageModel):
        return {
            InvokeError: [TencentCloudSDKException],
        }

    def _extract_response_tool_calls(self,
                                     response_tool_calls: list[dict]) \
            -> list[AssistantPromptMessage.ToolCall]:
        """
        Extract tool calls from response

        :param response_tool_calls: response tool calls
        :return: list of tool calls
        """
        tool_calls = []
        if response_tool_calls:
            for response_tool_call in response_tool_calls:
                response_function = response_tool_call.get('Function', {})
                function = AssistantPromptMessage.ToolCall.ToolCallFunction(
                    name=response_function.get('Name', ''),
                    arguments=response_function.get('Arguments', '')
                )

                tool_call = AssistantPromptMessage.ToolCall(
                    id=response_tool_call.get('Id', 0),
                    type='function',
                    function=function
                )
                tool_calls.append(tool_call)

        return tool_calls
@@ -55,7 +55,7 @@ CREATE TABLE IF NOT EXISTS {table_name} (
)
"""
SQL_CREATE_INDEX = """
CREATE INDEX idx_docs_{table_name} ON {table_name}(text)
CREATE INDEX IF NOT EXISTS idx_docs_{table_name} ON {table_name}(text)
INDEXTYPE IS CTXSYS.CONTEXT PARAMETERS
('FILTER CTXSYS.NULL_FILTER SECTION GROUP CTXSYS.HTML_SECTION_GROUP LEXER sys.my_chinese_vgram_lexer')
"""
@@ -248,7 +248,7 @@ class OracleVector(BaseVector):

    def delete(self) -> None:
        with self._get_cursor() as cur:
            cur.execute(f"DROP TABLE IF EXISTS {self.table_name}")
            cur.execute(f"DROP TABLE IF EXISTS {self.table_name} cascade constraints")

    def _create_collection(self, dimension: int):
        cache_key = f"vector_indexing_{self._collection_name}"
@@ -4,9 +4,8 @@ from pathlib import Path
from typing import Union
from urllib.parse import unquote

import requests

from configs import dify_config
from core.helper import ssrf_proxy
from core.rag.extractor.csv_extractor import CSVExtractor
from core.rag.extractor.entity.datasource_type import DatasourceType
from core.rag.extractor.entity.extract_setting import ExtractSetting
@@ -51,7 +50,7 @@ class ExtractProcessor:

    @classmethod
    def load_from_url(cls, url: str, return_text: bool = False) -> Union[list[Document], str]:
        response = requests.get(url, headers={
        response = ssrf_proxy.get(url, headers={
            "User-Agent": USER_AGENT
        })
@@ -54,8 +54,16 @@ class MarkdownExtractor(BaseExtractor):

        current_header = None
        current_text = ""
        code_block_flag = False

        for line in lines:
            if line.startswith("```"):
                code_block_flag = not code_block_flag
                current_text += line + "\n"
                continue
            if code_block_flag:
                current_text += line + "\n"
                continue
            header_match = re.match(r"^#+\s", line)
            if header_match:
                if current_header is not None:
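The code_block_flag added above keeps '#' lines inside fenced code blocks from being split out as headers; a standalone sketch of the same toggle (simplified, not the extractor's full splitting logic):

import re

def split_headers_outside_code(markdown: str):
    sections = []
    current_header, current_text = None, ''
    in_code_block = False
    for line in markdown.splitlines():
        if line.startswith('```'):
            in_code_block = not in_code_block   # toggle on every fence
            current_text += line + '\n'
            continue
        if in_code_block or not re.match(r'^#+\s', line):
            current_text += line + '\n'
            continue
        sections.append((current_header, current_text))  # a real header closes the section
        current_header, current_text = line, ''
    sections.append((current_header, current_text))
    return sections

sample = '# Title\ntext\n```\n# not a header\n```\nmore'
print(split_headers_outside_code(sample))
# [(None, ''), ('# Title', 'text\n```\n# not a header\n```\nmore\n')]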
@@ -60,11 +60,13 @@ class JinaReaderTool(BuiltinTool):
        if tool_parameters.get('no_cache', False):
            headers['X-No-Cache'] = 'true'

        max_retries = tool_parameters.get('max_retries', 3)
        response = ssrf_proxy.get(
            str(URL(self._jina_reader_endpoint + url)),
            headers=headers,
            params=request_params,
            timeout=(10, 60),
            max_retries=max_retries
        )

        if tool_parameters.get('summary', False):
@@ -150,3 +150,17 @@ parameters:
      pt_BR: Habilitar resumo para a saída
    llm_description: enable summary
    form: form
  - name: max_retries
    type: number
    required: false
    default: 3
    label:
      en_US: Retry
      zh_Hans: 重试
      pt_BR: Repetir
    human_description:
      en_US: Number of times to retry the request if it fails
      zh_Hans: 请求失败时重试的次数
      pt_BR: Número de vezes para repetir a solicitação se falhar
    llm_description: Number of times to retry the request if it fails
    form: form
@@ -40,10 +40,12 @@ class JinaSearchTool(BuiltinTool):
        if tool_parameters.get('no_cache', False):
            headers['X-No-Cache'] = 'true'

        max_retries = tool_parameters.get('max_retries', 3)
        response = ssrf_proxy.get(
            str(URL(self._jina_search_endpoint + query)),
            headers=headers,
            timeout=(10, 60)
            timeout=(10, 60),
            max_retries=max_retries
        )

        return self.create_text_message(response.text)
@@ -91,3 +91,17 @@ parameters:
      pt_BR: Ignorar o cache
    llm_description: bypass the cache
    form: form
  - name: max_retries
    type: number
    required: false
    default: 3
    label:
      en_US: Retry
      zh_Hans: 重试
      pt_BR: Repetir
    human_description:
      en_US: Number of times to retry the request if it fails
      zh_Hans: 请求失败时重试的次数
      pt_BR: Número de vezes para repetir a solicitação se falhar
    llm_description: Number of times to retry the request if it fails
    form: form
@@ -11,11 +11,10 @@ from contextlib import contextmanager
from urllib.parse import unquote

import cloudscraper
import requests
from bs4 import BeautifulSoup, CData, Comment, NavigableString
from newspaper import Article
from regex import regex

from core.helper import ssrf_proxy
from core.rag.extractor import extract_processor
from core.rag.extractor.extract_processor import ExtractProcessor
@@ -45,7 +44,7 @@ def get_url(url: str, user_agent: str = None) -> str:

    main_content_type = None
    supported_content_types = extract_processor.SUPPORT_URL_CONTENT_TYPES + ["text/html"]
    response = requests.head(url, headers=headers, allow_redirects=True, timeout=(5, 10))
    response = ssrf_proxy.head(url, headers=headers, follow_redirects=True, timeout=(5, 10))

    if response.status_code == 200:
        # check content-type
@@ -67,10 +66,11 @@ def get_url(url: str, user_agent: str = None) -> str:
        if main_content_type in extract_processor.SUPPORT_URL_CONTENT_TYPES:
            return ExtractProcessor.load_from_url(url, return_text=True)

        response = requests.get(url, headers=headers, allow_redirects=True, timeout=(120, 300))
        response = ssrf_proxy.get(url, headers=headers, follow_redirects=True, timeout=(120, 300))
    elif response.status_code == 403:
        scraper = cloudscraper.create_scraper()
        response = scraper.get(url, headers=headers, allow_redirects=True, timeout=(120, 300))
        scraper.perform_request = ssrf_proxy.make_request
        response = scraper.get(url, headers=headers, follow_redirects=True, timeout=(120, 300))

    if response.status_code != 200:
        return "URL returned status code {}.".format(response.status_code)
@@ -78,7 +78,7 @@ def get_url(url: str, user_agent: str = None) -> str:
    a = extract_using_readabilipy(response.text)

    if not a['plain_text'] or not a['plain_text'].strip():
        return get_url_from_newspaper3k(url)
        return ''

    res = FULL_TEMPLATE.format(
        title=a['title'],
@@ -91,23 +91,6 @@ def get_url(url: str, user_agent: str = None) -> str:
    return res


def get_url_from_newspaper3k(url: str) -> str:

    a = Article(url)
    a.download()
    a.parse()

    res = FULL_TEMPLATE.format(
        title=a.title,
        authors=a.authors,
        publish_date=a.publish_date,
        top_image=a.top_image,
        text=a.text,
    )

    return res


def extract_using_readabilipy(html):
    with tempfile.NamedTemporaryFile(delete=False, mode='w+') as f_html:
        f_html.write(html)
@@ -125,11 +125,15 @@ class ToolNode(BaseNode):
                ]
            else:
                tool_input = node_data.tool_parameters[parameter_name]
                segment_group = parser.convert_template(
                    template=str(tool_input.value),
                    variable_pool=variable_pool,
                )
                result[parameter_name] = segment_group.log if for_log else segment_group.text
                if tool_input.type == 'variable':
                    parameter_value = variable_pool.get(tool_input.value).value
                else:
                    segment_group = parser.convert_template(
                        template=str(tool_input.value),
                        variable_pool=variable_pool,
                    )
                    parameter_value = segment_group.log if for_log else segment_group.text
                result[parameter_name] = parameter_value

        return result
@@ -32,8 +32,7 @@ class TencentStorage(BaseStorage):
    def load_stream(self, filename: str) -> Generator:
        def generate(filename: str = filename) -> Generator:
            response = self.client.get_object(Bucket=self.bucket_name, Key=filename)
            while chunk := response['Body'].get_stream(chunk_size=4096):
                yield chunk
            yield from response['Body'].get_stream(chunk_size=4096)

        return generate()
@@ -1,10 +1,12 @@
from flask_restful import fields

from core.app.segments import SecretVariable, Variable
from core.app.segments import SecretVariable, SegmentType, Variable
from core.helper import encrypter
from fields.member_fields import simple_account_fields
from libs.helper import TimestampField

ENVIRONMENT_VARIABLE_SUPPORTED_TYPES = (SegmentType.STRING, SegmentType.NUMBER, SegmentType.SECRET)


class EnvironmentVariableField(fields.Raw):
    def format(self, value):
@@ -16,14 +18,18 @@ class EnvironmentVariableField(fields.Raw):
                'value': encrypter.obfuscated_token(value.value),
                'value_type': value.value_type.value,
            }
        elif isinstance(value, Variable):
        if isinstance(value, Variable):
            return {
                'id': value.id,
                'name': value.name,
                'value': value.value,
                'value_type': value.value_type.value,
            }
        return value
        if isinstance(value, dict):
            value_type = value.get('value_type')
            if value_type not in ENVIRONMENT_VARIABLE_SUPPORTED_TYPES:
                raise ValueError(f'Unsupported environment variable value type: {value_type}')
            return value


environment_variable_fields = {
@@ -0,0 +1,52 @@
import random
from unittest.mock import MagicMock, patch

from core.helper.ssrf_proxy import SSRF_DEFAULT_MAX_RETRIES, STATUS_FORCELIST, make_request


@patch('httpx.request')
def test_successful_request(mock_request):
    mock_response = MagicMock()
    mock_response.status_code = 200
    mock_request.return_value = mock_response

    response = make_request('GET', 'http://example.com')
    assert response.status_code == 200


@patch('httpx.request')
def test_retry_exceed_max_retries(mock_request):
    mock_response = MagicMock()
    mock_response.status_code = 500

    side_effects = [mock_response] * SSRF_DEFAULT_MAX_RETRIES
    mock_request.side_effect = side_effects

    try:
        make_request('GET', 'http://example.com', max_retries=SSRF_DEFAULT_MAX_RETRIES - 1)
        raise AssertionError("Expected Exception not raised")
    except Exception as e:
        assert str(e) == f"Reached maximum retries ({SSRF_DEFAULT_MAX_RETRIES - 1}) for URL http://example.com"


@patch('httpx.request')
def test_retry_logic_success(mock_request):
    side_effects = []

    for _ in range(SSRF_DEFAULT_MAX_RETRIES):
        status_code = random.choice(STATUS_FORCELIST)
        mock_response = MagicMock()
        mock_response.status_code = status_code
        side_effects.append(mock_response)

    mock_response_200 = MagicMock()
    mock_response_200.status_code = 200
    side_effects.append(mock_response_200)

    mock_request.side_effect = side_effects

    response = make_request('GET', 'http://example.com', max_retries=SSRF_DEFAULT_MAX_RETRIES)

    assert response.status_code == 200
    assert mock_request.call_count == SSRF_DEFAULT_MAX_RETRIES + 1
    assert mock_request.call_args_list[0][1].get('method') == 'GET'
@@ -96,7 +96,7 @@ const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, readonly, onPromptVar
      ...rest,
      type: type === InputVarType.textInput ? 'string' : type,
      key: variable,
      name: label,
      name: label as string,
    }

    if (payload.type === InputVarType.textInput)
@@ -4,7 +4,6 @@ import React from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import I18n from '@/context/i18n'
import { FlipBackward } from '@/app/components/base/icons/src/vender/line/arrows'
import { LanguagesSupported } from '@/i18n/language'
type Props = {
  onReturnToSimpleMode: () => void
@@ -38,7 +37,6 @@ const AdvancedModeWarning: FC<Props> = ({
          onClick={onReturnToSimpleMode}
          className='shrink-0 flex items-center h-6 px-2 bg-indigo-600 shadow-xs border border-gray-200 rounded-lg text-white text-xs font-semibold cursor-pointer space-x-1'
        >
          <FlipBackward className='w-3 h-3 text-white' />
          <div className='text-xs font-semibold uppercase'>{t('appDebug.promptMode.switchBack')}</div>
        </div>
        <div
@@ -2,15 +2,15 @@ import { useState } from 'react'
import { useTranslation } from 'react-i18next'
import { XMarkIcon } from '@heroicons/react/24/outline'
import NotionPageSelector from '../base'
import type { NotionPageSelectorValue } from '../base'
import s from './index.module.css'
import type { NotionPage } from '@/models/common'
import cn from '@/utils/classnames'
import Modal from '@/app/components/base/modal'

type NotionPageSelectorModalProps = {
  isShow: boolean
  onClose: () => void
  onSave: (selectedPages: NotionPageSelectorValue[]) => void
  onSave: (selectedPages: NotionPage[]) => void
  datasetId: string
}
const NotionPageSelectorModal = ({
@@ -20,12 +20,12 @@ const NotionPageSelectorModal = ({
  datasetId,
}: NotionPageSelectorModalProps) => {
  const { t } = useTranslation()
  const [selectedPages, setSelectedPages] = useState<NotionPageSelectorValue[]>([])
  const [selectedPages, setSelectedPages] = useState<NotionPage[]>([])

  const handleClose = () => {
    onClose()
  }
  const handleSelectPage = (newSelectedPages: NotionPageSelectorValue[]) => {
  const handleSelectPage = (newSelectedPages: NotionPage[]) => {
    setSelectedPages(newSelectedPages)
  }
  const handleSave = () => {
@@ -191,7 +191,7 @@ const SimpleSelect: FC<ISelectProps> = ({
            onClick={(e) => {
              e.stopPropagation()
              setSelectedItem(null)
              onSelect({ value: null })
              onSelect({ name: '', value: '' })
            }}
            className="h-5 w-5 text-gray-400 cursor-pointer"
            aria-hidden="false"
@@ -28,7 +28,7 @@ const Category: FC<ICategoryProps> = ({
  allCategoriesEn,
}) => {
  const { t } = useTranslation()
  const isAllCategories = !list.includes(value)
  const isAllCategories = !list.includes(value as AppCategory)

  const itemClassName = (isSelected: boolean) => cn(
    'flex items-center px-3 py-[7px] h-[32px] rounded-lg border-[0.5px] border-transparent text-gray-700 font-medium leading-[18px] cursor-pointer hover:bg-gray-200',
@@ -12,6 +12,7 @@ export enum FormTypeEnum {
  secretInput = 'secret-input',
  select = 'select',
  radio = 'radio',
  boolean = 'boolean',
  files = 'files',
}
@@ -11,6 +11,7 @@ import useSWR from 'swr'
import useSWRInfinite from 'swr/infinite'
import { flatten } from 'lodash-es'
import Nav from '../nav'
import type { NavItem } from '../nav/nav-selector'
import { fetchDatasetDetail, fetchDatasets } from '@/service/datasets'
import type { DataSetListResponse } from '@/models/datasets'
@@ -31,7 +32,7 @@ const DatasetNav = () => {
      datasetId,
    }
      : null,
    apiParams => fetchDatasetDetail(apiParams.datasetId))
    apiParams => fetchDatasetDetail(apiParams.datasetId as string))
  const { data: datasetsData, setSize } = useSWRInfinite(datasetId ? getKey : () => null, fetchDatasets, { revalidateFirstPage: false, revalidateAll: true })
  const datasetItems = flatten(datasetsData?.map(datasetData => datasetData.data))
@@ -46,14 +47,14 @@ const DatasetNav = () => {
        text={t('common.menus.datasets')}
        activeSegment='datasets'
        link='/datasets'
        curNav={currentDataset}
        curNav={currentDataset as Omit<NavItem, 'link'>}
        navs={datasetItems.map(dataset => ({
          id: dataset.id,
          name: dataset.name,
          link: `/datasets/${dataset.id}/documents`,
          icon: dataset.icon,
          icon_background: dataset.icon_background,
        }))}
        })) as NavItem[]}
        createText={t('common.menus.newDataset')}
        onCreate={() => router.push('/datasets/create')}
        onLoadmore={handleLoadmore}
@@ -23,13 +23,13 @@ export type NavItem = {
  link: string
  icon: string
  icon_background: string
  mode: string
  mode?: string
}
export type INavSelectorProps = {
  navs: NavItem[]
  curNav?: Omit<NavItem, 'link'>
  createText: string
  isApp: boolean
  isApp?: boolean
  onCreate: (state: string) => void
  onLoadmore?: () => void
}
@@ -36,7 +36,7 @@ const ProviderCard = ({
  }, [collection.labels, labelList, language])

  return (
    <div className={cn('group flex col-span-1 bg-white border-2 border-solid border-transparent rounded-xl shadow-sm min-h-[160px] flex flex-col transition-all duration-200 ease-in-out cursor-pointer hover:shadow-lg', active && '!border-primary-400')} onClick={onSelect}>
    <div className={cn('group col-span-1 bg-white border-2 border-solid border-transparent rounded-xl shadow-sm min-h-[160px] flex flex-col transition-all duration-200 ease-in-out cursor-pointer hover:shadow-lg', active && '!border-primary-400')} onClick={onSelect}>
      <div className='flex pt-[14px] px-[14px] pb-3 h-[66px] items-center gap-3 grow-0 shrink-0'>
        <div className='relative shrink-0'>
          {typeof collection.icon === 'string' && (
@@ -85,7 +85,7 @@ const ProviderDetail = ({
  const [customCollection, setCustomCollection] = useState<CustomCollectionBackend | WorkflowToolProviderResponse | null>(null)
  const [isShowEditCollectionToolModal, setIsShowEditCustomCollectionModal] = useState(false)
  const [showConfirmDelete, setShowConfirmDelete] = useState(false)
  const [deleteAction, setDeleteAction] = useState(null)
  const [deleteAction, setDeleteAction] = useState('')
  const doUpdateCustomToolCollection = async (data: CustomCollectionBackend) => {
    await updateCustomCollection(data)
    onRefreshData()
@@ -173,7 +173,7 @@ const WorkflowToolAsModal: FC<Props> = ({
          <div>
            <div className='py-2 leading-5 text-sm font-medium text-gray-900'>{t('tools.createTool.description')}</div>
            <textarea
              className='w-full h-10 px-3 py-2 text-sm font-normal bg-gray-100 rounded-lg border border-transparent outline-none appearance-none caret-primary-600 placeholder:text-gray-400 hover:bg-gray-50 hover:border hover:border-gray-300 focus:bg-gray-50 focus:border focus:border-gray-300 focus:shadow-xs h-[80px] resize-none'
              className='w-full px-3 py-2 text-sm font-normal bg-gray-100 rounded-lg border border-transparent outline-none appearance-none caret-primary-600 placeholder:text-gray-400 hover:bg-gray-50 hover:border hover:border-gray-300 focus:bg-gray-50 focus:border focus:border-gray-300 focus:shadow-xs h-[80px] resize-none'
              placeholder={t('tools.createTool.descriptionPlaceholder') || ''}
              value={description}
              onChange={e => setDescription(e.target.value)}
@@ -1,6 +1,7 @@
import { pinyin } from 'pinyin-pro'
import type { FC, RefObject } from 'react'

export const groupItems = (items, getFirstChar) => {
export const groupItems = (items: Array<any>, getFirstChar: (item: string) => string) => {
  const groups = items.reduce((acc, item) => {
    const firstChar = getFirstChar(item)
    if (!firstChar || firstChar.length === 0)
@@ -34,9 +35,14 @@ export const groupItems = (items, getFirstChar) => {
  return { letters, groups }
}

const IndexBar = ({ letters, itemRefs }) => {
  const handleIndexClick = (letter) => {
    const element = itemRefs.current[letter]
type IndexBarProps = {
  letters: string[]
  itemRefs: RefObject<{ [key: string]: HTMLElement | null }>
}

const IndexBar: FC<IndexBarProps> = ({ letters, itemRefs }) => {
  const handleIndexClick = (letter: string) => {
    const element = itemRefs.current?.[letter]
    if (element)
      element.scrollIntoView({ behavior: 'smooth' })
  }
@@ -69,6 +69,35 @@ const translation = {
    manageInTools: 'Gérer dans les outils',
    workflowAsToolTip: 'Reconfiguration de l\'outil requise après la mise à jour du flux de travail.',
    viewDetailInTracingPanel: 'Voir les détails',
    syncingData: 'Synchroniser des données en quelques secondes.',
    importDSL: 'Importe DSL',
    importDSLTip: 'Le projet actuel sera écrasé. Exporter le flux de travail en tant que sauvegarde avant d\'importer.',
    backupCurrentDraft: 'Sauvegarder le projet actuel',
    chooseDSL: 'Choisir le fichier DSL(yml)',
    overwriteAndImport: 'Écraser et importer',
    importFailure: 'Echec de l\'importation',
    importSuccess: 'Import avec succès',
  },
  env: {
    envPanelTitle: 'Variables d\'Environnement',
    envDescription: 'Les variables d\'environnement peuvent être utilisées pour stocker des informations privées et des informations d\'identification. Elles sont en lecture seule et peuvent être séparées du fichier DSL lors de l\'exportation.',
    envPanelButton: 'Ajouter Variable',
    modal: {
      title: 'Ajouter Variables d\'Environnement',
      editTitle: 'Editer titre',
      type: 'Type',
      name: 'Nom',
      namePlaceholder: 'Nom de l\'env',
      value: 'valeur',
      valuePlaceholder: 'Valeur de l\'env',
      secretTip: 'Utilisé pour définir des informations ou des données sensibles, avec des paramètres DSL configurés pour la prévention des fuites.',
    },
    export: {
      title: 'Exporter des variables d\'environnement secrètes?',
      checkbox: 'Exporter les valeurs secrètes',
      ignore: 'Exporter DSL',
      export: 'Exporter les DSL avec des valeurs secrètes',
    },
  },
  changeHistory: {
    title: 'Historique des modifications',
@@ -440,6 +469,25 @@ const translation = {
      iteration_other: '{{count}} Itérations',
      currentIteration: 'Itération actuelle',
    },
    note: {
      addNote: 'Ajouter note',
      editor: {
        placeholder: 'Redigez votre note...',
        small: 'Petit',
        medium: 'Moyen',
        large: 'Grand',
        bold: 'Gras',
        italic: 'Italique',
        strikethrough: 'Barré',
        link: 'Lien',
        openLink: 'Ouvrir',
        unlink: 'Annuler le lien',
        enterUrl: 'Entrer l\'URL...',
        invalidUrl: 'URL invalide',
        bulletList: 'Liste à puces',
        showAuthor: 'Afficher l\'auteur',
      },
    },
  },
  tracing: {
    stopBy: 'Arrêté par {{user}}',
@@ -3,7 +3,7 @@ const translation = {
    line1: 'プロンプト',
    line2: 'エンジニアリング',
  },
  orchestrate: 'Orchestrate',
  orchestrate: 'オーケストレーション',
  promptMode: {
    simple: 'エキスパートモードに切り替えて、PROMPT全体を編集します',
    advanced: 'エキスパートモード',
@@ -69,6 +69,35 @@ const translation = {
    manageInTools: 'ツールで管理',
    workflowAsToolTip: 'ワークフローの更新後、ツールの再設定が必要です。',
    viewDetailInTracingPanel: '詳細を表示',
    syncingData: 'データを同期中,数秒程度で終わります。',
    importDSL: 'DSLをインポート',
    importDSLTip: '現在のドラフトは上書きされますので、インポートする際は、事前にワークフローをバックアップとしてエクスポートいただきますよう、お願い申し上げます。',
    backupCurrentDraft: '現在のドラフトをバックアップ',
    chooseDSL: 'DSL(yml)ファイルを選択',
    overwriteAndImport: 'オーバライトとインポート',
    importFailure: 'インポート失敗',
    importSuccess: 'インポート成功',
  },
  env: {
    envPanelTitle: '環境変数',
    envDescription: '環境変数は、個人情報や認証情報を格納するために使用することができます。これらは読み取り専用であり、DSLファイルからエクスポートする際には分離されます。',
    envPanelButton: '環境変数を追加',
    modal: {
      title: '環境変数を追加',
      editTitle: '環境変数を編集',
      type: 'タイプ',
      name: '名前',
      namePlaceholder: '変数名',
      value: '値',
      valuePlaceholder: '変数値',
      secretTip: 'このような機密情報やデータは、定義に使用され、DSLの設定は情報漏洩を防ぐために特別に構成されています。',
    },
    export: {
      title: 'シークレット環境変数をエクスポートしますか?',
      checkbox: 'シクレート値をエクスポート',
      ignore: 'DSLをエクスポート',
      export: 'シクレート値を含むDSLをエクスポート',
    },
  },
  changeHistory: {
    title: '変更履歴',
@@ -441,6 +470,25 @@ const translation = {
      iteration_other: '{{count}} イテレーション',
      currentIteration: '現在のイテレーション',
    },
    note: {
      addNote: 'コメントを追加',
      editor: {
        placeholder: 'メモを書く...',
        small: '小',
        medium: '中',
        large: '大',
        bold: '太字',
        italic: '斜体',
        strikethrough: '打ち消し線',
        link: 'リンク',
        openLink: '開く',
        unlink: 'リンクをキャンセル',
        enterUrl: 'リンク入力中...',
        invalidUrl: 'リンク無効',
        bulletList: 'リスト',
        showAuthor: '著者を表示する',
      },
    },
  },
  tracing: {
    stopBy: '{{user}}によって停止',
@@ -3,7 +3,7 @@ const translation = {
    line1: 'PROMPT',
    line2: 'Engineering',
  },
  orchestrate: 'Orchestrate',
  orchestrate: 'Dàn nhạc',
  promptMode: {
    simple: 'Chuyển sang Chế độ Chuyên gia để chỉnh sửa toàn bộ PROMPT',
    advanced: 'Chế độ Chuyên gia',