mirror of https://github.com/langgenius/dify.git

Merge branch 'main' into feat/explore

commit 832815e509
@@ -0,0 +1,32 @@
---
name: "\U0001F41B Bug report"
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''

---

<!--
Please provide a clear and concise description of what the bug is. Include
screenshots if needed. Please test using the latest version of the relevant
Dify packages to make sure your issue has not already been fixed.
-->

Dify version: Cloud | Self Host

## Steps To Reproduce
<!--
Your bug will get fixed much faster if we can run your code and it doesn't
have dependencies other than Dify. Issues without reproduction steps or
code examples may be immediately closed as not actionable.
-->

1.
2.


## The current behavior


## The expected behavior
@@ -0,0 +1,20 @@
---
name: "\U0001F680 Feature request"
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
@@ -0,0 +1,10 @@
---
name: "\U0001F914 Questions and Help"
about: Ask a usage or consultation question
title: ''
labels: ''
assignees: ''

---
@@ -7,6 +7,9 @@

[Website](https://dify.ai) • [Docs](https://docs.dify.ai) • [Twitter](https://twitter.com/dify_ai) • [Discord](https://discord.gg/FngNHpbcY7)

Vote for us on Product Hunt ↓
<a href="https://www.producthunt.com/posts/dify-ai"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?sanitize=true&post_id=dify-ai&theme=light" alt="Product Hunt Badge" width="250" height="54"></a>

**Dify** is an easy-to-use LLMOps platform designed to empower more people to create sustainable, AI-native applications. With visual orchestration for various application types, Dify offers out-of-the-box, ready-to-use applications that can also serve as Backend-as-a-Service APIs. Unify your development process with one API for plugins and datasets integration, and streamline your operations using a single interface for prompt engineering, visual analytics, and continuous improvement.

Applications created with Dify include:
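Since the README positions Dify apps as Backend-as-a-Service APIs, a minimal sketch of calling such an app over HTTP may help readers of this diff. The `completion-messages` path and `https://api.dify.ai/v1/` base are taken from the PHP client changes later in this commit; the payload field names and the `app-...` key format shown here are assumptions, not confirmed by this diff.

```python
import requests

# Minimal sketch of calling a Dify completion app as an API.
# Endpoint path and base URL come from the PHP SDK changes in this commit;
# payload field names and the key format are assumptions.
API_KEY = "app-your-api-key"          # hypothetical key
BASE_URL = "https://api.dify.ai/v1/"  # trailing slash + relative paths, as in the PHP client

resp = requests.post(
    BASE_URL + "completion-messages",
    headers={"Authorization": f"Bearer {API_KEY}"},
    json={
        "inputs": {},                  # app-defined prompt variables
        "query": "Hello, Dify!",
        "response_mode": "blocking",   # or "streaming"
        "user": "end-user-id",
    },
    timeout=30,
)
print(resp.json())
```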
@@ -8,6 +8,9 @@

[官方网站](https://dify.ai) • [文档](https://docs.dify.ai/v/zh-hans) • [Twitter](https://twitter.com/dify_ai) • [Discord](https://discord.gg/FngNHpbcY7)

在 Product Hunt 上投我们一票吧 ↓
<a href="https://www.producthunt.com/posts/dify-ai"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?sanitize=true&post_id=dify-ai&theme=light" alt="Product Hunt Badge" width="250" height="54"></a>

**Dify** 是一个易用的 LLMOps 平台,旨在让更多人可以创建可持续运营的原生 AI 应用。Dify 提供多种类型应用的可视化编排,应用可开箱即用,也能以“后端即服务”的 API 提供服务。

通过 Dify 创建的应用包含了:
@@ -7,6 +7,10 @@

[Web サイト](https://dify.ai) • [ドキュメント](https://docs.dify.ai) • [Twitter](https://twitter.com/dify_ai) • [Discord](https://discord.gg/FngNHpbcY7)

Product Huntで私たちに投票してください ↓
<a href="https://www.producthunt.com/posts/dify-ai"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?sanitize=true&post_id=dify-ai&theme=light" alt="Product Hunt Badge" width="250" height="54"></a>


**Dify** は、より多くの人々が持続可能な AI ネイティブアプリケーションを作成できるように設計された、使いやすい LLMOps プラットフォームです。様々なアプリケーションタイプに対応したビジュアルオーケストレーションにより Dify は Backend-as-a-Service API としても機能する、すぐに使えるアプリケーションを提供します。プラグインやデータセットを統合するための1つの API で開発プロセスを統一し、プロンプトエンジニアリング、ビジュアル分析、継続的な改善のための1つのインターフェイスを使って業務を合理化します。

Difyで作成したアプリケーションは以下の通りです:
@@ -14,7 +14,7 @@ CONSOLE_URL=http://127.0.0.1:5001
API_URL=http://127.0.0.1:5001

# Web APP base URL
APP_URL=http://127.0.0.1:5001
APP_URL=http://127.0.0.1:3000

# celery configuration
CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1
@@ -17,6 +17,6 @@ def _get_app(app_id, mode=None):
raise NotFound("App not found")

if mode and app.mode != mode:
raise AppUnavailableError()
raise NotFound("The {} app not found".format(mode))

return app
@@ -9,31 +9,33 @@ class AppNotFoundError(BaseHTTPException):

class ProviderNotInitializeError(BaseHTTPException):
error_code = 'provider_not_initialize'
description = "Provider Token not initialize."
description = "No valid model provider credentials found. " \
"Please go to Settings -> Model Provider to complete your provider credentials."
code = 400


class ProviderQuotaExceededError(BaseHTTPException):
error_code = 'provider_quota_exceeded'
description = "Provider quota exceeded."
description = "Your quota for Dify Hosted OpenAI has been exhausted. " \
"Please go to Settings -> Model Provider to complete your own provider credentials."
code = 400


class ProviderModelCurrentlyNotSupportError(BaseHTTPException):
error_code = 'model_currently_not_support'
description = "GPT-4 currently not support."
description = "Dify Hosted OpenAI trial currently not support the GPT-4 model."
code = 400


class ConversationCompletedError(BaseHTTPException):
error_code = 'conversation_completed'
description = "Conversation was completed."
description = "The conversation has ended. Please start a new conversation."
code = 400


class AppUnavailableError(BaseHTTPException):
error_code = 'app_unavailable'
description = "App unavailable."
description = "App unavailable, please check your app configurations."
code = 400

@@ -45,5 +47,5 @@ class CompletionRequestError(BaseHTTPException):

class AppMoreLikeThisDisabledError(BaseHTTPException):
error_code = 'app_more_like_this_disabled'
description = "More like this disabled."
description = "The 'More like this' feature is disabled. Please refresh your page."
code = 403
@@ -10,13 +10,14 @@ from werkzeug.exceptions import NotFound, Forbidden

import services
from controllers.console import api
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.app.error import ProviderNotInitializeError, ProviderQuotaExceededError, \
ProviderModelCurrentlyNotSupportError
from controllers.console.datasets.error import DocumentAlreadyFinishedError, InvalidActionError, DocumentIndexingError, \
InvalidMetadataError, ArchivedDocumentImmutableError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.indexing_runner import IndexingRunner
from core.llm.error import ProviderTokenNotInitError
from core.llm.error import ProviderTokenNotInitError, QuotaExceededError, ModelCurrentlyNotSupportError
from extensions.ext_redis import redis_client
from libs.helper import TimestampField
from extensions.ext_database import db

@@ -222,6 +223,10 @@ class DatasetDocumentListApi(Resource):
document = DocumentService.save_document_with_dataset_id(dataset, args, current_user)
except ProviderTokenNotInitError:
raise ProviderNotInitializeError()
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()

return document

@@ -259,6 +264,10 @@ class DatasetInitApi(Resource):
)
except ProviderTokenNotInitError:
raise ProviderNotInitializeError()
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()

response = {
'dataset': dataset,
@@ -3,7 +3,7 @@ from libs.exception import BaseHTTPException

class NoFileUploadedError(BaseHTTPException):
error_code = 'no_file_uploaded'
description = "No file uploaded."
description = "Please upload your file."
code = 400

@@ -27,25 +27,25 @@ class UnsupportedFileTypeError(BaseHTTPException):

class HighQualityDatasetOnlyError(BaseHTTPException):
error_code = 'high_quality_dataset_only'
description = "High quality dataset only."
description = "Current operation only supports 'high-quality' datasets."
code = 400


class DatasetNotInitializedError(BaseHTTPException):
error_code = 'dataset_not_initialized'
description = "Dataset not initialized."
description = "The dataset is still being initialized or indexing. Please wait a moment."
code = 400


class ArchivedDocumentImmutableError(BaseHTTPException):
error_code = 'archived_document_immutable'
description = "Cannot process an archived document."
description = "The archived document is not editable."
code = 403


class DatasetNameDuplicateError(BaseHTTPException):
error_code = 'dataset_name_duplicate'
description = "Dataset name already exists."
description = "The dataset name already exists. Please modify your dataset name."
code = 409

@@ -57,17 +57,17 @@ class InvalidActionError(BaseHTTPException):

class DocumentAlreadyFinishedError(BaseHTTPException):
error_code = 'document_already_finished'
description = "Document already finished."
description = "The document has been processed. Please refresh the page or go to the document details."
code = 400


class DocumentIndexingError(BaseHTTPException):
error_code = 'document_indexing'
description = "Document indexing."
description = "The document is being processed and cannot be edited."
code = 400


class InvalidMetadataError(BaseHTTPException):
error_code = 'invalid_metadata'
description = "Invalid metadata."
description = "The metadata content is incorrect. Please check and verify."
code = 400
@@ -6,9 +6,12 @@ from werkzeug.exceptions import InternalServerError, NotFound, Forbidden

import services
from controllers.console import api
from controllers.console.app.error import ProviderNotInitializeError, ProviderQuotaExceededError, \
ProviderModelCurrentlyNotSupportError
from controllers.console.datasets.error import HighQualityDatasetOnlyError, DatasetNotInitializedError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.llm.error import ProviderTokenNotInitError, QuotaExceededError, ModelCurrentlyNotSupportError
from libs.helper import TimestampField
from services.dataset_service import DatasetService
from services.hit_testing_service import HitTestingService

@@ -92,6 +95,12 @@ class HitTestingApi(Resource):
return {"query": response['query'], 'records': marshal(response['records'], hit_testing_record_fields)}
except services.errors.index.IndexNotInitializedError:
raise DatasetNotInitializedError()
except ProviderTokenNotInitError:
raise ProviderNotInitializeError()
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
except Exception as e:
logging.exception("Hit testing failed.")
raise InternalServerError(str(e))
@@ -3,13 +3,14 @@ from libs.exception import BaseHTTPException

class AlreadySetupError(BaseHTTPException):
error_code = 'already_setup'
description = "Application already setup."
description = "Dify has been successfully installed. Please refresh the page or return to the dashboard homepage."
code = 403


class NotSetupError(BaseHTTPException):
error_code = 'not_setup'
description = "Application not setup."
description = "Dify has not been initialized and installed yet. " \
"Please proceed with the initialization and installation process first."
code = 401
@@ -19,6 +19,14 @@ class VersionApi(Resource):
args = parser.parse_args()
check_update_url = current_app.config['CHECK_UPDATE_URL']

if not check_update_url:
return {
'version': '0.0.0',
'release_date': '',
'release_notes': '',
'can_auto_update': False
}

try:
response = requests.get(check_update_url, {
'current_version': args.get('current_version')
@@ -21,11 +21,11 @@ class InvalidInvitationCodeError(BaseHTTPException):

class AccountAlreadyInitedError(BaseHTTPException):
error_code = 'account_already_inited'
description = "Account already inited."
description = "The account has been initialized. Please refresh the page."
code = 400


class AccountNotInitializedError(BaseHTTPException):
error_code = 'account_not_initialized'
description = "Account not initialized."
description = "The account has not been initialized yet. Please proceed with the initialization process first."
code = 400
@@ -90,8 +90,8 @@ class ProviderTokenApi(Resource):
configs=args['token']
)
token_is_valid = True
except ValidateFailedError:
token_is_valid = False
except ValidateFailedError as ex:
raise ValueError(str(ex))

base64_encrypted_token = ProviderService.get_encrypted_token(
tenant=current_user.current_tenant,

@@ -157,7 +157,7 @@ class ProviderTokenValidateApi(Resource):
args = parser.parse_args()

# todo: remove this when the provider is supported
if provider in [ProviderName.ANTHROPIC.value, ProviderName.AZURE_OPENAI.value, ProviderName.COHERE.value,
if provider in [ProviderName.ANTHROPIC.value, ProviderName.COHERE.value,
ProviderName.HUGGINGFACEHUB.value]:
return {'result': 'success', 'warning': 'MOCK: This provider is not supported yet.'}
@@ -4,43 +4,45 @@ from libs.exception import BaseHTTPException

class AppUnavailableError(BaseHTTPException):
error_code = 'app_unavailable'
description = "App unavailable."
description = "App unavailable, please check your app configurations."
code = 400


class NotCompletionAppError(BaseHTTPException):
error_code = 'not_completion_app'
description = "Not Completion App"
description = "Please check if your Completion app mode matches the right API route."
code = 400


class NotChatAppError(BaseHTTPException):
error_code = 'not_chat_app'
description = "Not Chat App"
description = "Please check if your Chat app mode matches the right API route."
code = 400


class ConversationCompletedError(BaseHTTPException):
error_code = 'conversation_completed'
description = "Conversation Completed."
description = "The conversation has ended. Please start a new conversation."
code = 400


class ProviderNotInitializeError(BaseHTTPException):
error_code = 'provider_not_initialize'
description = "Provider Token not initialize."
description = "No valid model provider credentials found. " \
"Please go to Settings -> Model Provider to complete your provider credentials."
code = 400


class ProviderQuotaExceededError(BaseHTTPException):
error_code = 'provider_quota_exceeded'
description = "Provider quota exceeded."
description = "Your quota for Dify Hosted OpenAI has been exhausted. " \
"Please go to Settings -> Model Provider to complete your own provider credentials."
code = 400


class ProviderModelCurrentlyNotSupportError(BaseHTTPException):
error_code = 'model_currently_not_support'
description = "GPT-4 currently not support."
description = "Dify Hosted OpenAI trial currently not support the GPT-4 model."
code = 400

@@ -16,5 +16,5 @@ class DocumentIndexingError(BaseHTTPException):

class DatasetNotInitedError(BaseHTTPException):
error_code = 'dataset_not_inited'
description = "Dataset not inited."
description = "The dataset is still being initialized or indexing. Please wait a moment."
code = 403
@@ -4,43 +4,45 @@ from libs.exception import BaseHTTPException

class AppUnavailableError(BaseHTTPException):
error_code = 'app_unavailable'
description = "App unavailable."
description = "App unavailable, please check your app configurations."
code = 400


class NotCompletionAppError(BaseHTTPException):
error_code = 'not_completion_app'
description = "Not Completion App"
description = "Please check if your Completion app mode matches the right API route."
code = 400


class NotChatAppError(BaseHTTPException):
error_code = 'not_chat_app'
description = "Not Chat App"
description = "Please check if your Chat app mode matches the right API route."
code = 400


class ConversationCompletedError(BaseHTTPException):
error_code = 'conversation_completed'
description = "Conversation Completed."
description = "The conversation has ended. Please start a new conversation."
code = 400


class ProviderNotInitializeError(BaseHTTPException):
error_code = 'provider_not_initialize'
description = "Provider Token not initialize."
description = "No valid model provider credentials found. " \
"Please go to Settings -> Model Provider to complete your provider credentials."
code = 400


class ProviderQuotaExceededError(BaseHTTPException):
error_code = 'provider_quota_exceeded'
description = "Provider quota exceeded."
description = "Your quota for Dify Hosted OpenAI has been exhausted. " \
"Please go to Settings -> Model Provider to complete your own provider credentials."
code = 400


class ProviderModelCurrentlyNotSupportError(BaseHTTPException):
error_code = 'model_currently_not_support'
description = "GPT-4 currently not support."
description = "Dify Hosted OpenAI trial currently not support the GPT-4 model."
code = 400

@@ -52,11 +54,11 @@ class CompletionRequestError(BaseHTTPException):

class AppMoreLikeThisDisabledError(BaseHTTPException):
error_code = 'app_more_like_this_disabled'
description = "More like this disabled."
description = "The 'More like this' feature is disabled. Please refresh your page."
code = 403


class AppSuggestedQuestionsAfterAnswerDisabledError(BaseHTTPException):
error_code = 'app_suggested_questions_after_answer_disabled'
description = "Function Suggested questions after answer disabled."
description = "The 'Suggested Questions After Answer' feature is disabled. Please refresh your page."
code = 403
@@ -1,4 +1,4 @@
from typing import Optional, List, Union
from typing import Optional, List, Union, Tuple

from langchain.callbacks import CallbackManager
from langchain.chat_models.base import BaseChatModel
@@ -39,7 +39,8 @@ class Completion:
memory = cls.get_memory_from_conversation(
tenant_id=app.tenant_id,
app_model_config=app_model_config,
conversation=conversation
conversation=conversation,
return_messages=False
)

inputs = conversation.inputs

@@ -96,7 +97,7 @@ class Completion:
)

# get llm prompt
prompt = cls.get_main_llm_prompt(
prompt, stop_words = cls.get_main_llm_prompt(
mode=mode,
llm=final_llm,
pre_prompt=app_model_config.pre_prompt,
@@ -114,14 +115,22 @@ class Completion:
mode=mode
)

response = final_llm.generate([prompt])
response = final_llm.generate([prompt], stop_words)

return response

@classmethod
def get_main_llm_prompt(cls, mode: str, llm: BaseLanguageModel, pre_prompt: str, query: str, inputs: dict, chain_output: Optional[str],
def get_main_llm_prompt(cls, mode: str, llm: BaseLanguageModel, pre_prompt: str, query: str, inputs: dict,
chain_output: Optional[str],
memory: Optional[ReadOnlyConversationTokenDBBufferSharedMemory]) -> \
Union[str | List[BaseMessage]]:
Tuple[Union[str | List[BaseMessage]], Optional[List[str]]]:
# disable template string in query
query_params = OutLinePromptTemplate.from_template(template=query).input_variables
if query_params:
for query_param in query_params:
if query_param not in inputs:
inputs[query_param] = '{' + query_param + '}'

pre_prompt = PromptBuilder.process_template(pre_prompt) if pre_prompt else pre_prompt
if mode == 'completion':
prompt_template = OutLinePromptTemplate.from_template(
@@ -142,6 +151,11 @@ And answer according to the language of the user's question.

if chain_output:
inputs['context'] = chain_output
context_params = OutLinePromptTemplate.from_template(template=chain_output).input_variables
if context_params:
for context_param in context_params:
if context_param not in inputs:
inputs[context_param] = '{' + context_param + '}'

prompt_inputs = {k: inputs[k] for k in prompt_template.input_variables if k in inputs}
prompt_content = prompt_template.format(

@@ -151,9 +165,9 @@ And answer according to the language of the user's question.

if isinstance(llm, BaseChatModel):
# use chat llm as completion model
return [HumanMessage(content=prompt_content)]
return [HumanMessage(content=prompt_content)], None
else:
return prompt_content
return prompt_content, None
else:
messages: List[BaseMessage] = []
@@ -161,11 +175,19 @@ And answer according to the language of the user's question.
"query": query
}

human_message_prompt = "{query}"
human_message_prompt = ""

if pre_prompt:
pre_prompt_inputs = {k: inputs[k] for k in
OutLinePromptTemplate.from_template(template=pre_prompt).input_variables
if k in inputs}

if pre_prompt_inputs:
human_inputs.update(pre_prompt_inputs)

if chain_output:
human_inputs['context'] = chain_output
human_message_instruction = """Use the following CONTEXT as your learned knowledge.
human_message_prompt += """Use the following CONTEXT as your learned knowledge.
[CONTEXT]
{context}
[END CONTEXT]
@@ -176,15 +198,35 @@ When answer to user:
Avoid mentioning that you obtained the information from the context.
And answer according to the language of the user's question.
"""
if pre_prompt:
human_inputs.update(inputs)
human_message_instruction += pre_prompt + "\n"

human_message_prompt = human_message_instruction + "Q:{query}\nA:"
else:
if pre_prompt:
human_inputs.update(inputs)
human_message_prompt = pre_prompt + "\n" + human_message_prompt
if pre_prompt:
human_message_prompt += pre_prompt

query_prompt = "\nHuman: {query}\nAI: "

if memory:
# append chat histories
tmp_human_message = PromptBuilder.to_human_message(
prompt_content=human_message_prompt + query_prompt,
inputs=human_inputs
)

curr_message_tokens = memory.llm.get_messages_tokens([tmp_human_message])
rest_tokens = llm_constant.max_context_token_length[memory.llm.model_name] \
- memory.llm.max_tokens - curr_message_tokens
rest_tokens = max(rest_tokens, 0)
histories = cls.get_history_messages_from_memory(memory, rest_tokens)

# disable template string in query
histories_params = OutLinePromptTemplate.from_template(template=histories).input_variables
if histories_params:
for histories_param in histories_params:
if histories_param not in human_inputs:
human_inputs[histories_param] = '{' + histories_param + '}'

human_message_prompt += "\n\n" + histories

human_message_prompt += query_prompt

# construct main prompt
human_message = PromptBuilder.to_human_message(
@@ -192,23 +234,14 @@ And answer according to the language of the user's question.
inputs=human_inputs
)

if memory:
# append chat histories
tmp_messages = messages.copy() + [human_message]
curr_message_tokens = memory.llm.get_messages_tokens(tmp_messages)
rest_tokens = llm_constant.max_context_token_length[
memory.llm.model_name] - memory.llm.max_tokens - curr_message_tokens
rest_tokens = max(rest_tokens, 0)
history_messages = cls.get_history_messages_from_memory(memory, rest_tokens)
messages += history_messages

messages.append(human_message)

return messages
return messages, ['\nHuman:']

@classmethod
def get_llm_callback_manager(cls, llm: Union[StreamableOpenAI, StreamableChatOpenAI],
streaming: bool, conversation_message_task: ConversationMessageTask) -> CallbackManager:
streaming: bool,
conversation_message_task: ConversationMessageTask) -> CallbackManager:
llm_callback_handler = LLMCallbackHandler(llm, conversation_message_task)
if streaming:
callback_handlers = [llm_callback_handler, DifyStreamingStdOutCallbackHandler()]

@@ -220,7 +253,7 @@ And answer according to the language of the user's question.
@classmethod
def get_history_messages_from_memory(cls, memory: ReadOnlyConversationTokenDBBufferSharedMemory,
max_token_limit: int) -> \
List[BaseMessage]:
str:
"""Get memory messages."""
memory.max_token_limit = max_token_limit
memory_key = memory.memory_variables[0]

@@ -290,7 +323,7 @@ And answer according to the language of the user's question.
)

# get llm prompt
original_prompt = cls.get_main_llm_prompt(
original_prompt, _ = cls.get_main_llm_prompt(
mode="completion",
llm=llm,
pre_prompt=pre_prompt,
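The hunks above make `get_main_llm_prompt` return a `(prompt, stop_words)` tuple and pass the stop list (for example `['\nHuman:']`) into `final_llm.generate`. For readers unfamiliar with stop sequences, here is a minimal, generic sketch of the effect, independent of any particular LLM SDK (it is not the actual langchain call):

```python
# Minimal sketch of what a stop sequence does: generation is cut off at the
# first occurrence of a stop word, so a chat-style prompt cannot keep writing
# the next "Human:" turn by itself.
def apply_stop_words(generated_text: str, stop_words: list[str] | None) -> str:
    if not stop_words:
        return generated_text
    cut = len(generated_text)
    for stop in stop_words:
        idx = generated_text.find(stop)
        if idx != -1:
            cut = min(cut, idx)
    return generated_text[:cut]

print(apply_stop_words("Sure, here you go.\nHuman: thanks", ["\nHuman:"]))
# -> "Sure, here you go."
```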
@@ -56,6 +56,9 @@ class ConversationMessageTask:
)

def init(self):
provider_name = LLMBuilder.get_default_provider(self.app.tenant_id)
self.model_dict['provider'] = provider_name

override_model_configs = None
if self.is_override:
override_model_configs = {
@@ -173,6 +173,13 @@ class OpenAIEmbedding(BaseEmbedding):
Can be overriden for batch queries.

"""
if self.openai_api_type and self.openai_api_type == 'azure':
embeddings = []
for text in texts:
embeddings.append(self._get_text_embedding(text))

return embeddings

if self.deployment_name is not None:
engine = self.deployment_name
else:

@@ -187,6 +194,13 @@ class OpenAIEmbedding(BaseEmbedding):

async def _aget_text_embeddings(self, texts: List[str]) -> List[List[float]]:
"""Asynchronously get text embeddings."""
if self.openai_api_type and self.openai_api_type == 'azure':
embeddings = []
for text in texts:
embeddings.append(await self._aget_text_embedding(text))

return embeddings

if self.deployment_name is not None:
engine = self.deployment_name
else:
@@ -46,3 +46,15 @@ class IndexBuilder:
prompt_helper=prompt_helper,
embed_model=OpenAIEmbedding(**model_credentials),
)

@classmethod
def get_fake_llm_service_context(cls, tenant_id: str) -> ServiceContext:
llm = LLMBuilder.to_llm(
tenant_id=tenant_id,
model_name='fake'
)

return ServiceContext.from_defaults(
llm_predictor=LLMPredictor(llm=llm),
embed_model=OpenAIEmbedding()
)
@@ -83,7 +83,7 @@ class VectorIndex:
if not self._dataset.index_struct_dict:
return

service_context = IndexBuilder.get_default_service_context(tenant_id=self._dataset.tenant_id)
service_context = IndexBuilder.get_fake_llm_service_context(tenant_id=self._dataset.tenant_id)

index = vector_store.get_index(
service_context=service_context,

@@ -101,7 +101,7 @@ class VectorIndex:
if not self._dataset.index_struct_dict:
return

service_context = IndexBuilder.get_default_service_context(tenant_id=self._dataset.tenant_id)
service_context = IndexBuilder.get_fake_llm_service_context(tenant_id=self._dataset.tenant_id)

index = vector_store.get_index(
service_context=service_context,
@@ -1,22 +1,24 @@
import json
import logging
from typing import Optional, Union

import requests

from core.llm.provider.base import BaseProvider
from core.llm.provider.errors import ValidateFailedError
from models.provider import ProviderName


class AzureProvider(BaseProvider):
def get_models(self, model_id: Optional[str] = None) -> list[dict]:
credentials = self.get_credentials(model_id)
def get_models(self, model_id: Optional[str] = None, credentials: Optional[dict] = None) -> list[dict]:
credentials = self.get_credentials(model_id) if not credentials else credentials
url = "{}/openai/deployments?api-version={}".format(
credentials.get('openai_api_base'),
credentials.get('openai_api_version')
str(credentials.get('openai_api_base')),
str(credentials.get('openai_api_version'))
)

headers = {
"api-key": credentials.get('openai_api_key'),
"api-key": str(credentials.get('openai_api_key')),
"content-type": "application/json; charset=utf-8"
}
@@ -29,8 +31,10 @@ class AzureProvider(BaseProvider):
'name': '{} ({})'.format(deployment['id'], deployment['model'])
} for deployment in result['data'] if deployment['status'] == 'succeeded']
else:
# TODO: optimize in future
raise Exception('Failed to get deployments from Azure OpenAI. Status code: {}'.format(response.status_code))
if response.status_code == 401:
raise AzureAuthenticationError()
else:
raise AzureRequestFailedError('Failed to request Azure OpenAI. Status code: {}'.format(response.status_code))

def get_credentials(self, model_id: Optional[str] = None) -> dict:
"""

@@ -38,7 +42,7 @@ class AzureProvider(BaseProvider):
"""
config = self.get_provider_api_key(model_id=model_id)
config['openai_api_type'] = 'azure'
config['deployment_name'] = model_id.replace('.', '')
config['deployment_name'] = model_id.replace('.', '') if model_id else None
return config

def get_provider_name(self):

@@ -54,7 +58,7 @@ class AzureProvider(BaseProvider):
config = {
'openai_api_type': 'azure',
'openai_api_version': '2023-03-15-preview',
'openai_api_base': 'https://<your-domain-prefix>.openai.azure.com/',
'openai_api_base': '',
'openai_api_key': ''
}

@@ -63,7 +67,7 @@ class AzureProvider(BaseProvider):
config = {
'openai_api_type': 'azure',
'openai_api_version': '2023-03-15-preview',
'openai_api_base': 'https://<your-domain-prefix>.openai.azure.com/',
'openai_api_base': '',
'openai_api_key': ''
}
@@ -74,14 +78,47 @@ class AzureProvider(BaseProvider):

def get_token_type(self):
# TODO: change to dict when implemented
return lambda value: value
return dict

def config_validate(self, config: Union[dict | str]):
"""
Validates the given config.
"""
# TODO: implement
pass
try:
if not isinstance(config, dict):
raise ValueError('Config must be a object.')

if 'openai_api_version' not in config:
config['openai_api_version'] = '2023-03-15-preview'

models = self.get_models(credentials=config)

if not models:
raise ValidateFailedError("Please add deployments for 'text-davinci-003', "
"'gpt-3.5-turbo', 'text-embedding-ada-002'.")

fixed_model_ids = [
'text-davinci-003',
'gpt-35-turbo',
'text-embedding-ada-002'
]

current_model_ids = [model['id'] for model in models]

missing_model_ids = [fixed_model_id for fixed_model_id in fixed_model_ids if
fixed_model_id not in current_model_ids]

if missing_model_ids:
raise ValidateFailedError("Please add deployments for '{}'.".format(", ".join(missing_model_ids)))
except AzureAuthenticationError:
raise ValidateFailedError('Validation failed, please check your API Key.')
except (requests.ConnectionError, requests.RequestException):
raise ValidateFailedError('Validation failed, please check your API Base Endpoint.')
except AzureRequestFailedError as ex:
raise ValidateFailedError('Validation failed, error: {}.'.format(str(ex)))
except Exception as ex:
logging.exception('Azure OpenAI Credentials validation failed')
raise ValidateFailedError('Validation failed, error: {}.'.format(str(ex)))

def get_encrypted_token(self, config: Union[dict | str]):
"""

@@ -101,3 +138,11 @@ class AzureProvider(BaseProvider):
config = json.loads(token)
config['openai_api_key'] = self.decrypt_token(config['openai_api_key'])
return config


class AzureAuthenticationError(Exception):
pass


class AzureRequestFailedError(Exception):
pass
@@ -21,7 +21,7 @@ class TimestampField(fields.Raw):

def email(email):
# Define a regex pattern for email addresses
pattern = r"^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$"
pattern = r"^[\w\.-]+@([\w-]+\.)+[\w-]{2,}$"
# Check if the email matches the pattern
if re.match(pattern, email) is not None:
return email
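A quick illustration of what the tightened email pattern changes: it now requires a dotted domain and a final label of at least two characters. The sample addresses below are made-up test strings, not taken from the codebase:

```python
import re

# New pattern from the hunk above: dotted domain required, TLD of 2+ word chars.
pattern = r"^[\w\.-]+@([\w-]+\.)+[\w-]{2,}$"

samples = {
    "user.name@example.io": True,      # dotted domain, 2+ char TLD
    "user@mail.example.co.uk": True,
    "user@localhost": False,           # no dot in the domain part
    "user@example.c": False,           # final label shorter than 2 chars
}
for email, expected in samples.items():
    assert bool(re.match(pattern, email)) is expected, email
print("all sample emails behave as expected")
```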
@@ -18,6 +18,7 @@ from services.errors.account import NoPermissionError
from services.errors.dataset import DatasetNameDuplicateError
from services.errors.document import DocumentIndexingError
from services.errors.file import FileNotExistsError
from tasks.deal_dataset_vector_index_task import deal_dataset_vector_index_task
from tasks.document_indexing_task import document_indexing_task

@@ -97,7 +98,12 @@ class DatasetService:
def update_dataset(dataset_id, data, user):
dataset = DatasetService.get_dataset(dataset_id)
DatasetService.check_dataset_permission(dataset, user)

if dataset.indexing_technique != data['indexing_technique']:
# if update indexing_technique
if data['indexing_technique'] == 'economy':
deal_dataset_vector_index_task.delay(dataset_id, 'remove')
elif data['indexing_technique'] == 'high_quality':
deal_dataset_vector_index_task.delay(dataset_id, 'add')
filtered_data = {k: v for k, v in data.items() if v is not None or k == 'description'}

filtered_data['updated_by'] = user.id
@@ -0,0 +1,75 @@
import logging
import time

import click
from celery import shared_task
from llama_index.data_structs.node_v2 import DocumentRelationship, Node
from core.index.vector_index import VectorIndex
from extensions.ext_database import db
from models.dataset import DocumentSegment, Document, Dataset


@shared_task
def deal_dataset_vector_index_task(dataset_id: str, action: str):
"""
Async deal dataset from index
:param dataset_id: dataset_id
:param action: action
Usage: deal_dataset_vector_index_task.delay(dataset_id, action)
"""
logging.info(click.style('Start deal dataset vector index: {}'.format(dataset_id), fg='green'))
start_at = time.perf_counter()

try:
dataset = Dataset.query.filter_by(
id=dataset_id
).first()
if not dataset:
raise Exception('Dataset not found')
documents = Document.query.filter_by(dataset_id=dataset_id).all()
if documents:
vector_index = VectorIndex(dataset=dataset)
for document in documents:
# delete from vector index
if action == "remove":
vector_index.del_doc(document.id)
elif action == "add":
segments = db.session.query(DocumentSegment).filter(
DocumentSegment.document_id == document.id,
DocumentSegment.enabled == True
) .order_by(DocumentSegment.position.asc()).all()

nodes = []
previous_node = None
for segment in segments:
relationships = {
DocumentRelationship.SOURCE: document.id
}

if previous_node:
relationships[DocumentRelationship.PREVIOUS] = previous_node.doc_id

previous_node.relationships[DocumentRelationship.NEXT] = segment.index_node_id

node = Node(
doc_id=segment.index_node_id,
doc_hash=segment.index_node_hash,
text=segment.content,
extra_info=None,
node_info=None,
relationships=relationships
)

previous_node = node
nodes.append(node)
# save vector index
vector_index.add_nodes(
nodes=nodes,
duplicate_check=True
)

end_at = time.perf_counter()
logging.info(
click.style('Deal dataset vector index: {} latency: {}'.format(dataset_id, end_at - start_at), fg='green'))
except Exception:
logging.exception("Deal dataset vector index failed")
@@ -11,7 +11,7 @@ class DifyClient {

public function __construct($api_key) {
$this->api_key = $api_key;
$this->base_url = "https://api.dify.ai/v1";
$this->base_url = "https://api.dify.ai/v1/";
$this->client = new Client([
'base_uri' => $this->base_url,
'headers' => [

@@ -37,12 +37,12 @@ class DifyClient {
'rating' => $rating,
'user' => $user,
];
return $this->send_request('POST', "/messages/{$message_id}/feedbacks", $data);
return $this->send_request('POST', "messages/{$message_id}/feedbacks", $data);
}

public function get_application_parameters($user) {
$params = ['user' => $user];
return $this->send_request('GET', '/parameters', null, $params);
return $this->send_request('GET', 'parameters', null, $params);
}
}

@@ -54,7 +54,7 @@ class CompletionClient extends DifyClient {
'response_mode' => $response_mode,
'user' => $user,
];
return $this->send_request('POST', '/completion-messages', $data, null, $response_mode === 'streaming');
return $this->send_request('POST', 'completion-messages', $data, null, $response_mode === 'streaming');
}
}

@@ -70,7 +70,7 @@ class ChatClient extends DifyClient {
$data['conversation_id'] = $conversation_id;
}

return $this->send_request('POST', '/chat-messages', $data, null, $response_mode === 'streaming');
return $this->send_request('POST', 'chat-messages', $data, null, $response_mode === 'streaming');
}

public function get_conversation_messages($user, $conversation_id = null, $first_id = null, $limit = null) {

@@ -86,7 +86,7 @@ class ChatClient extends DifyClient {
$params['limit'] = $limit;
}

return $this->send_request('GET', '/messages', null, $params);
return $this->send_request('GET', 'messages', null, $params);
}

public function get_conversations($user, $first_id = null, $limit = null, $pinned = null) {

@@ -96,7 +96,7 @@ class ChatClient extends DifyClient {
'limit' => $limit,
'pinned'=> $pinned,
];
return $this->send_request('GET', '/conversations', null, $params);
return $this->send_request('GET', 'conversations', null, $params);
}

public function rename_conversation($conversation_id, $name, $user) {

@@ -104,6 +104,6 @@ class ChatClient extends DifyClient {
'name' => $name,
'user' => $user,
];
return $this->send_request('PATCH', "/conversations/{$conversation_id}", $data);
return $this->send_request('PATCH', "conversations/{$conversation_id}", $data);
}
}
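The PHP hunks above switch the client's `base_url` to end with a slash and make every request path relative. A minimal Python sketch of standard RFC 3986 reference resolution (which Guzzle follows when combining `base_uri` with a request path, as far as I know) shows why both halves of the change are needed to keep the `/v1` prefix:

```python
from urllib.parse import urljoin

# With an absolute path ("/messages") the "/v1" prefix of the base URL is dropped:
print(urljoin("https://api.dify.ai/v1", "/messages"))
# -> https://api.dify.ai/messages

# With a trailing slash on the base and a relative path, the prefix is kept:
print(urljoin("https://api.dify.ai/v1/", "messages"))
# -> https://api.dify.ai/v1/messages
```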
@@ -0,0 +1,12 @@
# For production release, change this to PRODUCTION
NEXT_PUBLIC_DEPLOY_ENV=DEVELOPMENT
# The deployment edition, SELF_HOSTED or CLOUD
NEXT_PUBLIC_EDITION=SELF_HOSTED
# The base URL of console application, refers to the Console base URL of WEB service if console domain is
# different from api or web app domain.
# example: http://cloud.dify.ai/console/api
NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
# The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain.
# example: http://udify.app/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
@@ -16,10 +16,12 @@ import AppsContext from '@/context/app-context'

export type AppCardProps = {
app: App
onDelete?: () => void
}

const AppCard = ({
app,
onDelete
}: AppCardProps) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)

@@ -35,6 +37,8 @@ const AppCard = ({
try {
await deleteApp(app.id)
notify({ type: 'success', message: t('app.appDeleted') })
if (onDelete)
onDelete()
mutateApps()
}
catch (e: any) {

@@ -47,7 +51,7 @@ const AppCard = ({
<>
<Link href={`/app/${app.id}/overview`} className={style.listItem}>
<div className={style.listItemTitle}>
<AppIcon size='small' icon={app.icon} background={app.icon_background}/>
<AppIcon size='small' icon={app.icon} background={app.icon_background} />
<div className={style.listItemHeading}>
<div className={style.listItemHeadingContent}>{app.name}</div>
</div>

@@ -42,7 +42,9 @@ const Apps = () => {

return (
<nav className='grid content-start grid-cols-1 gap-4 px-12 pt-8 sm:grid-cols-2 lg:grid-cols-4 grow shrink-0'>
{data?.map(({ data: apps }) => apps.map(app => (<AppCard key={app.id} app={app} />)))}
{data?.map(({ data: apps }) => apps.map(app => (
<AppCard key={app.id} app={app} onDelete={mutate} />
)))}
<NewAppCard ref={anchorRef} onSuccess={mutate} />
</nav>
)
@@ -3,7 +3,13 @@ import { getLocaleOnServer } from '@/i18n/server'
import { useTranslation } from '@/i18n/i18next-serverside-config'
import Form from '@/app/components/datasets/settings/form'

const Settings = async () => {
type Props = {
params: { datasetId: string }
}

const Settings = async ({
params: { datasetId },
}: Props) => {
const locale = getLocaleOnServer()
const { t } = await useTranslation(locale, 'dataset-settings')

@@ -14,7 +20,7 @@ const Settings = async () => {
<div className='text-sm text-gray-500'>{t('desc')}</div>
</div>
<div>
<Form />
<Form datasetId={datasetId} />
</div>
</div>
)
@@ -18,16 +18,16 @@ import classNames from 'classnames'

export type DatasetCardProps = {
dataset: DataSet
onDelete?: () => void
}

const DatasetCard = ({
dataset,
onDelete
}: DatasetCardProps) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)

const { mutate: mutateDatasets } = useSWR({ url: '/datasets', params: { page: 1 } }, fetchDatasets)

const [showConfirmDelete, setShowConfirmDelete] = useState(false)
const onDeleteClick: MouseEventHandler = useCallback((e) => {
e.preventDefault()

@@ -37,7 +37,8 @@ const DatasetCard = ({
try {
await deleteDataset(dataset.id)
notify({ type: 'success', message: t('dataset.datasetDeleted') })
mutateDatasets()
if (onDelete)
onDelete()
}
catch (e: any) {
notify({ type: 'error', message: `${t('dataset.datasetDeleteFailed')}${'message' in e ? `: ${e.message}` : ''}` })

@@ -42,7 +42,9 @@ const Datasets = () => {

return (
<nav className='grid content-start grid-cols-1 gap-4 px-12 pt-8 sm:grid-cols-2 lg:grid-cols-4 grow shrink-0'>
{data?.map(({ data: datasets }) => datasets.map(dataset => (<DatasetCard key={dataset.id} dataset={dataset} />)))}
{data?.map(({ data: datasets }) => datasets.map(dataset => (
<DatasetCard key={dataset.id} dataset={dataset} onDelete={mutate} />)
))}
<NewDatasetCard ref={anchorRef} />
</nav>
)
@@ -1,18 +1,18 @@
'use client'
import React from 'react'
import { useState, FC, ChangeEvent } from 'react'
import data from '@emoji-mart/data'
import { init, SearchIndex } from 'emoji-mart'
// import AppIcon from '@/app/components/base/app-icon'
import cn from 'classnames'
import Divider from '@/app/components/base/divider'

import Button from '@/app/components/base/button'
import s from './style.module.css'
import { useState, FC, ChangeEvent } from 'react'
import {
MagnifyingGlassIcon
} from '@heroicons/react/24/outline'
import React from 'react'

import Modal from '@/app/components/base/modal'
import { useTranslation } from 'react-i18next'

declare global {
namespace JSX {

@@ -69,6 +69,7 @@ const EmojiPicker: FC<IEmojiPickerProps> = ({
onClose

}) => {
const { t } = useTranslation()
const { categories } = data as any
const [selectedEmoji, setSelectedEmoji] = useState('')
const [selectedBackground, setSelectedBackground] = useState(backgroundColors[0])

@@ -187,7 +188,7 @@ const EmojiPicker: FC<IEmojiPickerProps> = ({
<Button type="default" className='w-full' onClick={() => {
onClose && onClose()
}}>
Cancel
{t('app.emoji.cancel')}
</Button>
<Button
disabled={selectedEmoji == ''}

@@ -196,7 +197,7 @@ const EmojiPicker: FC<IEmojiPickerProps> = ({
onClick={() => {
onSelect && onSelect(selectedEmoji, selectedBackground)
}}>
OK
{t('app.emoji.ok')}
</Button>
</div>
</Modal> : <>
@@ -44,7 +44,7 @@
@apply h-8 py-0 bg-gray-50 hover:bg-gray-100 rounded-lg shadow-none !important;
}
.segModalContent {
@apply h-96 text-gray-800 text-base overflow-y-scroll;
@apply h-96 text-gray-800 text-base break-all overflow-y-scroll;
white-space: pre-line;
}
.footer {
@@ -1,5 +1,6 @@
'use client'
import { useState } from 'react'
import { Dispatch, SetStateAction, useEffect, useState } from 'react'
import useSWR from 'swr'
import { useContext } from 'use-context-selector'
import { BookOpenIcon } from '@heroicons/react/24/outline'
import { useTranslation } from 'react-i18next'

@@ -7,8 +8,8 @@ import { ToastContext } from '@/app/components/base/toast'
import PermissionsRadio from '../permissions-radio'
import IndexMethodRadio from '../index-method-radio'
import Button from '@/app/components/base/button'
import { useDatasetsContext } from '@/context/datasets-context'
import { updateDatasetSetting } from '@/service/datasets'
import { updateDatasetSetting, fetchDataDetail } from '@/service/datasets'
import { DataSet } from '@/models/datasets'

const rowClass = `
flex justify-between py-4

@@ -20,13 +21,25 @@ const inputClass = `
w-[480px] px-3 bg-gray-100 text-sm text-gray-800 rounded-lg outline-none appearance-none
`

const Form = () => {
const useInitialValue = <T,>(depend: T, dispatch: Dispatch<SetStateAction<T>>) => {
useEffect(() => {
dispatch(depend)
}, [depend])
}

type Props = {
datasetId: string
}

const Form = ({
datasetId
}: Props) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const { currentDataset, mutateDatasets } = useDatasetsContext()
const { data: currentDataset, mutate: mutateDatasets } = useSWR(datasetId, fetchDataDetail)
const [loading, setLoading] = useState(false)
const [name, setName] = useState(currentDataset?.name)
const [description, setDescription] = useState(currentDataset?.description)
const [name, setName] = useState(currentDataset?.name ?? '')
const [description, setDescription] = useState(currentDataset?.description ?? '')
const [permission, setPermission] = useState(currentDataset?.permission)
const [indexMethod, setIndexMethod] = useState(currentDataset?.indexing_technique)
@@ -48,7 +61,7 @@ const Form = () => {
}
})
notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
mutateDatasets()
await mutateDatasets()
} catch (e) {
notify({ type: 'error', message: t('common.actionMsg.modificationFailed') })
} finally {

@@ -56,6 +69,11 @@ const Form = () => {
}
}

useInitialValue<string>(currentDataset?.name ?? '', setName)
useInitialValue<string>(currentDataset?.description ?? '', setDescription)
useInitialValue<DataSet['permission'] | undefined>(currentDataset?.permission, setPermission)
useInitialValue<DataSet['indexing_technique'] | undefined>(currentDataset?.indexing_technique, setIndexMethod)

return (
<div className='w-[800px] px-16 py-6'>
<div className={rowClass}>
@@ -1,6 +1,6 @@
'use client'
import React, { useEffect, useState } from 'react'
import useCopyToClipboard from '@/hooks/use-copy-to-clipboard'
import copy from 'copy-to-clipboard'
import Tooltip from '@/app/components/base/tooltip'
import { t } from 'i18next'
import s from './style.module.css'

@@ -18,7 +18,6 @@ const InputCopy = ({
readOnly = true,
children,
}: IInputCopyProps) => {
const [_, copy] = useCopyToClipboard()
const [isCopied, setIsCopied] = useState(false)

useEffect(() => {
@@ -1,14 +1,21 @@
import type { Provider, ProviderAzureToken } from '@/models/common'
import { ProviderName } from '@/models/common'
import { useTranslation } from 'react-i18next'
import Link from 'next/link'
import { ArrowTopRightOnSquareIcon } from '@heroicons/react/24/outline'
import ProviderInput, { ProviderValidateTokenInput} from '../provider-input'
import { useState } from 'react'
import { ValidatedStatus } from '../provider-input/useValidateToken'
import { useState, useEffect } from 'react'
import ProviderInput from '../provider-input'
import useValidateToken, { ValidatedStatus, ValidatedStatusState } from '../provider-input/useValidateToken'
import {
ValidatedErrorIcon,
ValidatedSuccessIcon,
ValidatingTip,
ValidatedErrorOnAzureOpenaiTip
} from '../provider-input/Validate'

interface IAzureProviderProps {
provider: Provider
onValidatedStatus: (status?: ValidatedStatus) => void
onValidatedStatus: (status?: ValidatedStatusState) => void
onTokenChange: (token: ProviderAzureToken) => void
}
const AzureProvider = ({
@@ -17,19 +24,51 @@ const AzureProvider = ({
onValidatedStatus
}: IAzureProviderProps) => {
const { t } = useTranslation()
const [token, setToken] = useState(provider.token as ProviderAzureToken || {})
const handleFocus = () => {
if (token === provider.token) {
token.openai_api_key = ''
const [token, setToken] = useState<ProviderAzureToken>(provider.provider_name === ProviderName.AZURE_OPENAI ? {...provider.token}: {})
const [ validating, validatedStatus, setValidatedStatus, validate ] = useValidateToken(provider.provider_name)
const handleFocus = (type: keyof ProviderAzureToken) => {
if (token[type] === (provider?.token as ProviderAzureToken)[type]) {
token[type] = ''
setToken({...token})
onTokenChange({...token})
setValidatedStatus({})
}
}
const handleChange = (type: keyof ProviderAzureToken, v: string) => {
const handleChange = (type: keyof ProviderAzureToken, v: string, validate: any) => {
token[type] = v
setToken({...token})
onTokenChange({...token})
validate({...token}, {
beforeValidating: () => {
if (!token.openai_api_base || !token.openai_api_key) {
setValidatedStatus({})
return false
}
return true
}
})
}
const getValidatedIcon = () => {
if (validatedStatus.status === ValidatedStatus.Error || validatedStatus.status === ValidatedStatus.Exceed) {
return <ValidatedErrorIcon />
}
if (validatedStatus.status === ValidatedStatus.Success) {
return <ValidatedSuccessIcon />
}
}
const getValidatedTip = () => {
if (validating) {
return <ValidatingTip />
}
if (validatedStatus.status === ValidatedStatus.Error) {
return <ValidatedErrorOnAzureOpenaiTip errorMessage={validatedStatus.message ?? ''} />
}
}
useEffect(() => {
if (typeof onValidatedStatus === 'function') {
onValidatedStatus(validatedStatus)
}
}, [validatedStatus])

return (
<div className='px-4 py-3'>
@ -38,17 +77,19 @@ const AzureProvider = ({
|
|||
name={t('common.provider.azure.apiBase')}
|
||||
placeholder={t('common.provider.azure.apiBasePlaceholder')}
|
||||
value={token.openai_api_base}
|
||||
onChange={(v) => handleChange('openai_api_base', v)}
|
||||
onChange={(v) => handleChange('openai_api_base', v, validate)}
|
||||
onFocus={() => handleFocus('openai_api_base')}
|
||||
validatedIcon={getValidatedIcon()}
|
||||
/>
|
||||
<ProviderValidateTokenInput
|
||||
<ProviderInput
|
||||
className='mb-4'
|
||||
name={t('common.provider.azure.apiKey')}
|
||||
placeholder={t('common.provider.azure.apiKeyPlaceholder')}
|
||||
value={token.openai_api_key}
|
||||
onChange={v => handleChange('openai_api_key', v)}
|
||||
onFocus={handleFocus}
|
||||
onValidatedStatus={onValidatedStatus}
|
||||
providerName={provider.provider_name}
|
||||
onChange={(v) => handleChange('openai_api_key', v, validate)}
|
||||
onFocus={() => handleFocus('openai_api_key')}
|
||||
validatedIcon={getValidatedIcon()}
|
||||
validatedTip={getValidatedTip()}
|
||||
/>
|
||||
<Link className="flex items-center text-xs cursor-pointer text-primary-600" href="https://platform.openai.com/account/api-keys" target={'_blank'}>
|
||||
{t('common.provider.azure.helpTip')}
|
||||
|
|
|
|||
|
|
@ -67,7 +67,7 @@ const ProviderPage = () => {
  const providerHosted = data?.filter(provider => provider.provider_name === 'openai' && provider.provider_type === 'system')?.[0]

  return (
    <div>
      <div className='pb-7'>
        {
          providerHosted && !IS_CE_EDITION && (
            <>
@ -1,222 +1,91 @@
|
|||
import { ChangeEvent, useEffect, useRef, useState } from 'react'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import type { Provider } from '@/models/common'
|
||||
import { useState, useEffect } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { debounce } from 'lodash-es'
|
||||
import ProviderInput from '../provider-input'
|
||||
import Link from 'next/link'
|
||||
import useSWR from 'swr'
|
||||
import { ArrowTopRightOnSquareIcon, PencilIcon } from '@heroicons/react/24/outline'
|
||||
import { CheckCircleIcon, ExclamationCircleIcon } from '@heroicons/react/24/solid'
|
||||
import Button from '@/app/components/base/button'
|
||||
import s from './index.module.css'
|
||||
import classNames from 'classnames'
|
||||
import { fetchTenantInfo, validateProviderKey, updateProviderAIKey } from '@/service/common'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import Indicator from '../../../indicator'
|
||||
import I18n from '@/context/i18n'
|
||||
import { ArrowTopRightOnSquareIcon } from '@heroicons/react/24/outline'
|
||||
import useValidateToken, { ValidatedStatus, ValidatedStatusState } from '../provider-input/useValidateToken'
|
||||
import {
|
||||
ValidatedErrorIcon,
|
||||
ValidatedSuccessIcon,
|
||||
ValidatingTip,
|
||||
ValidatedExceedOnOpenaiTip,
|
||||
ValidatedErrorOnOpenaiTip
|
||||
} from '../provider-input/Validate'
|
||||
|
||||
type IStatusType = 'normal' | 'verified' | 'error' | 'error-api-key-exceed-bill'
|
||||
|
||||
type TInputWithStatusProps = {
|
||||
value: string
|
||||
onChange: (v: string) => void
|
||||
onValidating: (validating: boolean) => void
|
||||
verifiedStatus: IStatusType
|
||||
onVerified: (verified: IStatusType) => void
|
||||
}
|
||||
const InputWithStatus = ({
|
||||
value,
|
||||
onChange,
|
||||
onValidating,
|
||||
verifiedStatus,
|
||||
onVerified
|
||||
}: TInputWithStatusProps) => {
|
||||
const { t } = useTranslation()
|
||||
const validateKey = useRef(debounce(async (token: string) => {
|
||||
if (!token) return
|
||||
onValidating(true)
|
||||
try {
|
||||
const res = await validateProviderKey({ url: '/workspaces/current/providers/openai/token-validate', body: { token } })
|
||||
onVerified(res.result === 'success' ? 'verified' : 'error')
|
||||
} catch (e: any) {
|
||||
if (e.status === 400) {
|
||||
e.json().then(({ code }: any) => {
|
||||
if (code === 'provider_request_failed') {
|
||||
onVerified('error-api-key-exceed-bill')
|
||||
}
|
||||
})
|
||||
} else {
|
||||
onVerified('error')
|
||||
}
|
||||
} finally {
|
||||
onValidating(false)
|
||||
}
|
||||
}, 500))
|
||||
|
||||
const handleChange = (e: ChangeEvent<HTMLInputElement>) => {
|
||||
const inputValue = e.target.value
|
||||
onChange(inputValue)
|
||||
if (!inputValue) {
|
||||
onVerified('normal')
|
||||
}
|
||||
validateKey.current(inputValue)
|
||||
}
|
||||
return (
|
||||
<div className={classNames('flex items-center h-9 px-3 bg-white border border-gray-300 rounded-lg', s.input)}>
|
||||
<input
|
||||
value={value}
|
||||
placeholder={t('common.provider.enterYourKey') || ''}
|
||||
className='w-full h-9 mr-2 appearance-none outline-none bg-transparent text-xs'
|
||||
onChange={handleChange}
|
||||
/>
|
||||
{
|
||||
verifiedStatus === 'error' && <ExclamationCircleIcon className='w-4 h-4 text-[#D92D20]' />
|
||||
}
|
||||
{
|
||||
verifiedStatus === 'verified' && <CheckCircleIcon className='w-4 h-4 text-[#039855]' />
|
||||
}
|
||||
</div>
|
||||
)
|
||||
interface IOpenaiProviderProps {
|
||||
provider: Provider
|
||||
onValidatedStatus: (status?: ValidatedStatusState) => void
|
||||
onTokenChange: (token: string) => void
|
||||
}
|
||||
|
||||
const OpenaiProvider = () => {
|
||||
const OpenaiProvider = ({
|
||||
provider,
|
||||
onValidatedStatus,
|
||||
onTokenChange
|
||||
}: IOpenaiProviderProps) => {
|
||||
const { t } = useTranslation()
|
||||
const { locale } = useContext(I18n)
|
||||
const { data: userInfo, mutate } = useSWR({ url: '/info' }, fetchTenantInfo)
|
||||
const [inputValue, setInputValue] = useState<string>('')
|
||||
const [validating, setValidating] = useState(false)
|
||||
const [editStatus, setEditStatus] = useState<IStatusType>('normal')
|
||||
const [loading, setLoading] = useState(false)
|
||||
const [editing, setEditing] = useState(false)
|
||||
const [invalidStatus, setInvalidStatus] = useState(false)
|
||||
const { notify } = useContext(ToastContext)
|
||||
const provider = userInfo?.providers?.find(({ provider }) => provider === 'openai')
|
||||
|
||||
const handleReset = () => {
|
||||
setInputValue('')
|
||||
setValidating(false)
|
||||
setEditStatus('normal')
|
||||
setLoading(false)
|
||||
setEditing(false)
|
||||
}
|
||||
const handleSave = async () => {
|
||||
if (editStatus === 'verified') {
|
||||
try {
|
||||
setLoading(true)
|
||||
await updateProviderAIKey({ url: '/workspaces/current/providers/openai/token', body: { token: inputValue ?? '' } })
|
||||
notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
|
||||
} catch (e) {
|
||||
notify({ type: 'error', message: t('common.provider.saveFailed') })
|
||||
} finally {
|
||||
setLoading(false)
|
||||
handleReset()
|
||||
mutate()
|
||||
}
|
||||
const [token, setToken] = useState(provider.token as string || '')
|
||||
const [ validating, validatedStatus, setValidatedStatus, validate ] = useValidateToken(provider.provider_name)
|
||||
const handleFocus = () => {
|
||||
if (token === provider.token) {
|
||||
setToken('')
|
||||
onTokenChange('')
|
||||
setValidatedStatus({})
|
||||
}
|
||||
}
|
||||
const handleChange = (v: string) => {
|
||||
setToken(v)
|
||||
onTokenChange(v)
|
||||
validate(v, {
|
||||
beforeValidating: () => {
|
||||
if (!v) {
|
||||
setValidatedStatus({})
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
})
|
||||
}
|
||||
useEffect(() => {
|
||||
if (provider && !provider.token_is_valid && provider.token_is_set) {
|
||||
setInvalidStatus(true)
|
||||
if (typeof onValidatedStatus === 'function') {
|
||||
onValidatedStatus(validatedStatus)
|
||||
}
|
||||
}, [userInfo])
|
||||
}, [validatedStatus])
|
||||
|
||||
const showInvalidStatus = invalidStatus && !editing
|
||||
const renderErrorMessage = () => {
|
||||
const getValidatedIcon = () => {
|
||||
if (validatedStatus?.status === ValidatedStatus.Error || validatedStatus.status === ValidatedStatus.Exceed) {
|
||||
return <ValidatedErrorIcon />
|
||||
}
|
||||
if (validatedStatus.status === ValidatedStatus.Success) {
|
||||
return <ValidatedSuccessIcon />
|
||||
}
|
||||
}
|
||||
const getValidatedTip = () => {
|
||||
if (validating) {
|
||||
return (
|
||||
<div className={`mt-2 text-primary-600 text-xs font-normal`}>
|
||||
{t('common.provider.validating')}
|
||||
</div>
|
||||
)
|
||||
return <ValidatingTip />
|
||||
}
|
||||
if (editStatus === 'error-api-key-exceed-bill') {
|
||||
return (
|
||||
<div className={`mt-2 text-[#D92D20] text-xs font-normal`}>
|
||||
{t('common.provider.apiKeyExceedBill')}
|
||||
<Link
|
||||
className='underline'
|
||||
href="https://platform.openai.com/account/api-keys"
|
||||
target={'_blank'}>
|
||||
{locale === 'en' ? 'this link' : '这篇文档'}
|
||||
</Link>
|
||||
</div>
|
||||
)
|
||||
if (validatedStatus?.status === ValidatedStatus.Error) {
|
||||
return <ValidatedErrorOnOpenaiTip errorMessage={validatedStatus.message ?? ''} />
|
||||
}
|
||||
if (showInvalidStatus || editStatus === 'error') {
|
||||
return (
|
||||
<div className={`mt-2 text-[#D92D20] text-xs font-normal`}>
|
||||
{t('common.provider.invalidKey')}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='px-4 pt-3 pb-4'>
|
||||
<div className='flex items-center mb-2 h-6'>
|
||||
<div className='grow text-[13px] text-gray-800 font-medium'>
|
||||
{t('common.provider.apiKey')}
|
||||
</div>
|
||||
{
|
||||
provider && !editing && (
|
||||
<div
|
||||
className='
|
||||
flex items-center h-6 px-2 rounded-md border border-gray-200
|
||||
text-xs font-medium text-gray-700 cursor-pointer
|
||||
'
|
||||
onClick={() => setEditing(true)}
|
||||
>
|
||||
<PencilIcon className='mr-1 w-3 h-3 text-gray-500' />
|
||||
{t('common.operation.edit')}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
{
|
||||
(inputValue || editing) && (
|
||||
<>
|
||||
<Button
|
||||
className={classNames('mr-1', s.button)}
|
||||
loading={loading}
|
||||
onClick={handleReset}
|
||||
>
|
||||
{t('common.operation.cancel')}
|
||||
</Button>
|
||||
<Button
|
||||
type='primary'
|
||||
className={classNames(s.button)}
|
||||
loading={loading}
|
||||
onClick={handleSave}>
|
||||
{t('common.operation.save')}
|
||||
</Button>
|
||||
</>
|
||||
)
|
||||
}
|
||||
</div>
|
||||
{
|
||||
(!provider || (provider && editing)) && (
|
||||
<InputWithStatus
|
||||
value={inputValue}
|
||||
onChange={v => setInputValue(v)}
|
||||
verifiedStatus={editStatus}
|
||||
onVerified={v => setEditStatus(v)}
|
||||
onValidating={v => setValidating(v)}
|
||||
/>
|
||||
)
|
||||
}
|
||||
{
|
||||
(provider && !editing) && (
|
||||
<div className={classNames('flex justify-between items-center bg-white px-3 h-9 rounded-lg text-gray-800 text-xs font-medium', s.input)}>
|
||||
sk-0C...skuA
|
||||
<Indicator color={(provider.token_is_set && provider.token_is_valid) ? 'green' : 'orange'} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
{renderErrorMessage()}
|
||||
<Link className="inline-flex items-center mt-3 text-xs font-normal cursor-pointer text-primary-600 w-fit" href="https://platform.openai.com/account/api-keys" target={'_blank'}>
|
||||
{t('appOverview.welcome.getKeyTip')}
|
||||
<ArrowTopRightOnSquareIcon className='w-3 h-3 ml-1 text-primary-600' aria-hidden="true" />
|
||||
</Link>
|
||||
</div>
|
||||
<ProviderInput
|
||||
value={token}
|
||||
name={t('common.provider.apiKey')}
|
||||
placeholder={t('common.provider.enterYourKey')}
|
||||
onChange={handleChange}
|
||||
onFocus={handleFocus}
|
||||
validatedIcon={getValidatedIcon()}
|
||||
validatedTip={getValidatedTip()}
|
||||
/>
|
||||
<Link className="inline-flex items-center mt-3 text-xs font-normal cursor-pointer text-primary-600 w-fit" href="https://platform.openai.com/account/api-keys" target={'_blank'}>
|
||||
{t('appOverview.welcome.getKeyTip')}
|
||||
<ArrowTopRightOnSquareIcon className='w-3 h-3 ml-1 text-primary-600' aria-hidden="true" />
|
||||
</Link>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,52 +0,0 @@
|
|||
import type { Provider } from '@/models/common'
|
||||
import { useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { ProviderValidateTokenInput } from '../provider-input'
|
||||
import Link from 'next/link'
|
||||
import { ArrowTopRightOnSquareIcon } from '@heroicons/react/24/outline'
|
||||
import { ValidatedStatus } from '../provider-input/useValidateToken'
|
||||
|
||||
interface IOpenaiProviderProps {
|
||||
provider: Provider
|
||||
onValidatedStatus: (status?: ValidatedStatus) => void
|
||||
onTokenChange: (token: string) => void
|
||||
}
|
||||
|
||||
const OpenaiProvider = ({
|
||||
provider,
|
||||
onValidatedStatus,
|
||||
onTokenChange
|
||||
}: IOpenaiProviderProps) => {
|
||||
const { t } = useTranslation()
|
||||
const [token, setToken] = useState(provider.token as string || '')
|
||||
const handleFocus = () => {
|
||||
if (token === provider.token) {
|
||||
setToken('')
|
||||
onTokenChange('')
|
||||
}
|
||||
}
|
||||
const handleChange = (v: string) => {
|
||||
setToken(v)
|
||||
onTokenChange(v)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='px-4 pt-3 pb-4'>
|
||||
<ProviderValidateTokenInput
|
||||
value={token}
|
||||
name={t('common.provider.apiKey')}
|
||||
placeholder={t('common.provider.enterYourKey')}
|
||||
onChange={handleChange}
|
||||
onFocus={handleFocus}
|
||||
onValidatedStatus={onValidatedStatus}
|
||||
providerName={provider.provider_name}
|
||||
/>
|
||||
<Link className="inline-flex items-center mt-3 text-xs font-normal cursor-pointer text-primary-600 w-fit" href="https://platform.openai.com/account/api-keys" target={'_blank'}>
|
||||
{t('appOverview.welcome.getKeyTip')}
|
||||
<ArrowTopRightOnSquareIcon className='w-3 h-3 ml-1 text-primary-600' aria-hidden="true" />
|
||||
</Link>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default OpenaiProvider
|
||||
|
|
@ -0,0 +1,59 @@
import Link from 'next/link'
import { CheckCircleIcon, ExclamationCircleIcon } from '@heroicons/react/24/solid'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import I18n from '@/context/i18n'

export const ValidatedErrorIcon = () => {
  return <ExclamationCircleIcon className='w-4 h-4 text-[#D92D20]' />
}

export const ValidatedSuccessIcon = () => {
  return <CheckCircleIcon className='w-4 h-4 text-[#039855]' />
}

export const ValidatingTip = () => {
  const { t } = useTranslation()
  return (
    <div className={`mt-2 text-primary-600 text-xs font-normal`}>
      {t('common.provider.validating')}
    </div>
  )
}

export const ValidatedExceedOnOpenaiTip = () => {
  const { t } = useTranslation()
  const { locale } = useContext(I18n)

  return (
    <div className={`mt-2 text-[#D92D20] text-xs font-normal`}>
      {t('common.provider.apiKeyExceedBill')}
      <Link
        className='underline'
        href="https://platform.openai.com/account/api-keys"
        target={'_blank'}>
        {locale === 'en' ? 'this link' : '这篇文档'}
      </Link>
    </div>
  )
}

export const ValidatedErrorOnOpenaiTip = ({ errorMessage }: { errorMessage: string }) => {
  const { t } = useTranslation()

  return (
    <div className={`mt-2 text-[#D92D20] text-xs font-normal`}>
      {t('common.provider.validatedError')}{errorMessage}
    </div>
  )
}

export const ValidatedErrorOnAzureOpenaiTip = ({ errorMessage }: { errorMessage: string }) => {
  const { t } = useTranslation()

  return (
    <div className={`mt-2 text-[#D92D20] text-xs font-normal`}>
      {t('common.provider.validatedError')}{errorMessage}
    </div>
  )
}
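Elsewhere in this commit (the azure-provider and openai-provider hunks), these exports are paired with the useValidateToken hook roughly as follows. This is a condensed sketch only; the getIcon/getTip helper names are mine, not part of the diff.

import type { ReactElement } from 'react'
import { ValidatedErrorIcon, ValidatedErrorOnOpenaiTip, ValidatedSuccessIcon, ValidatingTip } from './Validate'
import { ValidatedStatus, ValidatedStatusState } from './useValidateToken'

// Map the hook's state onto the icon/tip components exported above.
const getIcon = (validatedStatus: ValidatedStatusState): ReactElement | undefined => {
  if (validatedStatus.status === ValidatedStatus.Error || validatedStatus.status === ValidatedStatus.Exceed)
    return <ValidatedErrorIcon />
  if (validatedStatus.status === ValidatedStatus.Success)
    return <ValidatedSuccessIcon />
  return undefined
}

const getTip = (validating: boolean, validatedStatus: ValidatedStatusState): ReactElement | undefined => {
  if (validating)
    return <ValidatingTip />
  if (validatedStatus.status === ValidatedStatus.Error)
    return <ValidatedErrorOnOpenaiTip errorMessage={validatedStatus.message ?? ''} />
  return undefined
}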
@ -1,10 +1,5 @@
|
|||
import { ChangeEvent, useEffect } from 'react'
|
||||
import Link from 'next/link'
|
||||
import { CheckCircleIcon, ExclamationCircleIcon } from '@heroicons/react/24/solid'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import I18n from '@/context/i18n'
|
||||
import useValidateToken, { ValidatedStatus } from './useValidateToken'
|
||||
import { ChangeEvent } from 'react'
|
||||
import { ReactElement } from 'react-markdown/lib/react-markdown'
|
||||
|
||||
interface IProviderInputProps {
|
||||
value?: string
|
||||
|
|
@ -13,6 +8,8 @@ interface IProviderInputProps {
|
|||
className?: string
|
||||
onChange: (v: string) => void
|
||||
onFocus?: () => void
|
||||
validatedIcon?: ReactElement
|
||||
validatedTip?: ReactElement
|
||||
}
|
||||
|
||||
const ProviderInput = ({
|
||||
|
|
@ -22,6 +19,8 @@ const ProviderInput = ({
|
|||
className,
|
||||
onChange,
|
||||
onFocus,
|
||||
validatedIcon,
|
||||
validatedTip
|
||||
}: IProviderInputProps) => {
|
||||
|
||||
const handleChange = (e: ChangeEvent<HTMLInputElement>) => {
|
||||
|
|
@ -47,95 +46,9 @@ const ProviderInput = ({
|
|||
onChange={handleChange}
|
||||
onFocus={onFocus}
|
||||
/>
|
||||
{validatedIcon}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
type TproviderInputProps = IProviderInputProps
|
||||
& {
|
||||
onValidatedStatus?: (status?: ValidatedStatus) => void
|
||||
providerName: string
|
||||
}
|
||||
export const ProviderValidateTokenInput = ({
|
||||
value,
|
||||
name,
|
||||
placeholder,
|
||||
className,
|
||||
onChange,
|
||||
onFocus,
|
||||
onValidatedStatus,
|
||||
providerName
|
||||
}: TproviderInputProps) => {
|
||||
const { t } = useTranslation()
|
||||
const { locale } = useContext(I18n)
|
||||
const [ validating, validatedStatus, validate ] = useValidateToken(providerName)
|
||||
|
||||
useEffect(() => {
|
||||
if (typeof onValidatedStatus === 'function') {
|
||||
onValidatedStatus(validatedStatus)
|
||||
}
|
||||
}, [validatedStatus])
|
||||
|
||||
const handleChange = (e: ChangeEvent<HTMLInputElement>) => {
|
||||
const inputValue = e.target.value
|
||||
onChange(inputValue)
|
||||
|
||||
validate(inputValue)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={className}>
|
||||
<div className="mb-2 text-[13px] font-medium text-gray-800">{name}</div>
|
||||
<div className='
|
||||
flex items-center px-3 bg-white rounded-lg
|
||||
shadow-[0_1px_2px_rgba(16,24,40,0.05)]
|
||||
'>
|
||||
<input
|
||||
className='
|
||||
w-full py-[9px]
|
||||
text-xs font-medium text-gray-700 leading-[18px]
|
||||
appearance-none outline-none bg-transparent
|
||||
'
|
||||
value={value}
|
||||
placeholder={placeholder}
|
||||
onChange={handleChange}
|
||||
onFocus={onFocus}
|
||||
/>
|
||||
{
|
||||
validatedStatus === ValidatedStatus.Error && <ExclamationCircleIcon className='w-4 h-4 text-[#D92D20]' />
|
||||
}
|
||||
{
|
||||
validatedStatus === ValidatedStatus.Success && <CheckCircleIcon className='w-4 h-4 text-[#039855]' />
|
||||
}
|
||||
</div>
|
||||
{
|
||||
validating && (
|
||||
<div className={`mt-2 text-primary-600 text-xs font-normal`}>
|
||||
{t('common.provider.validating')}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
{
|
||||
validatedStatus === ValidatedStatus.Exceed && !validating && (
|
||||
<div className={`mt-2 text-[#D92D20] text-xs font-normal`}>
|
||||
{t('common.provider.apiKeyExceedBill')}
|
||||
<Link
|
||||
className='underline'
|
||||
href="https://platform.openai.com/account/api-keys"
|
||||
target={'_blank'}>
|
||||
{locale === 'en' ? 'this link' : '这篇文档'}
|
||||
</Link>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
{
|
||||
validatedStatus === ValidatedStatus.Error && !validating && (
|
||||
<div className={`mt-2 text-[#D92D20] text-xs font-normal`}>
|
||||
{t('common.provider.invalidKey')}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
{validatedTip}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
import { useState, useCallback } from 'react'
import { useState, useCallback, SetStateAction, Dispatch } from 'react'
import debounce from 'lodash-es/debounce'
import { DebouncedFunc } from 'lodash-es'
import { validateProviderKey } from '@/service/common'

@ -8,29 +8,39 @@ export enum ValidatedStatus {
  Error = 'error',
  Exceed = 'exceed'
}
export type ValidatedStatusState = {
  status?: ValidatedStatus,
  message?: string
}
// export type ValidatedStatusState = ValidatedStatus | undefined | ValidatedError
export type SetValidatedStatus = Dispatch<SetStateAction<ValidatedStatusState>>
export type ValidateFn = DebouncedFunc<(token: any, config: ValidateFnConfig) => void>
type ValidateTokenReturn = [
  boolean,
  ValidatedStatusState,
  SetValidatedStatus,
  ValidateFn
]
export type ValidateFnConfig = {
  beforeValidating: (token: any) => boolean
}

const useValidateToken = (providerName: string): [boolean, ValidatedStatus | undefined, DebouncedFunc<(token: string) => Promise<void>>] => {
const useValidateToken = (providerName: string): ValidateTokenReturn => {
  const [validating, setValidating] = useState(false)
  const [validatedStatus, setValidatedStatus] = useState<ValidatedStatus | undefined>()
  const validate = useCallback(debounce(async (token: string) => {
    if (!token) {
      setValidatedStatus(undefined)
      return
  const [validatedStatus, setValidatedStatus] = useState<ValidatedStatusState>({})
  const validate = useCallback(debounce(async (token: string, config: ValidateFnConfig) => {
    if (!config.beforeValidating(token)) {
      return false
    }
    setValidating(true)
    try {
      const res = await validateProviderKey({ url: `/workspaces/current/providers/${providerName}/token-validate`, body: { token } })
      setValidatedStatus(res.result === 'success' ? ValidatedStatus.Success : ValidatedStatus.Error)
      setValidatedStatus(
        res.result === 'success'
          ? { status: ValidatedStatus.Success }
          : { status: ValidatedStatus.Error, message: res.error })
    } catch (e: any) {
      if (e.status === 400) {
        e.json().then(({ code }: any) => {
          if (code === 'provider_request_failed') {
            setValidatedStatus(ValidatedStatus.Exceed)
          }
        })
      } else {
        setValidatedStatus(ValidatedStatus.Error)
      }
      setValidatedStatus({ status: ValidatedStatus.Error, message: e.message })
    } finally {
      setValidating(false)
    }

@ -39,7 +49,8 @@ const useValidateToken = (providerName: string): [boolean, ValidatedStatus | und
  return [
    validating,
    validatedStatus,
    validate,
    setValidatedStatus,
    validate
  ]
}
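For orientation, a minimal consumer of the refactored hook. The MyProviderForm component, the plain input element, and the relative import path are hypothetical; the tuple shape, ValidateFnConfig, and the beforeValidating guard mirror the hunk above.

import { useState } from 'react'
import useValidateToken, { ValidatedStatus } from './useValidateToken'

const MyProviderForm = () => {
  const [token, setToken] = useState('')
  const [validating, validatedStatus, setValidatedStatus, validate] = useValidateToken('openai')

  const handleChange = (v: string) => {
    setToken(v)
    validate(v, {
      beforeValidating: () => {
        if (!v) {
          setValidatedStatus({}) // reset instead of validating an empty key
          return false
        }
        return true
      },
    })
  }

  return (
    <div>
      <input value={token} onChange={e => handleChange(e.target.value)} />
      {validating && <span>validating…</span>}
      {validatedStatus.status === ValidatedStatus.Error && <span>{validatedStatus.message}</span>}
    </div>
  )
}

export default MyProviderForm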
|
|||
|
|
@ -5,9 +5,10 @@ import { useContext } from 'use-context-selector'
|
|||
import Indicator from '../../../indicator'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import type { Provider, ProviderAzureToken } from '@/models/common'
|
||||
import OpenaiProvider from '../openai-provider/provider'
|
||||
import { ProviderName } from '@/models/common'
|
||||
import OpenaiProvider from '../openai-provider'
|
||||
import AzureProvider from '../azure-provider'
|
||||
import { ValidatedStatus } from '../provider-input/useValidateToken'
|
||||
import { ValidatedStatus, ValidatedStatusState } from '../provider-input/useValidateToken'
|
||||
import { updateProviderAIKey } from '@/service/common'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
|
||||
|
|
@ -28,7 +29,7 @@ const ProviderItem = ({
|
|||
onSave
|
||||
}: IProviderItemProps) => {
|
||||
const { t } = useTranslation()
|
||||
const [validatedStatus, setValidatedStatus] = useState<ValidatedStatus>()
|
||||
const [validatedStatus, setValidatedStatus] = useState<ValidatedStatusState>()
|
||||
const [loading, setLoading] = useState(false)
|
||||
const { notify } = useContext(ToastContext)
|
||||
const [token, setToken] = useState<ProviderAzureToken | string>(
|
||||
|
|
@ -38,13 +39,23 @@ const ProviderItem = ({
|
|||
)
|
||||
const id = `${provider.provider_name}-${provider.provider_type}`
|
||||
const isOpen = id === activeId
|
||||
const providerKey = provider.provider_name === 'azure_openai' ? (provider.token as ProviderAzureToken)?.openai_api_key : provider.token
|
||||
const comingSoon = false
|
||||
const isValid = provider.is_valid
|
||||
|
||||
const providerTokenHasSetted = () => {
|
||||
if (provider.provider_name === ProviderName.AZURE_OPENAI) {
|
||||
return provider.token && provider.token.openai_api_base && provider.token.openai_api_key ? {
|
||||
openai_api_base: provider.token.openai_api_base,
|
||||
openai_api_key: provider.token.openai_api_key
|
||||
}: undefined
|
||||
}
|
||||
if (provider.provider_name === ProviderName.OPENAI) {
|
||||
return provider.token
|
||||
}
|
||||
}
|
||||
const handleUpdateToken = async () => {
|
||||
if (loading) return
|
||||
if (validatedStatus === ValidatedStatus.Success || !token) {
|
||||
if (validatedStatus?.status === ValidatedStatus.Success) {
|
||||
try {
|
||||
setLoading(true)
|
||||
await updateProviderAIKey({ url: `/workspaces/current/providers/${provider.provider_name}/token`, body: { token } })
|
||||
|
|
@ -65,7 +76,7 @@ const ProviderItem = ({
|
|||
<div className={cn(s[`icon-${icon}`], 'mr-3 w-6 h-6 rounded-md')} />
|
||||
<div className='grow text-sm font-medium text-gray-800'>{name}</div>
|
||||
{
|
||||
providerKey && !comingSoon && !isOpen && (
|
||||
providerTokenHasSetted() && !comingSoon && !isOpen && (
|
||||
<div className='flex items-center mr-4'>
|
||||
{!isValid && <div className='text-xs text-[#D92D20]'>{t('common.provider.invalidApiKey')}</div>}
|
||||
<Indicator color={!isValid ? 'red' : 'green'} className='ml-2' />
|
||||
|
|
@ -78,7 +89,7 @@ const ProviderItem = ({
|
|||
px-3 h-[28px] bg-white border border-gray-200 rounded-md cursor-pointer
|
||||
text-xs font-medium text-gray-700 flex items-center
|
||||
' onClick={() => onActive(id)}>
|
||||
{providerKey ? t('common.provider.editKey') : t('common.provider.addKey')}
|
||||
{providerTokenHasSetted() ? t('common.provider.editKey') : t('common.provider.addKey')}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
|
@ -114,7 +125,7 @@ const ProviderItem = ({
|
|||
}
|
||||
</div>
|
||||
{
|
||||
provider.provider_name === 'openai' && isOpen && (
|
||||
provider.provider_name === ProviderName.OPENAI && isOpen && (
|
||||
<OpenaiProvider
|
||||
provider={provider}
|
||||
onValidatedStatus={v => setValidatedStatus(v)}
|
||||
|
|
@ -123,7 +134,7 @@ const ProviderItem = ({
|
|||
)
|
||||
}
|
||||
{
|
||||
provider.provider_name === 'azure_openai' && isOpen && (
|
||||
provider.provider_name === ProviderName.AZURE_OPENAI && isOpen && (
|
||||
<AzureProvider
|
||||
provider={provider}
|
||||
onValidatedStatus={v => setValidatedStatus(v)}
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ const Nav = ({
    <Link href={link}>
      <div
        className={classNames(`
          flex items-center h-8 pl-2.5 pr-2
          flex items-center h-7 pl-2.5 pr-2
          font-semibold cursor-pointer rounded-[10px]
          ${isActived ? 'text-[#1C64F2]' : 'text-gray-500 hover:bg-gray-200'}
          ${curNav && isActived && 'hover:bg-[#EBF5FF]'}
|||
|
|
@ -5,7 +5,7 @@ import Welcome from '../welcome'

const ConfigSence: FC<IWelcomeProps> = (props) => {
  return (
    <div className='mb-5 antialiased font-sans overflow-hidden shrink-0'>
    <div className='mb-5 antialiased font-sans shrink-0'>
      <Welcome {...props} />
    </div>
  )
|
|||
|
|
@ -7,7 +7,7 @@ import { useRouter } from 'next/navigation'
import Toast from '../components/base/toast'
import { setup } from '@/service/common'

const validEmailReg = /^([a-zA-Z0-9_-])+@([a-zA-Z0-9_-])+(\.[a-zA-Z0-9_-])+/
const validEmailReg = /^[\w\.-]+@([\w-]+\.)+[\w-]{2,}$/
const validPassword = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/

const InstallForm = () => {
|
|
|||
|
|
@ -13,7 +13,7 @@ import Button from '@/app/components/base/button'
import { login, oauth } from '@/service/common'
import { apiPrefix } from '@/config'

const validEmailReg = /^([a-zA-Z0-9_-])+@([a-zA-Z0-9_-])+(\.[a-zA-Z0-9_-])+/
const validEmailReg = /^[\w\.-]+@([\w-]+\.)+[\w-]{2,}$/

type IState = {
  formValid: boolean
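Both the install and login forms swap the same email pattern. The old expression never anchored the end of the string and rejected dots in the local part; a quick comparison (illustration only, not part of the diff):

const oldReg = /^([a-zA-Z0-9_-])+@([a-zA-Z0-9_-])+(\.[a-zA-Z0-9_-])+/
const newReg = /^[\w\.-]+@([\w-]+\.)+[\w-]{2,}$/

console.log(oldReg.test('user@example.c om, trailing junk')) // true  — no $ anchor, one-char TLD accepted
console.log(newReg.test('user@example.c om, trailing junk')) // false — anchored, TLD needs 2+ chars
console.log(oldReg.test('first.last@example.com'))           // false — dot in the local part rejected
console.log(newReg.test('first.last@example.com'))           // true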
|
|
|||
|
|
@ -131,4 +131,10 @@ button:focus-within {
  -webkit-text-fill-color: transparent;
  background-clip: text;
  text-fill-color: transparent;
}

/* overwrite paging active dark model style */
[class*=style_paginatio] li .text-primary-600 {
  color: rgb(28 100 242);
  background-color: rgb(235 245 255);
}
|
@ -75,9 +75,9 @@ export const LOCALE_COOKIE_NAME = 'locale'

export const DEFAULT_VALUE_MAX_LEN = 48

export const zhRegex = /^[\u4e00-\u9fa5]$/gm
export const emojiRegex = /^[\uD800-\uDBFF][\uDC00-\uDFFF]$/gm
export const emailRegex = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
export const zhRegex = /^[\u4e00-\u9fa5]$/m
export const emojiRegex = /^[\uD800-\uDBFF][\uDC00-\uDFFF]$/m
export const emailRegex = /^[\w\.-]+@([\w-]+\.)+[\w-]{2,}$/m
const MAX_ZN_VAR_NAME_LENGHT = 8
const MAX_EN_VAR_VALUE_LENGHT = 16
export const getMaxVarNameLength = (value: string) => {
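These constants drop the g flag. One practical effect (my reading; the diff itself states no motivation) is that a global regex keeps lastIndex between .test() calls, so validating the same value twice can alternate between true and false:

// Illustration only — not part of the diff.
const withG = /^[\u4e00-\u9fa5]$/gm
console.log(withG.test('中'), withG.test('中')) // true, false — lastIndex persists across calls
const withoutG = /^[\u4e00-\u9fa5]$/m
console.log(withoutG.test('中'), withoutG.test('中')) // true, true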
|
|||
|
|
@ -35,6 +35,10 @@ const translation = {
    appCreated: 'App created',
    appCreateFailed: 'Failed to create app',
  },
  emoji: {
    ok: 'OK',
    cancel: 'Cancel',
  }
}

export default translation
|
|
|
|||
|
|
@ -34,6 +34,10 @@ const translation = {
    appCreated: '应用已创建',
    appCreateFailed: '应用创建失败',
  },
  emoji: {
    ok: '确认',
    cancel: '取消',
  }
}

export default translation
|
|
|
|||
|
|
@ -142,6 +142,7 @@ const translation = {
    apiKey: "API Key",
    enterYourKey: "Enter your API key here",
    invalidKey: "Invalid OpenAI API key",
    validatedError: "Validation failed: ",
    validating: "Validating key...",
    saveFailed: "Save api key failed",
    apiKeyExceedBill: "This API KEY has no quota available, please read",

@ -151,7 +152,7 @@ const translation = {
    invalidApiKey: 'Invalid API key',
    azure: {
      apiBase: 'API Base',
      apiBasePlaceholder: 'The API Base URL of your Azure OpenAI Resource.',
      apiBasePlaceholder: 'The API Base URL of your Azure OpenAI Endpoint.',
      apiKey: 'API Key',
      apiKeyPlaceholder: 'Enter your API key here',
      helpTip: 'Learn Azure OpenAI Service',
|
|
|
|||
|
|
@ -143,6 +143,7 @@ const translation = {
    apiKey: "API 密钥",
    enterYourKey: "输入你的 API 密钥",
    invalidKey: '无效的 OpenAI API 密钥',
    validatedError: "校验失败:",
    validating: "验证密钥中...",
    saveFailed: "API 密钥保存失败",
    apiKeyExceedBill: "此 API KEY 已没有可用配额,请阅读",
|
|
|
|||
|
|
@ -54,18 +54,29 @@ export type Member = Pick<UserProfileResponse, 'id' | 'name' | 'email' | 'last_l
  role: 'owner' | 'admin' | 'normal'
}

export enum ProviderName {
  OPENAI = 'openai',
  AZURE_OPENAI = 'azure_openai'
}
export type ProviderAzureToken = {
  openai_api_base: string
  openai_api_key: string
  openai_api_base?: string
  openai_api_key?: string
}
export type ProviderTokenType = {
  [ProviderName.OPENAI]: string
  [ProviderName.AZURE_OPENAI]: ProviderAzureToken
}
export type Provider = {
  provider_name: string
  provider_type: string
  is_valid: boolean
  is_enabled: boolean
  last_used: string
  token?: string | ProviderAzureToken
}
  [Name in ProviderName]: {
    provider_name: Name
  } & {
    provider_type: 'custom' | 'system'
    is_valid: boolean
    is_enabled: boolean
    last_used: string
    token?: ProviderTokenType[Name]
  }
}[ProviderName]

export type ProviderHosted = Provider & {
  quota_type: string
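A side note on the reworked Provider type: because provider_name is now a literal per union member, TypeScript can narrow token from that discriminant. A small sketch (the describeToken helper is hypothetical):

import { ProviderName } from '@/models/common'
import type { Provider } from '@/models/common'

const describeToken = (provider: Provider): string => {
  if (provider.provider_name === ProviderName.AZURE_OPENAI) {
    // token narrows to ProviderAzureToken | undefined here
    return provider.token?.openai_api_base ?? 'no Azure endpoint set'
  }
  // token narrows to string | undefined here (openai)
  return provider.token ?? 'no key set'
}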
|
|||
|
|
@ -78,7 +78,7 @@ const handleStream = (response: any, onData: IOnData, onCompleted?: IOnCompleted
      if (message.startsWith('data: ')) { // check if it starts with data:
        // console.log(message);
        bufferObj = JSON.parse(message.substring(6)) // remove data: and parse as json
        if (bufferObj.status === 400) {
        if (bufferObj.status === 400 || !bufferObj.event) {
          onData('', false, {
            conversationId: undefined,
            messageId: '',
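The streaming handler now treats chunks without an event field as errors, not just explicit status-400 payloads. A reduced sketch of that guard (the SSEChunk type and isErrorChunk helper are mine, not the actual service code):

type SSEChunk = { event?: string; status?: number; answer?: string }

const isErrorChunk = (line: string): boolean => {
  if (!line.startsWith('data: '))
    return false
  const bufferObj: SSEChunk = JSON.parse(line.substring(6))
  // Previously only status === 400 was routed to the error path; chunks that
  // carry no `event` field are now treated the same way.
  return bufferObj.status === 400 || !bufferObj.event
}

console.log(isErrorChunk('data: {"status":400}'))                    // true
console.log(isErrorChunk('data: {"answer":"hi"}'))                   // true  (no event field)
console.log(isErrorChunk('data: {"event":"message","answer":"hi"}')) // false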
|||