mirror of https://github.com/langgenius/dify.git
Merge remote-tracking branch 'origin/main' into feat/collaboration
Commit: c0af3414a3
@@ -45,6 +45,79 @@ def _validate_name(name: str) -> str:
     return name


+def _get_retrieval_methods_by_vector_type(vector_type: str | None, is_mock: bool = False) -> dict[str, list[str]]:
+    """
+    Get supported retrieval methods based on vector database type.
+
+    Args:
+        vector_type: Vector database type, can be None
+        is_mock: Whether this is a Mock API, affects MILVUS handling
+
+    Returns:
+        Dictionary containing supported retrieval methods
+
+    Raises:
+        ValueError: If vector_type is None or unsupported
+    """
+    if vector_type is None:
+        raise ValueError("Vector store type is not configured.")
+
+    # Define vector database types that only support semantic search
+    semantic_only_types = {
+        VectorType.RELYT,
+        VectorType.TIDB_VECTOR,
+        VectorType.CHROMA,
+        VectorType.PGVECTO_RS,
+        VectorType.VIKINGDB,
+        VectorType.UPSTASH,
+    }
+
+    # Define vector database types that support all retrieval methods
+    full_search_types = {
+        VectorType.QDRANT,
+        VectorType.WEAVIATE,
+        VectorType.OPENSEARCH,
+        VectorType.ANALYTICDB,
+        VectorType.MYSCALE,
+        VectorType.ORACLE,
+        VectorType.ELASTICSEARCH,
+        VectorType.ELASTICSEARCH_JA,
+        VectorType.PGVECTOR,
+        VectorType.VASTBASE,
+        VectorType.TIDB_ON_QDRANT,
+        VectorType.LINDORM,
+        VectorType.COUCHBASE,
+        VectorType.OPENGAUSS,
+        VectorType.OCEANBASE,
+        VectorType.TABLESTORE,
+        VectorType.HUAWEI_CLOUD,
+        VectorType.TENCENT,
+        VectorType.MATRIXONE,
+        VectorType.CLICKZETTA,
+        VectorType.BAIDU,
+        VectorType.ALIBABACLOUD_MYSQL,
+    }
+
+    semantic_methods = {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]}
+    full_methods = {
+        "retrieval_method": [
+            RetrievalMethod.SEMANTIC_SEARCH.value,
+            RetrievalMethod.FULL_TEXT_SEARCH.value,
+            RetrievalMethod.HYBRID_SEARCH.value,
+        ]
+    }
+
+    if vector_type == VectorType.MILVUS:
+        return semantic_methods if is_mock else full_methods
+
+    if vector_type in semantic_only_types:
+        return semantic_methods
+    elif vector_type in full_search_types:
+        return full_methods
+    else:
+        raise ValueError(f"Unsupported vector db type {vector_type}.")
+
+
 @console_ns.route("/datasets")
 class DatasetListApi(Resource):
     @api.doc("get_datasets")
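A minimal sketch, not part of the commit, of what the new helper returns for a few representative inputs. It assumes the VectorType and RetrievalMethod enums referenced above, and that the RetrievalMethod members carry the string values "semantic_search", "full_text_search" and "hybrid_search".

# Illustrative calls to _get_retrieval_methods_by_vector_type (values assumed, not asserted by the diff)
_get_retrieval_methods_by_vector_type(VectorType.CHROMA)
# -> {"retrieval_method": ["semantic_search"]}

_get_retrieval_methods_by_vector_type(VectorType.QDRANT)
# -> {"retrieval_method": ["semantic_search", "full_text_search", "hybrid_search"]}

# MILVUS is the one type whose answer depends on which endpoint is asking:
_get_retrieval_methods_by_vector_type(VectorType.MILVUS, is_mock=True)
# -> {"retrieval_method": ["semantic_search"]}
_get_retrieval_methods_by_vector_type(VectorType.MILVUS, is_mock=False)
# -> {"retrieval_method": ["semantic_search", "full_text_search", "hybrid_search"]}

_get_retrieval_methods_by_vector_type(None)
# -> raises ValueError("Vector store type is not configured.")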
@@ -777,50 +850,7 @@ class DatasetRetrievalSettingApi(Resource):
     @account_initialization_required
     def get(self):
         vector_type = dify_config.VECTOR_STORE
-        match vector_type:
-            case (
-                VectorType.RELYT
-                | VectorType.TIDB_VECTOR
-                | VectorType.CHROMA
-                | VectorType.PGVECTO_RS
-                | VectorType.VIKINGDB
-                | VectorType.UPSTASH
-            ):
-                return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH]}
-            case (
-                VectorType.QDRANT
-                | VectorType.WEAVIATE
-                | VectorType.OPENSEARCH
-                | VectorType.ANALYTICDB
-                | VectorType.MYSCALE
-                | VectorType.ORACLE
-                | VectorType.ELASTICSEARCH
-                | VectorType.ELASTICSEARCH_JA
-                | VectorType.PGVECTOR
-                | VectorType.VASTBASE
-                | VectorType.TIDB_ON_QDRANT
-                | VectorType.LINDORM
-                | VectorType.COUCHBASE
-                | VectorType.MILVUS
-                | VectorType.OPENGAUSS
-                | VectorType.OCEANBASE
-                | VectorType.TABLESTORE
-                | VectorType.HUAWEI_CLOUD
-                | VectorType.TENCENT
-                | VectorType.MATRIXONE
-                | VectorType.CLICKZETTA
-                | VectorType.BAIDU
-                | VectorType.ALIBABACLOUD_MYSQL
-            ):
-                return {
-                    "retrieval_method": [
-                        RetrievalMethod.SEMANTIC_SEARCH,
-                        RetrievalMethod.FULL_TEXT_SEARCH,
-                        RetrievalMethod.HYBRID_SEARCH,
-                    ]
-                }
-            case _:
-                raise ValueError(f"Unsupported vector db type {vector_type}.")
+        return _get_retrieval_methods_by_vector_type(vector_type, is_mock=False)


 @console_ns.route("/datasets/retrieval-setting/<string:vector_type>")
@@ -833,49 +863,7 @@ class DatasetRetrievalSettingMockApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, vector_type):
-        match vector_type:
-            case (
-                VectorType.MILVUS
-                | VectorType.RELYT
-                | VectorType.TIDB_VECTOR
-                | VectorType.CHROMA
-                | VectorType.PGVECTO_RS
-                | VectorType.VIKINGDB
-                | VectorType.UPSTASH
-            ):
-                return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH]}
-            case (
-                VectorType.QDRANT
-                | VectorType.WEAVIATE
-                | VectorType.OPENSEARCH
-                | VectorType.ANALYTICDB
-                | VectorType.MYSCALE
-                | VectorType.ORACLE
-                | VectorType.ELASTICSEARCH
-                | VectorType.ELASTICSEARCH_JA
-                | VectorType.COUCHBASE
-                | VectorType.PGVECTOR
-                | VectorType.VASTBASE
-                | VectorType.LINDORM
-                | VectorType.OPENGAUSS
-                | VectorType.OCEANBASE
-                | VectorType.TABLESTORE
-                | VectorType.TENCENT
-                | VectorType.HUAWEI_CLOUD
-                | VectorType.MATRIXONE
-                | VectorType.CLICKZETTA
-                | VectorType.BAIDU
-                | VectorType.ALIBABACLOUD_MYSQL
-            ):
-                return {
-                    "retrieval_method": [
-                        RetrievalMethod.SEMANTIC_SEARCH,
-                        RetrievalMethod.FULL_TEXT_SEARCH,
-                        RetrievalMethod.HYBRID_SEARCH,
-                    ]
-                }
-            case _:
-                raise ValueError(f"Unsupported vector db type {vector_type}.")
+        return _get_retrieval_methods_by_vector_type(vector_type, is_mock=True)


 @console_ns.route("/datasets/<uuid:dataset_id>/error-docs")
@@ -1,10 +1,12 @@
 import logging
 import queue
+import threading
 import time
 from abc import abstractmethod
 from enum import IntEnum, auto
 from typing import Any

+from cachetools import TTLCache, cachedmethod
 from redis.exceptions import RedisError
 from sqlalchemy.orm import DeclarativeMeta

@@ -45,6 +47,8 @@ class AppQueueManager:
         q: queue.Queue[WorkflowQueueMessage | MessageQueueMessage | None] = queue.Queue()

         self._q = q
+        self._stopped_cache: TTLCache[tuple, bool] = TTLCache(maxsize=1, ttl=1)
+        self._cache_lock = threading.Lock()

     def listen(self):
         """
@@ -157,6 +161,7 @@
         stopped_cache_key = cls._generate_stopped_cache_key(task_id)
         redis_client.setex(stopped_cache_key, 600, 1)

+    @cachedmethod(lambda self: self._stopped_cache, lock=lambda self: self._cache_lock)
    def _is_stopped(self) -> bool:
        """
        Check if task is stopped
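A standalone sketch of the caching pattern these hunks introduce: cachetools' cachedmethod backed by a single-entry TTLCache plus a lock, so a hot-path check hits its backing store at most once per TTL window. Class and attribute names below are illustrative, and the Redis lookup is replaced by a counter.

import threading
import time

from cachetools import TTLCache, cachedmethod


class StopFlag:
    def __init__(self):
        # maxsize=1: only one cached entry (the no-argument call);
        # ttl=1: re-check the backing store at most once per second.
        self._stopped_cache: TTLCache = TTLCache(maxsize=1, ttl=1)
        self._cache_lock = threading.Lock()
        self.calls = 0

    @cachedmethod(lambda self: self._stopped_cache, lock=lambda self: self._cache_lock)
    def is_stopped(self) -> bool:
        # Stand-in for the Redis lookup performed by the real _is_stopped().
        self.calls += 1
        return False


flag = StopFlag()
for _ in range(100):
    flag.is_stopped()
print(flag.calls)  # 1: repeated calls within the TTL are served from the cache
time.sleep(1.1)
flag.is_stopped()
print(flag.calls)  # 2: the cached entry expired, so the check runs again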
@@ -43,8 +43,7 @@ class CacheEmbedding(Embeddings):
             else:
                 embedding_queue_indices.append(i)

-        # release database connection, because embedding may take a long time
-        db.session.close()
+        # NOTE: avoid closing the shared scoped session here; downstream code may still have pending work

        if embedding_queue_indices:
            embedding_queue_texts = [texts[i] for i in embedding_queue_indices]
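A small SQLAlchemy sketch of the failure mode the new NOTE comment warns about; the model and names are illustrative (SQLAlchemy 2.0 style), not dify's schema. Because db.session is a shared scoped session, calling close() in a helper silently discards pending work that the caller still expects to be there.

from sqlalchemy import Integer, String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, scoped_session, sessionmaker


class Base(DeclarativeBase):
    pass


class Embedding(Base):
    __tablename__ = "embeddings"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    hash: Mapped[str] = mapped_column(String(64))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
db_session = scoped_session(sessionmaker(bind=engine))  # shared, like db.session

# The caller stages a pending row, then hands control to a helper...
db_session.add(Embedding(hash="abc"))

# ...which "helpfully" closes the shared session before a slow step.
db_session.close()  # the kind of call the commit removes

# Back in the caller: the pending add() was discarded by close().
print(db_session.query(Embedding).count())  # 0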
@@ -111,6 +111,8 @@ const Answer: FC<AnswerProps> = ({
     }
   }, [switchSibling, item.prevSibling, item.nextSibling])

+  const contentIsEmpty = content.trim() === ''
+
   return (
     <div className='mb-2 flex last:mb-0'>
       <div className='relative h-10 w-10 shrink-0'>
@@ -153,14 +155,14 @@
         )
       }
       {
-        responding && !content && !hasAgentThoughts && (
+        responding && contentIsEmpty && !hasAgentThoughts && (
           <div className='flex h-5 w-6 items-center justify-center'>
             <LoadingAnim type='text' />
           </div>
         )
       }
       {
-        content && !hasAgentThoughts && (
+        !contentIsEmpty && !hasAgentThoughts && (
           <BasicContent item={item} />
         )
       }
@@ -83,6 +83,15 @@ const ChatInputArea = ({
   const historyRef = useRef([''])
   const [currentIndex, setCurrentIndex] = useState(-1)
   const isComposingRef = useRef(false)
+
+  const handleQueryChange = useCallback(
+    (value: string) => {
+      setQuery(value)
+      setTimeout(handleTextareaResize, 0)
+    },
+    [handleTextareaResize],
+  )
+
   const handleSend = () => {
     if (isResponding) {
       notify({ type: 'info', message: t('appDebug.errorMessage.waitForResponse') })
@@ -101,7 +110,7 @@
     }
     if (checkInputsForm(inputs, inputsForm)) {
       onSend(query, files)
-      setQuery('')
+      handleQueryChange('')
       setFiles([])
     }
   }
@@ -131,19 +140,19 @@
       // When the cmd + up key is pressed, output the previous element
       if (currentIndex > 0) {
         setCurrentIndex(currentIndex - 1)
-        setQuery(historyRef.current[currentIndex - 1])
+        handleQueryChange(historyRef.current[currentIndex - 1])
       }
     }
     else if (e.key === 'ArrowDown' && !e.shiftKey && !e.nativeEvent.isComposing && e.metaKey) {
       // When the cmd + down key is pressed, output the next element
       if (currentIndex < historyRef.current.length - 1) {
         setCurrentIndex(currentIndex + 1)
-        setQuery(historyRef.current[currentIndex + 1])
+        handleQueryChange(historyRef.current[currentIndex + 1])
       }
       else if (currentIndex === historyRef.current.length - 1) {
         // If it is the last element, clear the input box
         setCurrentIndex(historyRef.current.length)
-        setQuery('')
+        handleQueryChange('')
       }
     }
   }
@@ -171,7 +180,7 @@
     <>
       <div
         className={cn(
-          'relative z-10 rounded-xl border border-components-chat-input-border bg-components-panel-bg-blur pb-[9px] shadow-md',
+          'relative z-10 overflow-hidden rounded-xl border border-components-chat-input-border bg-components-panel-bg-blur pb-[9px] shadow-md',
          isDragActive && 'border border-dashed border-components-option-card-option-selected-border',
          disabled && 'pointer-events-none border-components-panel-border opacity-50 shadow-none',
        )}
@@ -197,12 +206,8 @@
             placeholder={t('common.chat.inputPlaceholder', { botName }) || ''}
             autoFocus
             minRows={1}
             onResize={handleTextareaResize}
             value={query}
-            onChange={(e) => {
-              setQuery(e.target.value)
-              setTimeout(handleTextareaResize, 0)
-            }}
+            onChange={e => handleQueryChange(e.target.value)}
             onKeyDown={handleKeyDown}
             onCompositionStart={handleCompositionStart}
             onCompositionEnd={handleCompositionEnd}
@@ -221,7 +226,7 @@
           showVoiceInput && (
             <VoiceInput
               onCancel={() => setShowVoiceInput(false)}
-              onConverted={text => setQuery(text)}
+              onConverted={text => handleQueryChange(text)}
             />
           )
         }
@@ -1,3 +1,4 @@
+import type { FC, Ref } from 'react'
 import { memo } from 'react'
 import {
   RiMicLine,
@@ -18,20 +19,17 @@ type OperationProps = {
   speechToTextConfig?: EnableType
   onShowVoiceInput?: () => void
   onSend: () => void
-  theme?: Theme | null
+  theme?: Theme | null,
+  ref?: Ref<HTMLDivElement>;
 }
-const Operation = (
-  {
-    ref,
-    fileConfig,
-    speechToTextConfig,
-    onShowVoiceInput,
-    onSend,
-    theme,
-  }: OperationProps & {
-    ref: React.RefObject<HTMLDivElement>;
-  },
-) => {
+const Operation: FC<OperationProps> = ({
+  ref,
+  fileConfig,
+  speechToTextConfig,
+  onShowVoiceInput,
+  onSend,
+  theme,
+}) => {
   return (
     <div
       className={cn(
@@ -240,7 +240,7 @@ const ChatWrapper = () => {
       config={appConfig}
       chatList={messageList}
       isResponding={respondingState}
-      chatContainerInnerClassName={cn('mx-auto w-full max-w-full pt-4 tablet:px-4', isMobile && 'px-4')}
+      chatContainerInnerClassName={cn('mx-auto w-full max-w-full px-4', messageList.length && 'pt-4')}
       chatFooterClassName={cn('pb-4', !isMobile && 'rounded-b-2xl')}
       chatFooterInnerClassName={cn('mx-auto w-full max-w-full px-4', isMobile && 'px-2')}
       onSend={doSend}
@@ -17,12 +17,9 @@ import type {
 import { noop } from 'lodash-es'

 export type EmbeddedChatbotContextValue = {
   userCanAccess?: boolean
   appInfoError?: any
   appInfoLoading?: boolean
-  appMeta?: AppMeta
-  appData?: AppData
-  appParams?: ChatConfig
+  appMeta: AppMeta | null
+  appData: AppData | null
+  appParams: ChatConfig | null
   appChatListDataLoading?: boolean
   currentConversationId: string
   currentConversationItem?: ConversationItem
@@ -59,7 +56,10 @@
 }

 export const EmbeddedChatbotContext = createContext<EmbeddedChatbotContextValue>({
   userCanAccess: false,
+  appData: null,
+  appMeta: null,
+  appParams: null,
   appChatListDataLoading: false,
   currentConversationId: '',
   appPrevChatList: [],
   pinnedConversationList: [],
@@ -135,7 +135,7 @@ const Header: FC<IHeaderProps> = ({
   return (
     <div
       className={cn('flex h-14 shrink-0 items-center justify-between rounded-t-2xl px-3')}
-      style={Object.assign({}, CssTransform(theme?.backgroundHeaderColorStyle ?? ''), CssTransform(theme?.headerBorderBottomStyle ?? ''))}
+      style={CssTransform(theme?.headerBorderBottomStyle ?? '')}
     >
       <div className="flex grow items-center space-x-3">
         {customerIcon}
@@ -18,9 +18,6 @@ import { CONVERSATION_ID_INFO } from '../constants'
 import { buildChatItemTree, getProcessedInputsFromUrlParams, getProcessedSystemVariablesFromUrlParams, getProcessedUserVariablesFromUrlParams } from '../utils'
 import { getProcessedFilesFromResponse } from '../../file-uploader/utils'
 import {
-  fetchAppInfo,
-  fetchAppMeta,
-  fetchAppParams,
   fetchChatList,
   fetchConversations,
   generationConversationName,
@@ -36,8 +33,7 @@ import { InputVarType } from '@/app/components/workflow/types'
 import { TransferMethod } from '@/types/app'
 import { addFileInfos, sortAgentSorts } from '@/app/components/tools/utils'
 import { noop } from 'lodash-es'
-import { useGetUserCanAccessApp } from '@/service/access-control'
-import { useGlobalPublicStore } from '@/context/global-public-context'
+import { useWebAppStore } from '@/context/web-app-context'

 function getFormattedChatList(messages: any[]) {
   const newChatList: ChatItem[] = []
@@ -67,18 +63,10 @@ function getFormattedChatList(messages: any[]) {

 export const useEmbeddedChatbot = () => {
   const isInstalledApp = false
-  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
-  const { data: appInfo, isLoading: appInfoLoading, error: appInfoError } = useSWR('appInfo', fetchAppInfo)
-  const { isPending: isCheckingPermission, data: userCanAccessResult } = useGetUserCanAccessApp({
-    appId: appInfo?.app_id,
-    isInstalledApp,
-    enabled: systemFeatures.webapp_auth.enabled,
-  })
-
-  const appData = useMemo(() => {
-    return appInfo
-  }, [appInfo])
-  const appId = useMemo(() => appData?.app_id, [appData])
+  const appInfo = useWebAppStore(s => s.appInfo)
+  const appMeta = useWebAppStore(s => s.appMeta)
+  const appParams = useWebAppStore(s => s.appParams)
+  const appId = useMemo(() => appInfo?.app_id, [appInfo])

   const [userId, setUserId] = useState<string>()
   const [conversationId, setConversationId] = useState<string>()
@@ -145,8 +133,6 @@
     return currentConversationId
   }, [currentConversationId, newConversationId])

-  const { data: appParams } = useSWR(['appParams', isInstalledApp, appId], () => fetchAppParams(isInstalledApp, appId))
-  const { data: appMeta } = useSWR(['appMeta', isInstalledApp, appId], () => fetchAppMeta(isInstalledApp, appId))
   const { data: appPinnedConversationData } = useSWR(['appConversationData', isInstalledApp, appId, true], () => fetchConversations(isInstalledApp, appId, undefined, true, 100))
   const { data: appConversationData, isLoading: appConversationDataLoading, mutate: mutateAppConversationData } = useSWR(['appConversationData', isInstalledApp, appId, false], () => fetchConversations(isInstalledApp, appId, undefined, false, 100))
   const { data: appChatListData, isLoading: appChatListDataLoading } = useSWR(chatShouldReloadKey ? ['appChatList', chatShouldReloadKey, isInstalledApp, appId] : null, () => fetchChatList(chatShouldReloadKey, isInstalledApp, appId))
@@ -398,16 +384,13 @@
   }, [isInstalledApp, appId, t, notify])

   return {
-    appInfoError,
-    appInfoLoading: appInfoLoading || (systemFeatures.webapp_auth.enabled && isCheckingPermission),
-    userCanAccess: systemFeatures.webapp_auth.enabled ? userCanAccessResult?.result : true,
     isInstalledApp,
     allowResetChat,
     appId,
     currentConversationId,
     currentConversationItem,
     handleConversationIdInfoChange,
-    appData,
+    appData: appInfo,
+    appParams: appParams || {} as ChatConfig,
     appMeta,
     appPinnedConversationData,
@@ -49,8 +49,8 @@ const Chatbot = () => {
       <div className='relative'>
         <div
           className={cn(
-            'flex flex-col rounded-2xl border border-components-panel-border-subtle',
-            isMobile ? 'h-[calc(100vh_-_60px)] border-[0.5px] border-components-panel-border shadow-xs' : 'h-[100vh] bg-chatbot-bg',
+            'flex flex-col rounded-2xl',
+            isMobile ? 'h-[calc(100vh_-_60px)] shadow-xs' : 'h-[100vh] bg-chatbot-bg',
           )}
           style={isMobile ? Object.assign({}, CssTransform(themeBuilder?.theme?.backgroundHeaderColorStyle ?? '')) : {}}
         >
@@ -62,7 +62,7 @@
           theme={themeBuilder?.theme}
           onCreateNewChat={handleNewConversation}
         />
-        <div className={cn('flex grow flex-col overflow-y-auto', isMobile && '!h-[calc(100vh_-_3rem)] rounded-2xl bg-chatbot-bg')}>
+        <div className={cn('flex grow flex-col overflow-y-auto', isMobile && 'm-[0.5px] !h-[calc(100vh_-_3rem)] rounded-2xl bg-chatbot-bg')}>
           {appChatListDataLoading && (
             <Loading type='app' />
           )}
@@ -101,7 +101,6 @@ const EmbeddedChatbotWrapper = () => {

   const {
     appData,
-    userCanAccess,
     appParams,
     appMeta,
     appChatListDataLoading,
@@ -135,7 +134,6 @@
   } = useEmbeddedChatbot()

   return <EmbeddedChatbotContext.Provider value={{
-    userCanAccess,
     appData,
     appParams,
     appMeta,