Merge branch 'origin-main' into feat/end-user-oauth

zhsama 2025-12-01 12:26:01 +08:00
commit 5e6053b367
6 changed files with 2107 additions and 89 deletions

View File

@@ -58,11 +58,39 @@ class OceanBaseVector(BaseVector):
password=self._config.password,
db_name=self._config.database,
)
self._fields: list[str] = [] # List of fields in the collection
if self._client.check_table_exists(collection_name):
self._load_collection_fields()
self._hybrid_search_enabled = self._check_hybrid_search_support() # Check if hybrid search is supported
def get_type(self) -> str:
return VectorType.OCEANBASE
def _load_collection_fields(self):
"""
Load collection fields from the database table.
This method populates the _fields list with column names from the table.
"""
try:
if self._collection_name in self._client.metadata_obj.tables:
table = self._client.metadata_obj.tables[self._collection_name]
# Store all column names except 'id' (primary key)
self._fields = [column.name for column in table.columns if column.name != "id"]
logger.debug("Loaded fields for collection '%s': %s", self._collection_name, self._fields)
else:
logger.warning("Collection '%s' not found in metadata", self._collection_name)
except Exception as e:
logger.warning("Failed to load collection fields for '%s': %s", self._collection_name, str(e))
def field_exists(self, field: str) -> bool:
"""
Check if a field exists in the collection.
:param field: Field name to check
:return: True if field exists, False otherwise
"""
return field in self._fields
def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
self._vec_dim = len(embeddings[0])
self._create_collection()
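The _load_collection_fields helper above leans on SQLAlchemy's MetaData registry: every table registered there exposes its columns as objects with a plain .name attribute. A minimal standalone sketch of the same lookup, assuming a scratch SQLite engine in place of pyobvector's client:

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine

# Sketch of the field-loading pattern, assuming a throwaway SQLite engine;
# the real code reads the same structures off self._client.metadata_obj.
engine = create_engine("sqlite:///:memory:")
metadata = MetaData()
Table(
    "docs", metadata,
    Column("id", Integer, primary_key=True),
    Column("text", String),
    Column("metadata", String),
)
metadata.create_all(engine)
fields = [c.name for c in metadata.tables["docs"].columns if c.name != "id"]
print(fields)  # ['text', 'metadata']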
@@ -151,6 +179,7 @@ class OceanBaseVector(BaseVector):
logger.debug("DEBUG: Hybrid search is NOT enabled for '%s'", self._collection_name)
self._client.refresh_metadata([self._collection_name])
self._load_collection_fields()
redis_client.set(collection_exist_cache_key, 1, ex=3600)
def _check_hybrid_search_support(self) -> bool:
@@ -177,42 +206,134 @@ class OceanBaseVector(BaseVector):
def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
ids = self._get_uuids(documents)
for id, doc, emb in zip(ids, documents, embeddings):
self._client.insert(
table_name=self._collection_name,
data={
"id": id,
"vector": emb,
"text": doc.page_content,
"metadata": doc.metadata,
},
)
try:
self._client.insert(
table_name=self._collection_name,
data={
"id": id,
"vector": emb,
"text": doc.page_content,
"metadata": doc.metadata,
},
)
except Exception as e:
logger.exception(
"Failed to insert document with id '%s' in collection '%s'",
id,
self._collection_name,
)
raise Exception(f"Failed to insert document with id '{id}'") from e
def text_exists(self, id: str) -> bool:
cur = self._client.get(table_name=self._collection_name, ids=id)
return bool(cur.rowcount != 0)
try:
cur = self._client.get(table_name=self._collection_name, ids=id)
return bool(cur.rowcount != 0)
except Exception as e:
logger.exception(
"Failed to check if text exists with id '%s' in collection '%s'",
id,
self._collection_name,
)
raise Exception(f"Failed to check text existence for id '{id}'") from e
def delete_by_ids(self, ids: list[str]):
if not ids:
return
self._client.delete(table_name=self._collection_name, ids=ids)
try:
self._client.delete(table_name=self._collection_name, ids=ids)
logger.debug("Deleted %d documents from collection '%s'", len(ids), self._collection_name)
except Exception as e:
logger.exception(
"Failed to delete %d documents from collection '%s'",
len(ids),
self._collection_name,
)
raise Exception(f"Failed to delete documents from collection '{self._collection_name}'") from e
def get_ids_by_metadata_field(self, key: str, value: str) -> list[str]:
from sqlalchemy import text
try:
import re
cur = self._client.get(
table_name=self._collection_name,
ids=None,
where_clause=[text(f"metadata->>'$.{key}' = '{value}'")],
output_column_name=["id"],
)
return [row[0] for row in cur]
from sqlalchemy import text
# Validate key to prevent injection in JSON path
if not re.match(r"^[a-zA-Z0-9_.]+$", key):
raise ValueError(f"Invalid characters in metadata key: {key}")
# Use parameterized query to prevent SQL injection
sql = text(f"SELECT id FROM `{self._collection_name}` WHERE metadata->>'$.{key}' = :value")
with self._client.engine.connect() as conn:
result = conn.execute(sql, {"value": value})
ids = [row[0] for row in result]
logger.debug(
"Found %d documents with metadata field '%s'='%s' in collection '%s'",
len(ids),
key,
value,
self._collection_name,
)
return ids
except Exception as e:
logger.exception(
"Failed to get IDs by metadata field '%s'='%s' in collection '%s'",
key,
value,
self._collection_name,
)
raise Exception(f"Failed to query documents by metadata field '{key}'") from e
def delete_by_metadata_field(self, key: str, value: str):
ids = self.get_ids_by_metadata_field(key, value)
self.delete_by_ids(ids)
if ids:
self.delete_by_ids(ids)
else:
logger.debug("No documents found to delete with metadata field '%s'='%s'", key, value)
def _process_search_results(
self, results: list[tuple], score_threshold: float = 0.0, score_key: str = "score"
) -> list[Document]:
"""
Common method to process search results
:param results: Search results as list of tuples (text, metadata, score)
:param score_threshold: Score threshold for filtering
:param score_key: Key name for score in metadata
:return: List of documents
"""
docs = []
for row in results:
text, metadata_str, score = row[0], row[1], row[2]
# Parse metadata JSON
try:
metadata = json.loads(metadata_str) if isinstance(metadata_str, str) else metadata_str
except json.JSONDecodeError:
logger.warning("Invalid JSON metadata: %s", metadata_str)
metadata = {}
# Add score to metadata
metadata[score_key] = score
# Filter by score threshold
if score >= score_threshold:
docs.append(Document(page_content=text, metadata=metadata))
return docs
def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
if not self._hybrid_search_enabled:
logger.warning(
"Full-text search is disabled: set OCEANBASE_ENABLE_HYBRID_SEARCH=true (requires OceanBase >= 4.3.5.1)."
)
return []
if not self.field_exists("text"):
logger.warning(
"Full-text search unavailable: collection '%s' missing 'text' field; "
"recreate the collection after enabling OCEANBASE_ENABLE_HYBRID_SEARCH to add fulltext index.",
self._collection_name,
)
return []
try:
@@ -220,13 +341,24 @@ class OceanBaseVector(BaseVector):
if not isinstance(top_k, int) or top_k <= 0:
raise ValueError("top_k must be a positive integer")
document_ids_filter = kwargs.get("document_ids_filter")
where_clause = ""
if document_ids_filter:
document_ids = ", ".join(f"'{id}'" for id in document_ids_filter)
where_clause = f" AND metadata->>'$.document_id' IN ({document_ids})"
score_threshold = float(kwargs.get("score_threshold") or 0.0)
full_sql = f"""SELECT metadata, text, MATCH (text) AGAINST (:query) AS score
# Build parameterized query to prevent SQL injection
from sqlalchemy import text
document_ids_filter = kwargs.get("document_ids_filter")
params = {"query": query}
where_clause = ""
if document_ids_filter:
# Create parameterized placeholders for document IDs
placeholders = ", ".join(f":doc_id_{i}" for i in range(len(document_ids_filter)))
where_clause = f" AND metadata->>'$.document_id' IN ({placeholders})"
# Add document IDs to parameters
for i, doc_id in enumerate(document_ids_filter):
params[f"doc_id_{i}"] = doc_id
full_sql = f"""SELECT text, metadata, MATCH (text) AGAINST (:query) AS score
FROM {self._collection_name}
WHERE MATCH (text) AGAINST (:query) > 0
{where_clause}
@@ -235,35 +367,35 @@ class OceanBaseVector(BaseVector):
with self._client.engine.connect() as conn:
with conn.begin():
from sqlalchemy import text
result = conn.execute(text(full_sql), {"query": query})
result = conn.execute(text(full_sql), params)
rows = result.fetchall()
docs = []
for row in rows:
metadata_str, _text, score = row
try:
metadata = json.loads(metadata_str)
except json.JSONDecodeError:
logger.warning("Invalid JSON metadata: %s", metadata_str)
metadata = {}
metadata["score"] = score
docs.append(Document(page_content=_text, metadata=metadata))
return docs
return self._process_search_results(rows, score_threshold=score_threshold)
except Exception as e:
logger.warning("Failed to fulltext search: %s.", str(e))
return []
logger.exception(
"Failed to perform full-text search on collection '%s' with query '%s'",
self._collection_name,
query,
)
raise Exception(f"Full-text search failed for collection '{self._collection_name}'") from e
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
from sqlalchemy import text
document_ids_filter = kwargs.get("document_ids_filter")
_where_clause = None
if document_ids_filter:
# Validate document IDs to prevent SQL injection
# Document IDs should be alphanumeric with hyphens and underscores
import re
for doc_id in document_ids_filter:
if not isinstance(doc_id, str) or not re.match(r"^[a-zA-Z0-9_-]+$", doc_id):
raise ValueError(f"Invalid document ID format: {doc_id}")
# Safe to use in query after validation
document_ids = ", ".join(f"'{id}'" for id in document_ids_filter)
where_clause = f"metadata->>'$.document_id' in ({document_ids})"
from sqlalchemy import text
_where_clause = [text(where_clause)]
ef_search = kwargs.get("ef_search", self._hnsw_ef_search)
if ef_search != self._hnsw_ef_search:
@@ -286,27 +418,27 @@ class OceanBaseVector(BaseVector):
where_clause=_where_clause,
)
except Exception as e:
raise Exception("Failed to search by vector. ", e)
docs = []
for _text, metadata, distance in cur:
logger.exception(
"Failed to perform vector search on collection '%s'",
self._collection_name,
)
raise Exception(f"Vector search failed for collection '{self._collection_name}'") from e
# Convert distance to score and prepare results for processing
results = []
for _text, metadata_str, distance in cur:
score = 1 - distance / math.sqrt(2)
if score >= score_threshold:
try:
metadata = json.loads(metadata)
except json.JSONDecodeError:
logger.warning("Invalid JSON metadata: %s", metadata)
metadata = {}
metadata["score"] = score
docs.append(
Document(
page_content=_text,
metadata=metadata,
)
)
return docs
results.append((_text, metadata_str, score))
return self._process_search_results(results, score_threshold=score_threshold)
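For reference, the conversion above maps an l2 distance onto a similarity score. Assuming l2-normalized embeddings, the distance between two unit vectors is sqrt(2 - 2*cos_sim), so identical vectors score 1.0 and orthogonal ones score 0.0; a quick numeric check of the endpoints:

import math

# Endpoint check for score = 1 - distance / sqrt(2), assuming
# l2-normalized embeddings where distance = sqrt(2 - 2 * cos_sim).
for cos_sim in (1.0, 0.5, 0.0):
    distance = math.sqrt(2 - 2 * cos_sim)
    print(cos_sim, round(1 - distance / math.sqrt(2), 4))
# 1.0 -> 1.0, 0.5 -> 0.2929, 0.0 -> 0.0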
def delete(self):
self._client.drop_table_if_exist(self._collection_name)
try:
self._client.drop_table_if_exist(self._collection_name)
logger.debug("Dropped collection '%s'", self._collection_name)
except Exception as e:
logger.exception("Failed to delete collection '%s'", self._collection_name)
raise Exception(f"Failed to delete collection '{self._collection_name}'") from e
class OceanBaseVectorFactory(AbstractVectorFactory):

File diff suppressed because it is too large

View File

@@ -116,7 +116,7 @@ describe('useTabSearchParams', () => {
setActiveTab('settings')
})
expect(mockPush).toHaveBeenCalledWith('/test-path?category=settings')
expect(mockPush).toHaveBeenCalledWith('/test-path?category=settings', { scroll: false })
expect(mockReplace).not.toHaveBeenCalled()
})
@@ -137,7 +137,7 @@ describe('useTabSearchParams', () => {
setActiveTab('settings')
})
expect(mockReplace).toHaveBeenCalledWith('/test-path?category=settings')
expect(mockReplace).toHaveBeenCalledWith('/test-path?category=settings', { scroll: false })
expect(mockPush).not.toHaveBeenCalled()
})
@@ -157,6 +157,7 @@ describe('useTabSearchParams', () => {
expect(mockPush).toHaveBeenCalledWith(
'/test-path?category=settings%20%26%20config',
{ scroll: false },
)
})
@@ -211,7 +212,7 @@ describe('useTabSearchParams', () => {
setActiveTab('profile')
})
expect(mockPush).toHaveBeenCalledWith('/test-path?tab=profile')
expect(mockPush).toHaveBeenCalledWith('/test-path?tab=profile', { scroll: false })
})
})
@@ -294,7 +295,7 @@ describe('useTabSearchParams', () => {
const [activeTab] = result.current
expect(activeTab).toBe('')
expect(mockPush).toHaveBeenCalledWith('/test-path?category=')
expect(mockPush).toHaveBeenCalledWith('/test-path?category=', { scroll: false })
})
/**
@@ -345,7 +346,7 @@ describe('useTabSearchParams', () => {
setActiveTab('settings')
})
expect(mockPush).toHaveBeenCalledWith('/fallback-path?category=settings')
expect(mockPush).toHaveBeenCalledWith('/fallback-path?category=settings', { scroll: false })
// Restore mock
;(usePathname as jest.Mock).mockReturnValue(mockPathname)
@@ -400,7 +401,7 @@ describe('useTabSearchParams', () => {
})
expect(result.current[0]).toBe('settings')
expect(mockPush).toHaveBeenCalledWith('/test-path?category=settings')
expect(mockPush).toHaveBeenCalledWith('/test-path?category=settings', { scroll: false })
// Change to profile tab
act(() => {
@@ -409,7 +410,7 @@ describe('useTabSearchParams', () => {
})
expect(result.current[0]).toBe('profile')
expect(mockPush).toHaveBeenCalledWith('/test-path?category=profile')
expect(mockPush).toHaveBeenCalledWith('/test-path?category=profile', { scroll: false })
// Verify push was called twice
expect(mockPush).toHaveBeenCalledTimes(2)
@@ -431,7 +432,7 @@ describe('useTabSearchParams', () => {
setActiveTab('advanced')
})
expect(mockPush).toHaveBeenCalledWith('/app/123/settings?category=advanced')
expect(mockPush).toHaveBeenCalledWith('/app/123/settings?category=advanced', { scroll: false })
// Restore mock
;(usePathname as jest.Mock).mockReturnValue(mockPathname)

View File

@@ -40,7 +40,7 @@ export const useTabSearchParams = ({
setTab(newActiveTab)
if (disableSearchParams)
return
router[`${routingBehavior}`](`${pathName}?${searchParamName}=${encodeURIComponent(newActiveTab)}`)
router[`${routingBehavior}`](`${pathName}?${searchParamName}=${encodeURIComponent(newActiveTab)}`, { scroll: false })
}
return [activeTab, setActiveTab] as const

View File

@@ -56,7 +56,7 @@
"@lexical/list": "^0.36.2",
"@lexical/react": "^0.36.2",
"@lexical/selection": "^0.37.0",
"@lexical/text": "^0.36.2",
"@lexical/text": "^0.38.2",
"@lexical/utils": "^0.37.0",
"@monaco-editor/react": "^4.7.0",
"@octokit/core": "^6.1.6",
@@ -79,7 +79,7 @@
"decimal.js": "^10.6.0",
"dompurify": "^3.3.0",
"echarts": "^5.6.0",
"echarts-for-react": "^3.0.2",
"echarts-for-react": "^3.0.5",
"elkjs": "^0.9.3",
"emoji-mart": "^5.6.0",
"fast-deep-equal": "^3.1.3",
@@ -141,7 +141,7 @@
"uuid": "^10.0.0",
"zod": "^3.25.76",
"zundo": "^2.3.0",
"zustand": "^4.5.7"
"zustand": "^5.0.9"
},
"devDependencies": {
"@antfu/eslint-config": "^5.4.1",

View File

@@ -94,8 +94,8 @@ importers:
specifier: ^0.37.0
version: 0.37.0
'@lexical/text':
specifier: ^0.36.2
version: 0.36.2
specifier: ^0.38.2
version: 0.38.2
'@lexical/utils':
specifier: ^0.37.0
version: 0.37.0
@@ -163,8 +163,8 @@ importers:
specifier: ^5.6.0
version: 5.6.0
echarts-for-react:
specifier: ^3.0.2
version: 3.0.2(echarts@5.6.0)(react@19.1.1)
specifier: ^3.0.5
version: 3.0.5(echarts@5.6.0)(react@19.1.1)
elkjs:
specifier: ^0.9.3
version: 0.9.3
@@ -347,10 +347,10 @@ importers:
version: 3.25.76
zundo:
specifier: ^2.3.0
version: 2.3.0(zustand@4.5.7(@types/react@19.1.17)(immer@10.1.3)(react@19.1.1))
version: 2.3.0(zustand@5.0.9(@types/react@19.1.17)(immer@10.1.3)(react@19.1.1)(use-sync-external-store@1.6.0(react@19.1.1)))
zustand:
specifier: ^4.5.7
version: 4.5.7(@types/react@19.1.17)(immer@10.1.3)(react@19.1.1)
specifier: ^5.0.9
version: 5.0.9(@types/react@19.1.17)(immer@10.1.3)(react@19.1.1)(use-sync-external-store@1.6.0(react@19.1.1))
devDependencies:
'@antfu/eslint-config':
specifier: ^5.4.1
@@ -2087,6 +2087,9 @@ packages:
'@lexical/text@0.36.2':
resolution: {integrity: sha512-IbbqgRdMAD6Uk9b2+qSVoy+8RVcczrz6OgXvg39+EYD+XEC7Rbw7kDTWzuNSJJpP7vxSO8YDZSaIlP5gNH3qKA==}
'@lexical/text@0.38.2':
resolution: {integrity: sha512-+juZxUugtC4T37aE3P0l4I9tsWbogDUnTI/mgYk4Ht9g+gLJnhQkzSA8chIyfTxbj5i0A8yWrUUSw+/xA7lKUQ==}
'@lexical/utils@0.36.2':
resolution: {integrity: sha512-P9+t2Ob10YNGYT/PWEER+1EqH8SAjCNRn+7SBvKbr0IdleGF2JvzbJwAWaRwZs1c18P11XdQZ779dGvWlfwBIw==}
@@ -4586,10 +4589,10 @@ packages:
duplexer@0.1.2:
resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==}
echarts-for-react@3.0.2:
resolution: {integrity: sha512-DRwIiTzx8JfwPOVgGttDytBqdp5VzCSyMRIxubgU/g2n9y3VLUmF2FK7Icmg/sNVkv4+rktmrLN9w22U2yy3fA==}
echarts-for-react@3.0.5:
resolution: {integrity: sha512-YpEI5Ty7O/2nvCfQ7ybNa+S90DwE8KYZWacGvJW4luUqywP7qStQ+pxDlYOmr4jGDu10mhEkiAuMKcUlT4W5vg==}
peerDependencies:
echarts: ^3.0.0 || ^4.0.0 || ^5.0.0
echarts: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0
react: ^15.0.0 || >=16.0.0
echarts@5.6.0:
@@ -8445,6 +8448,24 @@ packages:
react:
optional: true
zustand@5.0.9:
resolution: {integrity: sha512-ALBtUj0AfjJt3uNRQoL1tL2tMvj6Gp/6e39dnfT6uzpelGru8v1tPOGBzayOWbPJvujM8JojDk3E1LxeFisBNg==}
engines: {node: '>=12.20.0'}
peerDependencies:
'@types/react': ~19.1.17
immer: '>=9.0.6'
react: '>=18.0.0'
use-sync-external-store: '>=1.2.0'
peerDependenciesMeta:
'@types/react':
optional: true
immer:
optional: true
react:
optional: true
use-sync-external-store:
optional: true
zwitch@2.0.4:
resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==}
@@ -10369,6 +10390,10 @@ snapshots:
dependencies:
lexical: 0.37.0
'@lexical/text@0.38.2':
dependencies:
lexical: 0.37.0
'@lexical/utils@0.36.2':
dependencies:
'@lexical/list': 0.36.2
@@ -13098,7 +13123,7 @@ snapshots:
duplexer@0.1.2: {}
echarts-for-react@3.0.2(echarts@5.6.0)(react@19.1.1):
echarts-for-react@3.0.5(echarts@5.6.0)(react@19.1.1):
dependencies:
echarts: 5.6.0
fast-deep-equal: 3.1.3
@@ -17931,9 +17956,9 @@ snapshots:
dependencies:
tslib: 2.3.0
zundo@2.3.0(zustand@4.5.7(@types/react@19.1.17)(immer@10.1.3)(react@19.1.1)):
zundo@2.3.0(zustand@5.0.9(@types/react@19.1.17)(immer@10.1.3)(react@19.1.1)(use-sync-external-store@1.6.0(react@19.1.1))):
dependencies:
zustand: 4.5.7(@types/react@19.1.17)(immer@10.1.3)(react@19.1.1)
zustand: 5.0.9(@types/react@19.1.17)(immer@10.1.3)(react@19.1.1)(use-sync-external-store@1.6.0(react@19.1.1))
zustand@4.5.7(@types/react@19.1.17)(immer@10.1.3)(react@19.1.1):
dependencies:
@@ -17943,4 +17968,11 @@ snapshots:
immer: 10.1.3
react: 19.1.1
zustand@5.0.9(@types/react@19.1.17)(immer@10.1.3)(react@19.1.1)(use-sync-external-store@1.6.0(react@19.1.1)):
optionalDependencies:
'@types/react': 19.1.17
immer: 10.1.3
react: 19.1.1
use-sync-external-store: 1.6.0(react@19.1.1)
zwitch@2.0.4: {}