feat: json metadata filter adapt (#65)

* config adapt revert

* ci test

* fix mysql migration test

* fix

* fix

* lint fix

* fix ob config

* fix

* fix

* fix

* test over

* test

* fix

* fix

* fix style

* test over

* retain gin for pg

* gin for pg

* uuid default in versions

* ci test

* ci test

* fix

* fix

* fix

* fix

* pg jsonb

* fix
This commit is contained in:
longbingljw 2025-11-15 22:29:59 +08:00 committed by GitHub
parent 84935b9169
commit 6433ac8209
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
57 changed files with 386 additions and 487 deletions

View File

@ -1,7 +1,9 @@
name: Run Pytest
on:
workflow_call:
push:
branches:
- mysql-adapt
concurrency:
group: api-tests-${{ github.head_ref || github.run_id }}

View File

@ -1,7 +1,9 @@
name: DB Migration Test
on:
workflow_call:
push:
branches:
- mysql-adapt
concurrency:
group: db-migration-test-${{ github.ref }}

View File

@ -1,7 +1,9 @@
name: Style check
on:
workflow_call:
push:
branches:
- mysql-adapt
concurrency:
group: style-${{ github.head_ref || github.run_id }}

View File

@ -7,8 +7,7 @@ from collections.abc import Generator, Mapping
from typing import Any, Union, cast
from flask import Flask, current_app
from sqlalchemy import Float, and_, or_, select, text
from sqlalchemy import cast as sqlalchemy_cast
from sqlalchemy import and_, or_, select
from core.app.app_config.entities import (
DatasetEntity,
@ -1023,60 +1022,55 @@ class DatasetRetrieval:
self, sequence: int, condition: str, metadata_name: str, value: Any | None, filters: list
):
if value is None and condition not in ("empty", "not empty"):
return
return filters
json_field = DatasetDocument.doc_metadata[metadata_name].as_string()
key = f"{metadata_name}_{sequence}"
key_value = f"{metadata_name}_{sequence}_value"
match condition:
case "contains":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}%"}
)
)
filters.append(json_field.like(f"%{value}%"))
case "not contains":
filters.append(
(text(f"documents.doc_metadata ->> :{key} NOT LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}%"}
)
)
filters.append(json_field.notlike(f"%{value}%"))
case "start with":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"{value}%"}
)
)
filters.append(json_field.like(f"{value}%"))
case "end with":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}"}
)
)
filters.append(json_field.like(f"%{value}"))
case "is" | "=":
if isinstance(value, str):
filters.append(DatasetDocument.doc_metadata[metadata_name] == f'"{value}"')
else:
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) == value)
filters.append(json_field == value)
elif isinstance(value, (int, float)):
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() == value)
case "is not" | "":
if isinstance(value, str):
filters.append(DatasetDocument.doc_metadata[metadata_name] != f'"{value}"')
else:
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) != value)
filters.append(json_field != value)
elif isinstance(value, (int, float)):
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() != value)
case "empty":
filters.append(DatasetDocument.doc_metadata[metadata_name].is_(None))
case "not empty":
filters.append(DatasetDocument.doc_metadata[metadata_name].isnot(None))
case "before" | "<":
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) < value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() < value)
case "after" | ">":
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) > value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() > value)
case "" | "<=":
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) <= value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() <= value)
case "" | ">=":
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) >= value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() >= value)
case _:
pass
return filters
def _fetch_model_config(

View File

@ -6,12 +6,12 @@ from collections import defaultdict
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any, cast
from sqlalchemy import Float, and_, func, or_, select, text
from sqlalchemy import cast as sqlalchemy_cast
from sqlalchemy import and_, func, literal, or_, select
from sqlalchemy.orm import sessionmaker
from core.app.app_config.entities import DatasetRetrieveConfigEntity
from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
from core.callback_handler.index_tool_callback_handler import DatasetDocument
from core.entities.agent_entities import PlanningStrategy
from core.entities.model_entities import ModelStatus
from core.model_manager import ModelInstance, ModelManager
@ -597,79 +597,79 @@ class KnowledgeRetrievalNode(LLMUsageTrackingMixin, Node):
if value is None and condition not in ("empty", "not empty"):
return filters
key = f"{metadata_name}_{sequence}"
key_value = f"{metadata_name}_{sequence}_value"
json_field = DatasetDocument.doc_metadata[metadata_name].as_string()
match condition:
case "contains":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}%"}
)
)
filters.append(json_field.like(f"%{value}%"))
case "not contains":
filters.append(
(text(f"documents.doc_metadata ->> :{key} NOT LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}%"}
)
)
filters.append(json_field.notlike(f"%{value}%"))
case "start with":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"{value}%"}
)
)
filters.append(json_field.like(f"{value}%"))
case "end with":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}"}
)
)
filters.append(json_field.like(f"%{value}"))
case "in":
if isinstance(value, str):
escaped_values = [v.strip().replace("'", "''") for v in str(value).split(",")]
escaped_value_str = ",".join(escaped_values)
value_list = [v.strip() for v in value.split(",") if v.strip()]
elif isinstance(value, (list, tuple)):
value_list = [str(v) for v in value if v is not None]
else:
escaped_value_str = str(value)
filters.append(
(text(f"documents.doc_metadata ->> :{key} = any(string_to_array(:{key_value},','))")).params(
**{key: metadata_name, key_value: escaped_value_str}
)
)
value_list = [str(value)] if value is not None else []
if not value_list:
filters.append(literal(False))
else:
filters.append(json_field.in_(value_list))
case "not in":
if isinstance(value, str):
escaped_values = [v.strip().replace("'", "''") for v in str(value).split(",")]
escaped_value_str = ",".join(escaped_values)
value_list = [v.strip() for v in value.split(",") if v.strip()]
elif isinstance(value, (list, tuple)):
value_list = [str(v) for v in value if v is not None]
else:
escaped_value_str = str(value)
filters.append(
(text(f"documents.doc_metadata ->> :{key} != all(string_to_array(:{key_value},','))")).params(
**{key: metadata_name, key_value: escaped_value_str}
)
)
case "=" | "is":
value_list = [str(value)] if value is not None else []
if not value_list:
filters.append(literal(True))
else:
filters.append(json_field.notin_(value_list))
case "is" | "=":
if isinstance(value, str):
filters.append(Document.doc_metadata[metadata_name] == f'"{value}"')
else:
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) == value)
filters.append(json_field == value)
elif isinstance(value, (int, float)):
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() == value)
case "is not" | "":
if isinstance(value, str):
filters.append(Document.doc_metadata[metadata_name] != f'"{value}"')
else:
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) != value)
filters.append(json_field != value)
elif isinstance(value, (int, float)):
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() != value)
case "empty":
filters.append(Document.doc_metadata[metadata_name].is_(None))
filters.append(DatasetDocument.doc_metadata[metadata_name].is_(None))
case "not empty":
filters.append(Document.doc_metadata[metadata_name].isnot(None))
filters.append(DatasetDocument.doc_metadata[metadata_name].isnot(None))
case "before" | "<":
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) < value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() < value)
case "after" | ">":
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) > value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() > value)
case "" | "<=":
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) <= value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() <= value)
case "" | ">=":
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) >= value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() >= value)
case _:
pass
return filters
@classmethod

View File

@ -178,12 +178,12 @@ def timezone(timezone_string):
def convert_datetime_to_date(field, target_timezone: str = ":tz"):
if dify_config.SQLALCHEMY_DATABASE_URI_SCHEME == "postgresql":
if dify_config.DB_TYPE == "postgresql":
return f"DATE(DATE_TRUNC('day', {field} AT TIME ZONE 'UTC' AT TIME ZONE {target_timezone}))"
elif "mysql" in dify_config.SQLALCHEMY_DATABASE_URI_SCHEME:
elif dify_config.DB_TYPE == "mysql":
return f"DATE(CONVERT_TZ({field}, 'UTC', {target_timezone}))"
else:
raise NotImplementedError(f"Unsupported database URI scheme: {dify_config.SQLALCHEMY_DATABASE_URI_SCHEME}")
raise NotImplementedError(f"Unsupported database type: {dify_config.DB_TYPE}")
def generate_string(n):

View File

@ -7,7 +7,6 @@ Create Date: 2024-06-12 07:49:07.666510
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
import models.types
@ -38,7 +37,7 @@ def upgrade():
)
else:
op.create_table('tracing_app_configs',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('tracing_provider', sa.String(length=255), nullable=True),
sa.Column('tracing_config', sa.JSON(), nullable=True),

View File

@ -7,7 +7,6 @@ Create Date: 2024-01-12 06:47:21.656262
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -40,7 +39,7 @@ def upgrade():
)
else:
op.create_table('tool_conversation_variables',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2023-08-06 16:57:51.248337
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -43,7 +42,7 @@ def upgrade():
)
else:
op.create_table('provider_models',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
@ -72,7 +71,7 @@ def upgrade():
)
else:
op.create_table('tenant_default_models',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
@ -97,7 +96,7 @@ def upgrade():
)
else:
op.create_table('tenant_preferred_model_providers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('preferred_provider_type', sa.String(length=40), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2024-08-15 09:56:59.012490
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
import models as models
@ -41,7 +40,7 @@ def upgrade():
)
else:
op.create_table('tidb_auth_bindings',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('cluster_id', sa.String(length=255), nullable=False),
sa.Column('cluster_name', sa.String(length=255), nullable=False),

View File

@ -9,7 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4
def _is_pg(conn):
@ -41,7 +40,7 @@ def upgrade():
)
else:
op.create_table('external_knowledge_apis',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
@ -72,7 +71,7 @@ def upgrade():
)
else:
op.create_table('external_knowledge_bindings',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),

View File

@ -9,7 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4
def _is_pg(conn):
@ -36,7 +35,7 @@ def upgrade():
)
else:
op.create_table('whitelists',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('category', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),

View File

@ -9,7 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4
def _is_pg(conn):
@ -37,7 +36,7 @@ def upgrade():
)
else:
op.create_table('account_plugin_permissions',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),

View File

@ -8,7 +8,6 @@ Create Date: 2024-11-22 07:01:17.550037
from alembic import op
import models as models
import sqlalchemy as sa
from uuid import uuid4
def _is_pg(conn):
@ -50,7 +49,7 @@ def upgrade():
)
else:
op.create_table('child_chunks',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),

View File

@ -9,7 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4
def _is_pg(conn):
@ -46,7 +45,7 @@ def downgrade():
)
else:
op.create_table('tool_providers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), autoincrement=False, nullable=False),
sa.Column('id', models.types.StringUUID(), autoincrement=False, nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), autoincrement=False, nullable=False),
sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
sa.Column('encrypted_credentials', models.types.LongText(), autoincrement=False, nullable=True),

View File

@ -9,7 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4
def _is_pg(conn):
@ -38,7 +37,7 @@ def upgrade():
)
else:
op.create_table('dataset_auto_disable_logs',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),

View File

@ -9,7 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4
def _is_pg(conn):
@ -37,7 +36,7 @@ def upgrade():
)
else:
op.create_table('rate_limit_logs',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('subscription_plan', sa.String(length=255), nullable=False),
sa.Column('operation', sa.String(length=255), nullable=False),

View File

@ -9,7 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4
def _is_pg(conn):
@ -27,7 +26,6 @@ def upgrade():
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('dataset_metadata_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
@ -39,9 +37,8 @@ def upgrade():
sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
)
else:
# MySQL: Use compatible syntax
op.create_table('dataset_metadata_bindings',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
@ -74,7 +71,7 @@ def upgrade():
else:
# MySQL: Use compatible syntax
op.create_table('dataset_metadatas',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),

View File

@ -8,7 +8,6 @@ Create Date: 2025-05-15 15:31:03.128680
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
import models as models
@ -49,7 +48,7 @@ def upgrade():
else:
op.create_table(
"workflow_draft_variables",
sa.Column("id", models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column("id", models.types.StringUUID(), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column("app_id", models.types.StringUUID(), nullable=False),

View File

@ -8,7 +8,6 @@ Create Date: 2025-06-25 09:36:07.510570
from alembic import op
import models as models
import sqlalchemy as sa
from uuid import uuid4
def _is_pg(conn):
@ -44,7 +43,7 @@ def upgrade():
)
else:
op.create_table('app_mcp_servers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
@ -80,7 +79,7 @@ def upgrade():
)
else:
op.create_table('tool_mcp_providers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('server_identifier', sa.String(length=24), nullable=False),
sa.Column('server_url', models.types.LongText(), nullable=False),

View File

@ -8,7 +8,6 @@ Create Date: 2025-06-24 17:05:43.118647
from alembic import op
import models as models
import sqlalchemy as sa
from uuid import uuid4
def _is_pg(conn):
@ -37,7 +36,7 @@ def upgrade():
)
else:
op.create_table('tool_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
@ -57,7 +56,7 @@ def upgrade():
)
else:
op.create_table('tool_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),

View File

@ -9,7 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4
def _is_pg(conn):
@ -42,7 +41,7 @@ def upgrade():
)
else:
op.create_table('tenant_plugin_auto_upgrade_strategies',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False),
sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False),

View File

@ -39,7 +39,7 @@ def upgrade():
)
else:
op.create_table('provider_credentials',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('credential_name', sa.String(length=255), nullable=False),

View File

@ -43,7 +43,7 @@ def upgrade():
)
else:
op.create_table('provider_model_credentials',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),

View File

@ -40,7 +40,7 @@ def upgrade():
)
else:
op.create_table('oauth_provider_apps',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_icon', sa.String(length=255), nullable=False),
sa.Column('app_label', sa.JSON(), default='{}', nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),

View File

@ -37,10 +37,10 @@ def upgrade():
)
else:
op.create_table('datasource_oauth_params',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('system_credentials', sa.JSON(), nullable=False),
sa.Column('system_credentials', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
)
@ -59,11 +59,11 @@ def upgrade():
)
else:
op.create_table('datasource_oauth_tenant_params',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('client_params', sa.JSON(), nullable=False),
sa.Column('client_params', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False),
sa.Column('enabled', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
@ -89,13 +89,13 @@ def upgrade():
)
else:
op.create_table('datasource_providers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=128), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('auth_type', sa.String(length=255), nullable=False),
sa.Column('encrypted_credentials', sa.JSON(), nullable=False),
sa.Column('encrypted_credentials', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False),
sa.Column('avatar_url', models.types.LongText(), nullable=True),
sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False),
@ -122,7 +122,7 @@ def upgrade():
)
else:
op.create_table('document_pipeline_execution_logs',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('datasource_type', sa.String(length=255), nullable=False),
@ -157,7 +157,7 @@ def upgrade():
)
else:
op.create_table('pipeline_built_in_templates',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', models.types.LongText(), nullable=False),
sa.Column('chunk_structure', sa.String(length=255), nullable=False),
@ -195,7 +195,7 @@ def upgrade():
else:
# MySQL: Use compatible syntax
op.create_table('pipeline_customized_templates',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', models.types.LongText(), nullable=False),
@ -227,7 +227,7 @@ def upgrade():
)
else:
op.create_table('pipeline_recommended_plugins',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', models.types.LongText(), nullable=False),
sa.Column('provider_name', models.types.LongText(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
@ -253,7 +253,7 @@ def upgrade():
)
else:
op.create_table('pipelines',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', models.types.LongText(), default=sa.text("''"), nullable=False),
@ -281,7 +281,7 @@ def upgrade():
)
else:
op.create_table('workflow_draft_variable_files',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'),
sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'),
@ -306,7 +306,7 @@ def upgrade():
)
else:
op.create_table('workflow_node_execution_offload',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
@ -327,7 +327,7 @@ def upgrade():
else:
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
batch_op.add_column(sa.Column('icon_info', sa.JSON(), nullable=True))
batch_op.add_column(sa.Column('icon_info', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=True))
batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'"), nullable=True))
batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True))
batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True))

View File

@ -43,7 +43,7 @@ def upgrade():
sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False),
sa.Column("resumed_at", sa.DateTime(), nullable=True),
sa.Column("state_object_key", sa.String(length=255), nullable=False),
sa.Column("id", models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column("id", models.types.StringUUID(), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")),

View File

@ -8,7 +8,6 @@ Create Date: 2025-10-30 15:18:49.549156
from alembic import op
import models as models
import sqlalchemy as sa
from uuid import uuid4
from libs.uuid_utils import uuidv7
from models.enums import AppTriggerStatus, AppTriggerType
@ -43,7 +42,7 @@ def upgrade():
)
else:
op.create_table('app_triggers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
@ -71,7 +70,7 @@ def upgrade():
)
else:
op.create_table('trigger_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
@ -95,7 +94,7 @@ def upgrade():
)
else:
op.create_table('trigger_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
@ -127,7 +126,7 @@ def upgrade():
)
else:
op.create_table('trigger_subscriptions',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False, comment='Subscription instance name'),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
@ -165,7 +164,7 @@ def upgrade():
)
else:
op.create_table('workflow_plugin_triggers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
@ -196,7 +195,7 @@ def upgrade():
)
else:
op.create_table('workflow_schedule_plans',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
@ -240,7 +239,7 @@ def upgrade():
)
else:
op.create_table('workflow_trigger_logs',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
@ -288,7 +287,7 @@ def upgrade():
)
else:
op.create_table('workflow_webhook_triggers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),

View File

@ -1,231 +0,0 @@
"""empty message
Revision ID: b16d7178fd1d
Revises: 669ffd70119c
Create Date: 2025-11-13 16:13:00.665249
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql,mysql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'b16d7178fd1d'
down_revision = '669ffd70119c'
branch_labels = None
depends_on = None
def upgrade():
    """Apply dialect-specific schema changes for the JSON-metadata adaptation.

    PostgreSQL branch: drops the GIN indexes that only work on JSONB, converts
    JSONB columns to the portable generic ``sa.JSON`` type, converts the
    plugin include/exclude ARRAY columns to JSON, tightens two VARCHAR
    lengths, and makes ``providers.quota_used`` NOT NULL.
    MySQL branch: only the ``quota_used`` nullability, ``plugin_id`` length,
    and ``workflows.updated_at`` type changes apply (the JSON columns need no
    conversion there).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    if _is_pg(conn):
        # JSONB -> JSON requires dropping the GIN index first; generic JSON
        # columns cannot carry a GIN index.
        with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
            batch_op.drop_index(batch_op.f('source_info_idx'), postgresql_using='gin')
            batch_op.alter_column('source_info',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=False)
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.drop_index(batch_op.f('retrieval_model_idx'), postgresql_using='gin')
            batch_op.alter_column('retrieval_model',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=True)
            batch_op.alter_column('icon_info',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=True)
        with op.batch_alter_table('datasource_oauth_params', schema=None) as batch_op:
            batch_op.alter_column('system_credentials',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=False)
        with op.batch_alter_table('datasource_oauth_tenant_params', schema=None) as batch_op:
            batch_op.alter_column('client_params',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=False)
        with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
            # Narrowing 255 -> 128; assumes existing values fit — TODO confirm
            # against production data before running.
            batch_op.alter_column('provider',
                existing_type=sa.VARCHAR(length=255),
                type_=sa.String(length=128),
                existing_nullable=False)
            batch_op.alter_column('encrypted_credentials',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=False)
        with op.batch_alter_table('documents', schema=None) as batch_op:
            batch_op.drop_index(batch_op.f('document_metadata_idx'), postgresql_using='gin')
            batch_op.alter_column('doc_metadata',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=True)
        with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
            batch_op.alter_column('external_knowledge_id',
                existing_type=sa.TEXT(),
                type_=sa.String(length=512),
                existing_nullable=False)
        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.alter_column('quota_used',
                existing_type=sa.BIGINT(),
                nullable=False)
        with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op:
            # ARRAY -> JSON needs an explicit USING cast on PostgreSQL.
            batch_op.alter_column('exclude_plugins',
                existing_type=postgresql.ARRAY(sa.VARCHAR(length=255)),
                type_=sa.JSON(),
                existing_nullable=False,
                postgresql_using='to_jsonb(exclude_plugins)::json')
            batch_op.alter_column('include_plugins',
                existing_type=postgresql.ARRAY(sa.VARCHAR(length=255)),
                type_=sa.JSON(),
                existing_nullable=False,
                postgresql_using='to_jsonb(include_plugins)::json')
        with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.VARCHAR(length=512),
                type_=sa.String(length=255),
                existing_nullable=False)
        with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.VARCHAR(length=512),
                type_=sa.String(length=255),
                existing_nullable=False)
    else:
        # MySQL path: mirror only the dialect-independent changes.
        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.alter_column('quota_used',
                existing_type=mysql.BIGINT(),
                nullable=False)
        with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=mysql.VARCHAR(length=512),
                type_=sa.String(length=255),
                existing_nullable=False)
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('updated_at',
                existing_type=mysql.TIMESTAMP(),
                type_=sa.DateTime(),
                existing_nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert :func:`upgrade`, in reverse table order.

    PostgreSQL branch: restores JSONB types, recreates the two GIN indexes,
    restores the ARRAY columns and original VARCHAR/TEXT lengths, and makes
    ``providers.quota_used`` nullable again.
    MySQL branch: reverts only the three changes the MySQL upgrade made.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    if _is_pg(conn):
        with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.String(length=255),
                type_=sa.VARCHAR(length=512),
                existing_nullable=False)
        with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.String(length=255),
                type_=sa.VARCHAR(length=512),
                existing_nullable=False)
        with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op:
            # NOTE(review): no postgresql_using cast here for JSON -> ARRAY;
            # PostgreSQL may reject this without one — verify the downgrade path.
            batch_op.alter_column('include_plugins',
                existing_type=sa.JSON(),
                type_=postgresql.ARRAY(sa.VARCHAR(length=255)),
                existing_nullable=False)
            batch_op.alter_column('exclude_plugins',
                existing_type=sa.JSON(),
                type_=postgresql.ARRAY(sa.VARCHAR(length=255)),
                existing_nullable=False)
        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.alter_column('quota_used',
                existing_type=sa.BIGINT(),
                nullable=True)
        with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
            batch_op.alter_column('external_knowledge_id',
                existing_type=sa.String(length=512),
                type_=sa.TEXT(),
                existing_nullable=False)
        with op.batch_alter_table('documents', schema=None) as batch_op:
            batch_op.alter_column('doc_metadata',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=True)
            # GIN index is valid again once the column is JSONB.
            batch_op.create_index(batch_op.f('document_metadata_idx'), ['doc_metadata'], unique=False, postgresql_using='gin')
        with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
            batch_op.alter_column('encrypted_credentials',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=False)
            batch_op.alter_column('provider',
                existing_type=sa.String(length=128),
                type_=sa.VARCHAR(length=255),
                existing_nullable=False)
        with op.batch_alter_table('datasource_oauth_tenant_params', schema=None) as batch_op:
            batch_op.alter_column('client_params',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=False)
        with op.batch_alter_table('datasource_oauth_params', schema=None) as batch_op:
            batch_op.alter_column('system_credentials',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=False)
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.alter_column('icon_info',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=True)
            batch_op.alter_column('retrieval_model',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=True)
            batch_op.create_index(batch_op.f('retrieval_model_idx'), ['retrieval_model'], unique=False, postgresql_using='gin')
        with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
            batch_op.alter_column('source_info',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=False)
            batch_op.create_index(batch_op.f('source_info_idx'), ['source_info'], unique=False, postgresql_using='gin')
    else:
        # MySQL path: reverse of the MySQL upgrade branch.
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('updated_at',
                existing_type=sa.DateTime(),
                type_=mysql.TIMESTAMP(),
                existing_nullable=False)
        with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.String(length=255),
                type_=mysql.VARCHAR(length=512),
                existing_nullable=False)
        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.alter_column('quota_used',
                existing_type=mysql.BIGINT(),
                nullable=True)
    # ### end Alembic commands ###

View File

@ -0,0 +1,131 @@
"""empty message
Revision ID: 09cfdda155d1
Revises: 669ffd70119c
Create Date: 2025-11-15 21:02:32.472885
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql, mysql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '09cfdda155d1'
down_revision = '669ffd70119c'
branch_labels = None
depends_on = None
def upgrade():
    """Apply dialect-specific column-type adjustments.

    PostgreSQL branch: tightens ``datasource_providers.provider`` and the two
    ``plugin_id`` columns, widens ``external_knowledge_id`` from TEXT to
    VARCHAR(512), and converts the plugin include/exclude ARRAY columns to
    JSON (with an explicit USING cast).
    MySQL branch: only the ``plugin_id`` length and ``workflows.updated_at``
    type changes apply.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    if _is_pg(conn):
        with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
            # Narrowing 255 -> 128; assumes existing values fit — TODO confirm.
            batch_op.alter_column('provider',
                existing_type=sa.VARCHAR(length=255),
                type_=sa.String(length=128),
                existing_nullable=False)
        with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
            batch_op.alter_column('external_knowledge_id',
                existing_type=sa.TEXT(),
                type_=sa.String(length=512),
                existing_nullable=False)
        with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op:
            # ARRAY -> JSON needs an explicit USING cast on PostgreSQL.
            batch_op.alter_column('exclude_plugins',
                existing_type=postgresql.ARRAY(sa.VARCHAR(length=255)),
                type_=sa.JSON(),
                existing_nullable=False,
                postgresql_using='to_jsonb(exclude_plugins)::json')
            batch_op.alter_column('include_plugins',
                existing_type=postgresql.ARRAY(sa.VARCHAR(length=255)),
                type_=sa.JSON(),
                existing_nullable=False,
                postgresql_using='to_jsonb(include_plugins)::json')
        with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.VARCHAR(length=512),
                type_=sa.String(length=255),
                existing_nullable=False)
        with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.VARCHAR(length=512),
                type_=sa.String(length=255),
                existing_nullable=False)
    else:
        # MySQL path: mirror only the dialect-independent changes.
        with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=mysql.VARCHAR(length=512),
                type_=sa.String(length=255),
                existing_nullable=False)
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('updated_at',
                existing_type=mysql.TIMESTAMP(),
                type_=sa.DateTime(),
                existing_nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert :func:`upgrade`, in reverse table order.

    PostgreSQL branch: restores the original VARCHAR/TEXT lengths and the
    ARRAY types for the plugin include/exclude columns.
    MySQL branch: reverts only the two changes the MySQL upgrade made.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    if _is_pg(conn):
        with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.String(length=255),
                type_=sa.VARCHAR(length=512),
                existing_nullable=False)
        with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.String(length=255),
                type_=sa.VARCHAR(length=512),
                existing_nullable=False)
        with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op:
            # NOTE(review): no postgresql_using cast here for JSON -> ARRAY;
            # PostgreSQL may reject this without one — verify the downgrade path.
            batch_op.alter_column('include_plugins',
                existing_type=sa.JSON(),
                type_=postgresql.ARRAY(sa.VARCHAR(length=255)),
                existing_nullable=False)
            batch_op.alter_column('exclude_plugins',
                existing_type=sa.JSON(),
                type_=postgresql.ARRAY(sa.VARCHAR(length=255)),
                existing_nullable=False)
        with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
            batch_op.alter_column('external_knowledge_id',
                existing_type=sa.String(length=512),
                type_=sa.TEXT(),
                existing_nullable=False)
        with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
            batch_op.alter_column('provider',
                existing_type=sa.String(length=128),
                type_=sa.VARCHAR(length=255),
                existing_nullable=False)
    else:
        # MySQL path: reverse of the MySQL upgrade branch.
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('updated_at',
                existing_type=sa.DateTime(),
                type_=mysql.TIMESTAMP(),
                existing_nullable=False)
        with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.String(length=255),
                type_=mysql.VARCHAR(length=512),
                existing_nullable=False)
    # ### end Alembic commands ###

View File

@ -7,7 +7,6 @@ Create Date: 2023-12-14 11:26:12.287264
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -41,7 +40,7 @@ def upgrade():
)
else:
op.create_table('app_annotation_settings',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False),
sa.Column('collection_binding_id', models.types.StringUUID(), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2024-05-14 09:27:18.857890
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
import models.types
@ -36,7 +35,7 @@ def upgrade():
)
else:
op.create_table('tool_label_bindings',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tool_id', sa.String(length=64), nullable=False),
sa.Column('tool_type', sa.String(length=40), nullable=False),
sa.Column('label_name', sa.String(length=40), nullable=False),

View File

@ -6,8 +6,7 @@ Create Date: 2024-04-11 06:17:34.278594
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
@ -39,7 +38,7 @@ def upgrade():
)
else:
op.create_table('tag_bindings',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('tag_id', models.types.StringUUID(), nullable=True),
sa.Column('target_id', models.types.StringUUID(), nullable=True),
@ -64,7 +63,7 @@ def upgrade():
)
else:
op.create_table('tags',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('type', sa.String(length=16), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2024-01-05 15:26:25.117551
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -43,7 +42,7 @@ def upgrade():
else:
# MySQL: Use compatible syntax
op.create_table('tool_api_providers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('schema', models.types.LongText(), nullable=False),
sa.Column('schema_type_str', sa.String(length=40), nullable=False),
@ -69,7 +68,7 @@ def upgrade():
else:
# MySQL: Use compatible syntax
op.create_table('tool_builtin_providers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=40), nullable=False),
@ -100,7 +99,7 @@ def upgrade():
else:
# MySQL: Use compatible syntax
op.create_table('tool_published_apps',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('description', models.types.LongText(), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2024-01-15 11:37:16.782718
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -40,7 +39,7 @@ def upgrade():
)
else:
op.create_table('tool_files',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2024-05-10 12:08:09.812736
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
import models.types
@ -42,7 +41,7 @@ def upgrade():
)
else:
op.create_table('load_balancing_model_configs',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
@ -73,7 +72,7 @@ def upgrade():
)
else:
op.create_table('provider_model_settings',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),

View File

@ -8,7 +8,6 @@ Create Date: 2023-05-13 14:26:59.085018
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from uuid import uuid4
import models.types
@ -47,7 +46,7 @@ def upgrade():
)
else:
op.create_table('account_integrates',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('account_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=16), nullable=False),
sa.Column('open_id', sa.String(length=255), nullable=False),
@ -79,7 +78,7 @@ def upgrade():
)
else:
op.create_table('accounts',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('password', sa.String(length=255), nullable=True),
@ -113,7 +112,7 @@ def upgrade():
)
else:
op.create_table('api_requests',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('api_token_id', models.types.StringUUID(), nullable=False),
sa.Column('path', sa.String(length=255), nullable=False),
@ -139,7 +138,7 @@ def upgrade():
)
else:
op.create_table('api_tokens',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=True),
sa.Column('dataset_id', models.types.StringUUID(), nullable=True),
sa.Column('type', sa.String(length=16), nullable=False),
@ -162,7 +161,7 @@ def upgrade():
)
else:
op.create_table('app_dataset_joins',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
@ -192,7 +191,7 @@ def upgrade():
)
else:
op.create_table('app_model_configs',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('model_id', sa.String(length=255), nullable=False),
@ -234,7 +233,7 @@ def upgrade():
)
else:
op.create_table('apps',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('mode', sa.String(length=255), nullable=False),
@ -342,7 +341,7 @@ def upgrade():
)
else:
op.create_table('conversations',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('app_model_config_id', models.types.StringUUID(), nullable=False),
sa.Column('model_provider', sa.String(length=255), nullable=False),
@ -378,7 +377,7 @@ def upgrade():
)
else:
op.create_table('dataset_keyword_tables',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('keyword_table', models.types.LongText(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_keyword_table_pkey'),
@ -399,7 +398,7 @@ def upgrade():
)
else:
op.create_table('dataset_process_rules',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('mode', sa.String(length=255), server_default=sa.text("'automatic'"), nullable=False),
sa.Column('rules', models.types.LongText(), nullable=True),
@ -424,7 +423,7 @@ def upgrade():
)
else:
op.create_table('dataset_queries',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('content', models.types.LongText(), nullable=False),
sa.Column('source', sa.String(length=255), nullable=False),
@ -456,7 +455,7 @@ def upgrade():
)
else:
op.create_table('datasets',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', models.types.LongText(), nullable=True),
@ -514,7 +513,7 @@ def upgrade():
)
else:
op.create_table('document_segments',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
@ -589,7 +588,7 @@ def upgrade():
)
else:
op.create_table('documents',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
@ -644,7 +643,7 @@ def upgrade():
)
else:
op.create_table('embeddings',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('hash', sa.String(length=64), nullable=False),
sa.Column('embedding', models.types.BinaryData(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
@ -667,7 +666,7 @@ def upgrade():
)
else:
op.create_table('end_users',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=True),
sa.Column('type', sa.String(length=255), nullable=False),
@ -698,7 +697,7 @@ def upgrade():
)
else:
op.create_table('installed_apps',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('app_owner_tenant_id', models.types.StringUUID(), nullable=False),
@ -771,7 +770,7 @@ def upgrade():
)
else:
op.create_table('message_agent_thoughts',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('message_id', models.types.StringUUID(), nullable=False),
sa.Column('message_chain_id', models.types.StringUUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
@ -811,7 +810,7 @@ def upgrade():
)
else:
op.create_table('message_chains',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('message_id', models.types.StringUUID(), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),
sa.Column('input', models.types.LongText(), nullable=True),
@ -839,7 +838,7 @@ def upgrade():
)
else:
op.create_table('message_feedbacks',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('message_id', models.types.StringUUID(), nullable=False),
@ -871,7 +870,7 @@ def upgrade():
)
else:
op.create_table('operation_logs',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('account_id', models.types.StringUUID(), nullable=False),
sa.Column('action', sa.String(length=255), nullable=False),
@ -895,7 +894,7 @@ def upgrade():
)
else:
op.create_table('pinned_conversations',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
@ -924,7 +923,7 @@ def upgrade():
)
else:
op.create_table('providers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('provider_type', sa.String(length=40), nullable=False, server_default=sa.text("'custom'")),
@ -959,7 +958,7 @@ def upgrade():
)
else:
op.create_table('recommended_apps',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('description', sa.JSON(), nullable=False),
sa.Column('copyright', sa.String(length=255), nullable=False),
@ -987,7 +986,7 @@ def upgrade():
)
else:
op.create_table('saved_messages',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('message_id', models.types.StringUUID(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
@ -1037,7 +1036,7 @@ def upgrade():
)
else:
op.create_table('sites',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('icon', sa.String(length=255), nullable=True),
@ -1073,7 +1072,7 @@ def upgrade():
)
else:
op.create_table('tenant_account_joins',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('account_id', models.types.StringUUID(), nullable=False),
sa.Column('role', sa.String(length=16), server_default='normal', nullable=False),
@ -1100,7 +1099,7 @@ def upgrade():
)
else:
op.create_table('tenants',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('encrypt_public_key', models.types.LongText(), nullable=True),
sa.Column('plan', sa.String(length=255), server_default=sa.text("'basic'"), nullable=False),
@ -1129,7 +1128,7 @@ def upgrade():
)
else:
op.create_table('upload_files',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('storage_type', sa.String(length=255), nullable=False),
sa.Column('key', sa.String(length=255), nullable=False),
@ -1162,7 +1161,7 @@ def upgrade():
)
else:
op.create_table('message_annotations',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('message_id', models.types.StringUUID(), nullable=False),
@ -1206,7 +1205,7 @@ def upgrade():
)
else:
op.create_table('messages',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('model_provider', sa.String(length=255), nullable=False),
sa.Column('model_id', sa.String(length=255), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2023-09-06 16:51:27.385844
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -51,7 +50,7 @@ def upgrade():
)
else:
op.create_table('dataset_retriever_resources',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('message_id', models.types.StringUUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2023-09-13 22:16:48.027810
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -38,7 +37,7 @@ def upgrade():
)
else:
op.create_table('dataset_collection_bindings',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('collection_name', sa.String(length=64), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2024-05-14 07:31:29.702766
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
import models.types
@ -42,7 +41,7 @@ def upgrade():
else:
# MySQL: Use compatible syntax
op.create_table('data_source_api_key_auth_bindings',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('category', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2024-05-04 09:47:19.366961
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
import models.types
@ -46,7 +45,7 @@ def upgrade():
else:
# MySQL: Use compatible syntax
op.create_table('tool_workflow_providers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('icon', sa.String(length=255), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2023-07-10 10:26:50.074515
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -43,7 +42,7 @@ def upgrade():
else:
# MySQL: Use compatible syntax
op.create_table('tool_providers',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('tool_name', sa.String(length=40), nullable=False),
sa.Column('encrypted_credentials', models.types.LongText(), nullable=True),

View File

@ -7,7 +7,6 @@ Create Date: 2024-06-25 03:20:46.012193
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
import models.types
@ -37,7 +36,7 @@ def upgrade():
)
else:
op.create_table('dataset_permissions',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('account_id', models.types.StringUUID(), nullable=False),
sa.Column('has_permission', sa.Boolean(), server_default=sa.text('true'), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2023-11-09 11:39:00.006432
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -42,7 +41,7 @@ def upgrade():
)
else:
op.create_table('message_files',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('message_id', models.types.StringUUID(), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),
sa.Column('transfer_method', sa.String(length=255), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2023-10-27 13:05:58.901858
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -39,7 +38,7 @@ def upgrade():
)
else:
op.create_table('api_based_extensions',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('api_endpoint', sa.String(length=255), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2024-02-19 12:47:24.646954
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -42,7 +41,7 @@ def upgrade():
)
else:
op.create_table('workflow_app_logs',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
@ -84,7 +83,7 @@ def upgrade():
)
else:
op.create_table('workflow_node_executions',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
@ -138,7 +137,7 @@ def upgrade():
)
else:
op.create_table('workflow_runs',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('sequence_number', sa.Integer(), nullable=False),
@ -180,7 +179,7 @@ def upgrade():
)
else:
op.create_table('workflows',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2023-08-10 00:03:44.273430
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -49,7 +48,7 @@ def upgrade():
)
else:
op.create_table('provider_orders',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('account_id', models.types.StringUUID(), nullable=False),

View File

@ -8,7 +8,6 @@ Create Date: 2024-06-17 10:01:00.255189
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from uuid import uuid4
import models.types
@ -40,7 +39,7 @@ def upgrade():
)
else:
op.create_table('trace_app_config',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('tracing_provider', sa.String(length=255), nullable=True),
sa.Column('tracing_config', sa.JSON(), nullable=True),

View File

@ -7,7 +7,6 @@ Create Date: 2024-01-09 11:42:50.664797
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -52,7 +51,7 @@ def upgrade():
)
else:
op.create_table('tool_model_invokes',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=40), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2023-12-12 06:58:41.054544
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -40,7 +39,7 @@ def upgrade():
)
else:
op.create_table('app_annotation_hit_histories',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('annotation_id', models.types.StringUUID(), nullable=False),
sa.Column('source', models.types.LongText(), nullable=False),

View File

@ -7,7 +7,6 @@ Create Date: 2023-06-06 19:58:33.103819
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -41,11 +40,11 @@ def upgrade():
)
else:
op.create_table('data_source_bindings',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('access_token', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('source_info', sa.JSON(), nullable=False),
sa.Column('source_info', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True),

View File

@ -33,7 +33,7 @@ def upgrade():
batch_op.create_index('retrieval_model_idx', ['retrieval_model'], unique=False, postgresql_using='gin')
else:
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('retrieval_model', sa.JSON(), nullable=True))
batch_op.add_column(sa.Column('retrieval_model', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=True))
# ### end Alembic commands ###

View File

@ -7,7 +7,6 @@ Create Date: 2024-07-19 12:03:21.217463
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql
import models.types
@ -56,7 +55,7 @@ def downgrade():
else:
op.create_table(
'tracing_app_configs',
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('tracing_provider', sa.String(length=255), nullable=True),
sa.Column('tracing_config', sa.JSON(), nullable=True),

View File

@ -29,7 +29,7 @@ from .account import Account
from .base import Base
from .engine import db
from .model import App, Tag, TagBinding, UploadFile
from .types import BinaryData, LongText, StringUUID
from .types import AdjustedJSON, BinaryData, LongText, StringUUID, adjusted_json_index
logger = logging.getLogger(__name__)
@ -45,6 +45,7 @@ class Dataset(Base):
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="dataset_pkey"),
sa.Index("dataset_tenant_idx", "tenant_id"),
adjusted_json_index("retrieval_model_idx", "retrieval_model"),
)
INDEXING_TECHNIQUE_LIST = ["high_quality", "economy", None]
@ -69,9 +70,9 @@ class Dataset(Base):
embedding_model_provider = mapped_column(sa.String(255), nullable=True)
keyword_number = mapped_column(sa.Integer, nullable=True, server_default=sa.text("10"))
collection_binding_id = mapped_column(StringUUID, nullable=True)
retrieval_model = mapped_column(sa.JSON, nullable=True)
retrieval_model = mapped_column(AdjustedJSON, nullable=True)
built_in_field_enabled = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"))
icon_info = mapped_column(sa.JSON, nullable=True)
icon_info = mapped_column(AdjustedJSON, nullable=True)
runtime_mode = mapped_column(sa.String(255), nullable=True, server_default=sa.text("'general'"))
pipeline_id = mapped_column(StringUUID, nullable=True)
chunk_structure = mapped_column(sa.String(255), nullable=True)
@ -347,6 +348,7 @@ class Document(Base):
sa.Index("document_dataset_id_idx", "dataset_id"),
sa.Index("document_is_paused_idx", "is_paused"),
sa.Index("document_tenant_idx", "tenant_id"),
adjusted_json_index("document_metadata_idx", "doc_metadata"),
)
# initial fields
@ -405,7 +407,7 @@ class Document(Base):
DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
)
doc_type = mapped_column(String(40), nullable=True)
doc_metadata = mapped_column(sa.JSON, nullable=True)
doc_metadata = mapped_column(AdjustedJSON, nullable=True)
doc_form = mapped_column(String(255), nullable=False, server_default=sa.text("'text_model'"))
doc_language = mapped_column(String(255), nullable=True)

View File

@ -7,7 +7,7 @@ from sqlalchemy.orm import Mapped, mapped_column
from libs.uuid_utils import uuidv7
from .base import Base
from .types import LongText, StringUUID
from .types import AdjustedJSON, LongText, StringUUID
class DatasourceOauthParamConfig(Base): # type: ignore[name-defined]
@ -20,7 +20,7 @@ class DatasourceOauthParamConfig(Base): # type: ignore[name-defined]
id = mapped_column(StringUUID, default=lambda: str(uuidv7()))
plugin_id: Mapped[str] = mapped_column(sa.String(255), nullable=False)
provider: Mapped[str] = mapped_column(sa.String(255), nullable=False)
system_credentials: Mapped[dict] = mapped_column(sa.JSON, nullable=False)
system_credentials: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False)
class DatasourceProvider(Base):
@ -36,7 +36,7 @@ class DatasourceProvider(Base):
provider: Mapped[str] = mapped_column(sa.String(128), nullable=False)
plugin_id: Mapped[str] = mapped_column(sa.String(255), nullable=False)
auth_type: Mapped[str] = mapped_column(sa.String(255), nullable=False)
encrypted_credentials: Mapped[dict] = mapped_column(sa.JSON, nullable=False)
encrypted_credentials: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False)
avatar_url: Mapped[str] = mapped_column(LongText, nullable=True, default="default")
is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"))
expires_at: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default="-1")
@ -58,7 +58,7 @@ class DatasourceOauthTenantParamConfig(Base):
tenant_id = mapped_column(StringUUID, nullable=False)
provider: Mapped[str] = mapped_column(sa.String(255), nullable=False)
plugin_id: Mapped[str] = mapped_column(sa.String(255), nullable=False)
client_params: Mapped[dict] = mapped_column(sa.JSON, nullable=False, default={})
client_params: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False, default={})
enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=False)
created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp())

View File

@ -8,7 +8,7 @@ from sqlalchemy.orm import Mapped, mapped_column
from models.base import TypeBase
from .types import LongText, StringUUID
from .types import AdjustedJSON, LongText, StringUUID, adjusted_json_index
class DataSourceOauthBinding(TypeBase):
@ -16,13 +16,14 @@ class DataSourceOauthBinding(TypeBase):
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="source_binding_pkey"),
sa.Index("source_binding_tenant_id_idx", "tenant_id"),
adjusted_json_index("source_info_idx", "source_info"),
)
id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
access_token: Mapped[str] = mapped_column(String(255), nullable=False)
provider: Mapped[str] = mapped_column(String(255), nullable=False)
source_info: Mapped[dict] = mapped_column(sa.JSON, nullable=False)
source_info: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False)
created_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)

View File

@ -2,12 +2,15 @@ import enum
import uuid
from typing import Any, Generic, TypeVar
import sqlalchemy as sa
from sqlalchemy import CHAR, TEXT, VARCHAR, LargeBinary, TypeDecorator
from sqlalchemy.dialects.mysql import LONGBLOB, LONGTEXT
from sqlalchemy.dialects.postgresql import BYTEA, UUID
from sqlalchemy.dialects.postgresql import BYTEA, JSONB, UUID
from sqlalchemy.engine.interfaces import Dialect
from sqlalchemy.sql.type_api import TypeEngine
from configs import dify_config
class StringUUID(TypeDecorator[uuid.UUID | str | None]):
impl = CHAR
@ -81,6 +84,32 @@ class BinaryData(TypeDecorator[bytes | None]):
return value
class AdjustedJSON(TypeDecorator[dict | list | None]):
    """Cross-dialect JSON column type.

    Renders as PostgreSQL ``JSONB`` (optionally carrying a custom
    ``astext_type`` used for ``->>`` text extraction) and falls back to
    the generic ``sa.JSON`` type on every other dialect (e.g. MySQL).
    """

    impl = sa.JSON
    cache_ok = True

    def __init__(self, astext_type=None):
        # ``astext_type`` is only meaningful for PostgreSQL JSONB; other
        # dialects ignore it. Stored under the same name as the parameter
        # so SQLAlchemy's cache-key inspection picks it up (cache_ok=True).
        self.astext_type = astext_type
        super().__init__()

    def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
        """Return the concrete type implementation for the active dialect."""
        if dialect.name == "postgresql":
            if self.astext_type:
                return dialect.type_descriptor(JSONB(astext_type=self.astext_type))
            return dialect.type_descriptor(JSONB())
        # MySQL and any other dialect share the same generic JSON fallback;
        # the original duplicated this return in an explicit elif branch.
        return dialect.type_descriptor(sa.JSON())

    def process_bind_param(self, value: dict | list | None, dialect: Dialect) -> dict | list | None:
        # Identity pass-through: (de)serialization is handled by the
        # underlying dialect type, not by this decorator.
        return value

    def process_result_value(self, value: dict | list | None, dialect: Dialect) -> dict | list | None:
        # Identity pass-through; see process_bind_param.
        return value
_E = TypeVar("_E", bound=enum.StrEnum)
@ -124,3 +153,11 @@ class EnumText(TypeDecorator[_E | None], Generic[_E]):
if x is None or y is None:
return x is y
return x == y
def adjusted_json_index(index_name, column_name):
    """Build a GIN index definition for a JSON column on PostgreSQL.

    On non-PostgreSQL backends (e.g. MySQL) no index is defined and
    ``None`` is returned.

    NOTE(review): callers place the result directly inside
    ``__table_args__`` — presumably the SQLAlchemy version in use skips
    ``None`` entries there; confirm before changing this contract.
    """
    effective_name = index_name if index_name else f"{column_name}_idx"
    if dify_config.DB_TYPE != "postgresql":
        return None
    return sa.Index(effective_name, column_name, postgresql_using="gin")